]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
acinclude.m4 (LIB_AC_PROG_CXX): Remove CXX from the list of alternatives for CXX_libs...
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
3c71940f
JL
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
230d793d
RS
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
230d793d 21
230d793d
RS
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
663522cb 61 removed because there is no way to know which register it was
230d793d
RS
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
670ee920 78#include "system.h"
c5c76735 79#include "rtl.h"
a091679a 80#include "tm_p.h"
230d793d
RS
81#include "flags.h"
82#include "regs.h"
55310dad 83#include "hard-reg-set.h"
230d793d
RS
84#include "basic-block.h"
85#include "insn-config.h"
49ad7cfa 86#include "function.h"
d6f4ec51
KG
87/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
88#include "expr.h"
230d793d
RS
89#include "insn-flags.h"
90#include "insn-codes.h"
91#include "insn-attr.h"
92#include "recog.h"
93#include "real.h"
2e107e9e 94#include "toplev.h"
f73ad30e
JH
95#include "defaults.h"
96
97#ifndef ACCUMULATE_OUTGOING_ARGS
98#define ACCUMULATE_OUTGOING_ARGS 0
99#endif
100
101/* Supply a default definition for PUSH_ARGS. */
102#ifndef PUSH_ARGS
103#ifdef PUSH_ROUNDING
104#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
105#else
106#define PUSH_ARGS 0
107#endif
108#endif
230d793d
RS
109
110/* It is not safe to use ordinary gen_lowpart in combine.
111 Use gen_lowpart_for_combine instead. See comments there. */
112#define gen_lowpart dont_use_gen_lowpart_you_dummy
113
114/* Number of attempts to combine instructions in this function. */
115
116static int combine_attempts;
117
118/* Number of attempts that got as far as substitution in this function. */
119
120static int combine_merges;
121
122/* Number of instructions combined with added SETs in this function. */
123
124static int combine_extras;
125
126/* Number of instructions combined in this function. */
127
128static int combine_successes;
129
130/* Totals over entire compilation. */
131
132static int total_attempts, total_merges, total_extras, total_successes;
9210df58 133
ddd5a7c1 134/* Define a default value for REVERSIBLE_CC_MODE.
9210df58
RK
135 We can never assume that a condition code mode is safe to reverse unless
136 the md tells us so. */
137#ifndef REVERSIBLE_CC_MODE
138#define REVERSIBLE_CC_MODE(MODE) 0
139#endif
230d793d
RS
140\f
141/* Vector mapping INSN_UIDs to cuids.
5089e22e 142 The cuids are like uids but increase monotonically always.
230d793d
RS
143 Combine always uses cuids so that it can compare them.
144 But actually renumbering the uids, which we used to do,
145 proves to be a bad idea because it makes it hard to compare
146 the dumps produced by earlier passes with those from later passes. */
147
148static int *uid_cuid;
4255220d 149static int max_uid_cuid;
230d793d
RS
150
151/* Get the cuid of an insn. */
152
1427d6d2
RK
153#define INSN_CUID(INSN) \
154(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d
RS
155
156/* Maximum register number, which is the size of the tables below. */
157
770ae6cc 158static unsigned int combine_max_regno;
230d793d
RS
159
160/* Record last point of death of (hard or pseudo) register n. */
161
162static rtx *reg_last_death;
163
164/* Record last point of modification of (hard or pseudo) register n. */
165
166static rtx *reg_last_set;
167
168/* Record the cuid of the last insn that invalidated memory
169 (anything that writes memory, and subroutine calls, but not pushes). */
170
171static int mem_last_set;
172
173/* Record the cuid of the last CALL_INSN
174 so we can tell whether a potential combination crosses any calls. */
175
176static int last_call_cuid;
177
178/* When `subst' is called, this is the insn that is being modified
179 (by combining in a previous insn). The PATTERN of this insn
180 is still the old pattern partially modified and it should not be
181 looked at, but this may be used to examine the successors of the insn
182 to judge whether a simplification is valid. */
183
184static rtx subst_insn;
185
0d9641d1
JW
186/* This is an insn that belongs before subst_insn, but is not currently
187 on the insn chain. */
188
189static rtx subst_prev_insn;
190
230d793d
RS
191/* This is the lowest CUID that `subst' is currently dealing with.
192 get_last_value will not return a value if the register was set at or
193 after this CUID. If not for this mechanism, we could get confused if
194 I2 or I1 in try_combine were an insn that used the old value of a register
195 to obtain a new value. In that case, we might erroneously get the
196 new value of the register when we wanted the old one. */
197
198static int subst_low_cuid;
199
6e25d159
RK
200/* This contains any hard registers that are used in newpat; reg_dead_at_p
201 must consider all these registers to be always live. */
202
203static HARD_REG_SET newpat_used_regs;
204
abe6e52f
RK
205/* This is an insn to which a LOG_LINKS entry has been added. If this
206 insn is the earlier than I2 or I3, combine should rescan starting at
207 that location. */
208
209static rtx added_links_insn;
210
0d4d42c3
RK
211/* Basic block number of the block in which we are performing combines. */
212static int this_basic_block;
715e7fbc 213
663522cb
KH
214/* A bitmap indicating which blocks had registers go dead at entry.
215 After combine, we'll need to re-do global life analysis with
715e7fbc
RH
216 those blocks as starting points. */
217static sbitmap refresh_blocks;
218static int need_refresh;
230d793d
RS
219\f
220/* The next group of arrays allows the recording of the last value assigned
221 to (hard or pseudo) register n. We use this information to see if a
5089e22e 222 operation being processed is redundant given a prior operation performed
230d793d
RS
223 on the register. For example, an `and' with a constant is redundant if
224 all the zero bits are already known to be turned off.
225
226 We use an approach similar to that used by cse, but change it in the
227 following ways:
228
229 (1) We do not want to reinitialize at each label.
230 (2) It is useful, but not critical, to know the actual value assigned
231 to a register. Often just its form is helpful.
232
233 Therefore, we maintain the following arrays:
234
235 reg_last_set_value the last value assigned
236 reg_last_set_label records the value of label_tick when the
237 register was assigned
238 reg_last_set_table_tick records the value of label_tick when a
239 value using the register is assigned
240 reg_last_set_invalid set to non-zero when it is not valid
241 to use the value of this register in some
242 register's value
243
244 To understand the usage of these tables, it is important to understand
245 the distinction between the value in reg_last_set_value being valid
246 and the register being validly contained in some other expression in the
247 table.
248
249 Entry I in reg_last_set_value is valid if it is non-zero, and either
250 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
251
252 Register I may validly appear in any expression returned for the value
253 of another register if reg_n_sets[i] is 1. It may also appear in the
254 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
255 reg_last_set_invalid[j] is zero.
256
257 If an expression is found in the table containing a register which may
258 not validly appear in an expression, the register is replaced by
259 something that won't match, (clobber (const_int 0)).
260
261 reg_last_set_invalid[i] is set non-zero when register I is being assigned
262 to and reg_last_set_table_tick[i] == label_tick. */
263
0f41302f 264/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
265
266static rtx *reg_last_set_value;
267
268/* Record the value of label_tick when the value for register n is placed in
269 reg_last_set_value[n]. */
270
568356af 271static int *reg_last_set_label;
230d793d
RS
272
273/* Record the value of label_tick when an expression involving register n
0f41302f 274 is placed in reg_last_set_value. */
230d793d 275
568356af 276static int *reg_last_set_table_tick;
230d793d
RS
277
278/* Set non-zero if references to register n in expressions should not be
279 used. */
280
281static char *reg_last_set_invalid;
282
0f41302f 283/* Incremented for each label. */
230d793d 284
568356af 285static int label_tick;
230d793d
RS
286
287/* Some registers that are set more than once and used in more than one
288 basic block are nevertheless always set in similar ways. For example,
289 a QImode register may be loaded from memory in two places on a machine
290 where byte loads zero extend.
291
951553af 292 We record in the following array what we know about the nonzero
230d793d
RS
293 bits of a register, specifically which bits are known to be zero.
294
295 If an entry is zero, it means that we don't know anything special. */
296
55310dad 297static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 298
951553af 299/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 300 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 301
951553af 302static enum machine_mode nonzero_bits_mode;
230d793d 303
d0ab8cd3
RK
304/* Nonzero if we know that a register has some leading bits that are always
305 equal to the sign bit. */
306
770ae6cc 307static unsigned char *reg_sign_bit_copies;
d0ab8cd3 308
951553af 309/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
310 It is zero while computing them and after combine has completed. This
311 former test prevents propagating values based on previously set values,
312 which can be incorrect if a variable is modified in a loop. */
230d793d 313
951553af 314static int nonzero_sign_valid;
55310dad
RK
315
316/* These arrays are maintained in parallel with reg_last_set_value
317 and are used to store the mode in which the register was last set,
318 the bits that were known to be zero when it was last set, and the
319 number of sign bits copies it was known to have when it was last set. */
320
321static enum machine_mode *reg_last_set_mode;
322static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
323static char *reg_last_set_sign_bit_copies;
230d793d
RS
324\f
325/* Record one modification to rtl structure
326 to be undone by storing old_contents into *where.
327 is_int is 1 if the contents are an int. */
328
329struct undo
330{
241cea85 331 struct undo *next;
230d793d 332 int is_int;
0345195a
RK
333 union {rtx r; unsigned int i;} old_contents;
334 union {rtx *r; unsigned int *i;} where;
230d793d
RS
335};
336
/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  struct undo *undos;           /* Chain of pending undo records.  */
  struct undo *frees;           /* Chain of freed records available for
                                   reuse, to avoid repeated malloc/free.  */
  struct undo *previous_undos;  /* Value of `undos' when the current
                                   substitution began.  */
  rtx other_insn;               /* Some other insn modified while working on
                                   subst_insn, if any; must be re-verified.  */
};

static struct undobuf undobuf;
357
230d793d
RS
358/* Number of times the pseudo being substituted for
359 was found and replaced. */
360
361static int n_occurrences;
362
83d2b3b9 363static void do_SUBST PARAMS ((rtx *, rtx));
0345195a
RK
364static void do_SUBST_INT PARAMS ((unsigned int *,
365 unsigned int));
83d2b3b9
KG
366static void init_reg_last_arrays PARAMS ((void));
367static void setup_incoming_promotions PARAMS ((void));
368static void set_nonzero_bits_and_sign_copies PARAMS ((rtx, rtx, void *));
369static int can_combine_p PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
370static int sets_function_arg_p PARAMS ((rtx));
371static int combinable_i3pat PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
372static int contains_muldiv PARAMS ((rtx));
44a76fc8 373static rtx try_combine PARAMS ((rtx, rtx, rtx, int *));
83d2b3b9
KG
374static void undo_all PARAMS ((void));
375static void undo_commit PARAMS ((void));
376static rtx *find_split_point PARAMS ((rtx *, rtx));
377static rtx subst PARAMS ((rtx, rtx, rtx, int, int));
378static rtx combine_simplify_rtx PARAMS ((rtx, enum machine_mode, int, int));
379static rtx simplify_if_then_else PARAMS ((rtx));
380static rtx simplify_set PARAMS ((rtx));
381static rtx simplify_logical PARAMS ((rtx, int));
382static rtx expand_compound_operation PARAMS ((rtx));
383static rtx expand_field_assignment PARAMS ((rtx));
770ae6cc
RK
384static rtx make_extraction PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
385 rtx, unsigned HOST_WIDE_INT, int,
386 int, int));
83d2b3b9
KG
387static rtx extract_left_shift PARAMS ((rtx, int));
388static rtx make_compound_operation PARAMS ((rtx, enum rtx_code));
770ae6cc
RK
389static int get_pos_from_mask PARAMS ((unsigned HOST_WIDE_INT,
390 unsigned HOST_WIDE_INT *));
83d2b3b9
KG
391static rtx force_to_mode PARAMS ((rtx, enum machine_mode,
392 unsigned HOST_WIDE_INT, rtx, int));
393static rtx if_then_else_cond PARAMS ((rtx, rtx *, rtx *));
394static rtx known_cond PARAMS ((rtx, enum rtx_code, rtx, rtx));
395static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
396static rtx make_field_assignment PARAMS ((rtx));
397static rtx apply_distributive_law PARAMS ((rtx));
398static rtx simplify_and_const_int PARAMS ((rtx, enum machine_mode, rtx,
399 unsigned HOST_WIDE_INT));
400static unsigned HOST_WIDE_INT nonzero_bits PARAMS ((rtx, enum machine_mode));
770ae6cc 401static unsigned int num_sign_bit_copies PARAMS ((rtx, enum machine_mode));
83d2b3b9
KG
402static int merge_outer_ops PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
403 enum rtx_code, HOST_WIDE_INT,
404 enum machine_mode, int *));
405static rtx simplify_shift_const PARAMS ((rtx, enum rtx_code, enum machine_mode,
406 rtx, int));
407static int recog_for_combine PARAMS ((rtx *, rtx, rtx *));
408static rtx gen_lowpart_for_combine PARAMS ((enum machine_mode, rtx));
409static rtx gen_rtx_combine PARAMS ((enum rtx_code code, enum machine_mode mode,
410 ...));
411static rtx gen_binary PARAMS ((enum rtx_code, enum machine_mode,
412 rtx, rtx));
413static rtx gen_unary PARAMS ((enum rtx_code, enum machine_mode,
414 enum machine_mode, rtx));
415static enum rtx_code simplify_comparison PARAMS ((enum rtx_code, rtx *, rtx *));
416static int reversible_comparison_p PARAMS ((rtx));
417static void update_table_tick PARAMS ((rtx));
418static void record_value_for_reg PARAMS ((rtx, rtx, rtx));
419static void check_promoted_subreg PARAMS ((rtx, rtx));
420static void record_dead_and_set_regs_1 PARAMS ((rtx, rtx, void *));
421static void record_dead_and_set_regs PARAMS ((rtx));
422static int get_last_value_validate PARAMS ((rtx *, rtx, int, int));
423static rtx get_last_value PARAMS ((rtx));
424static int use_crosses_set_p PARAMS ((rtx, int));
425static void reg_dead_at_p_1 PARAMS ((rtx, rtx, void *));
426static int reg_dead_at_p PARAMS ((rtx, rtx));
427static void move_deaths PARAMS ((rtx, rtx, int, rtx, rtx *));
428static int reg_bitfield_target_p PARAMS ((rtx, rtx));
429static void distribute_notes PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
430static void distribute_links PARAMS ((rtx));
431static void mark_used_regs_combine PARAMS ((rtx));
432static int insn_cuid PARAMS ((rtx));
c6991660 433static void record_promoted_value PARAMS ((rtx, rtx));
230d793d 434\f
76095e2f
RH
435/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
436 insn. The substitution can be undone by undo_all. If INTO is already
437 set to NEWVAL, do not record this change. Because computing NEWVAL might
438 also call SUBST, we have to compute it before we put anything into
439 the undo table. */
440
441static void
663522cb 442do_SUBST (into, newval)
76095e2f
RH
443 rtx *into, newval;
444{
445 struct undo *buf;
446 rtx oldval = *into;
447
448 if (oldval == newval)
449 return;
450
451 if (undobuf.frees)
452 buf = undobuf.frees, undobuf.frees = buf->next;
453 else
454 buf = (struct undo *) xmalloc (sizeof (struct undo));
455
456 buf->is_int = 0;
457 buf->where.r = into;
458 buf->old_contents.r = oldval;
459 *into = newval;
460
461 buf->next = undobuf.undos, undobuf.undos = buf;
462}
463
464#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
465
466/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
467 for the value of a HOST_WIDE_INT value (including CONST_INT) is
468 not safe. */
469
470static void
663522cb 471do_SUBST_INT (into, newval)
0345195a 472 unsigned int *into, newval;
76095e2f
RH
473{
474 struct undo *buf;
0345195a 475 unsigned int oldval = *into;
76095e2f
RH
476
477 if (oldval == newval)
478 return;
479
480 if (undobuf.frees)
481 buf = undobuf.frees, undobuf.frees = buf->next;
482 else
483 buf = (struct undo *) xmalloc (sizeof (struct undo));
484
485 buf->is_int = 1;
486 buf->where.i = into;
487 buf->old_contents.i = oldval;
488 *into = newval;
489
490 buf->next = undobuf.undos, undobuf.undos = buf;
491}
492
493#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
494\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   The combiner runs in two passes over the insn chain: a first pass
   that records nonzero-bits / sign-copy information and known register
   values, and a second pass that actually attempts the combinations
   (pairs, triples, and the cc0-implicit links).

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  /* Allocate the per-register tables.  The nonzero-bits and
     sign-bit-copies tables must start zeroed, hence xcalloc.  */
  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  /* First pass: assign cuids and record value/nonzero-bits information.  */
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  /* An auto-increment side effect also sets a register; record
	     its effect on the nonzero-bits information.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  /* Second pass: attempt combinations.  A successful try_combine returns
     the insn at which scanning should resume (via NEXT).  */
  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		break;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  /* Re-do global life analysis for any blocks where registers went
     dead at entry as a result of combination.  */
  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  /* Release the recycled undo records accumulated during this run.  */
  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
ef026f91
RS
784
785/* Wipe the reg_last_xxx arrays in preparation for another pass. */
786
787static void
788init_reg_last_arrays ()
789{
770ae6cc 790 unsigned int nregs = combine_max_regno;
ef026f91 791
4c9a05bc
RK
792 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
793 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
794 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
795 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
796 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
ef026f91 797 bzero (reg_last_set_invalid, nregs * sizeof (char));
4c9a05bc
RK
798 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
799 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
800 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
801}
230d793d 802\f
7988fd36
RK
/* Set up any promoted values for incoming argument registers.

   For each hard register that may carry an incoming argument, record a
   (ZERO_EXTEND ...) or (SIGN_EXTEND ...) of a clobber as its known
   value, so later code knows the upper bits follow the promotion rule.
   This is a no-op on targets without PROMOTE_FUNCTION_ARGS.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	/* The CLOBBER stands for an unknown value of the unpromoted
	   mode; only the extension around it is meaningful.  */
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
833\f
91102d5a
RK
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.

   X is the location being stored to; SET is the SET (or CLOBBER, or 0)
   performing the store; DATA is the unused note_stores cookie.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  /* Only track pseudo REGs narrow enough for a HOST_WIDE_INT mask.  */
  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      /* A CLOBBER (or an unknown store) invalidates everything we
	 know: assume all bits may be nonzero, one sign-bit copy.  */
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Accumulate: OR in the nonzero bits of this store (the set
	     of possibly-nonzero bits can only grow across stores), and
	     keep the MINIMUM sign-bit-copy count seen for any store.  */
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  /* Partial store we can't analyze: be maximally conservative.  */
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
917\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.

   NOTE(review): this is a long chain of independent safety tests; the
   order of the tests in the big disjunction below matters only for
   speed, not correctness, but has been left exactly as written.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  /* Nonzero when INSN (and SUCC, if given) immediately precede I3 with
     no active insns in between.  */
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3_pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change betweeen
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such compilcated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 If the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in can_combine except that we don't test
	 if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3 */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
1225\f
956d6950
JL
1226/* Check if PAT is an insn - or a part of it - used to set up an
1227 argument for a function in a hard register. */
1228
1229static int
1230sets_function_arg_p (pat)
1231 rtx pat;
1232{
1233 int i;
1234 rtx inner_dest;
1235
1236 switch (GET_CODE (pat))
1237 {
1238 case INSN:
1239 return sets_function_arg_p (PATTERN (pat));
1240
1241 case PARALLEL:
1242 for (i = XVECLEN (pat, 0); --i >= 0;)
1243 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1244 return 1;
1245
1246 break;
1247
1248 case SET:
1249 inner_dest = SET_DEST (pat);
1250 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1251 || GET_CODE (inner_dest) == SUBREG
1252 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1253 inner_dest = XEXP (inner_dest, 0);
1254
1255 return (GET_CODE (inner_dest) == REG
1256 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1257 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1258
1259 default:
1260 break;
956d6950
JL
1261 }
1262
1263 return 0;
1264}
1265
230d793d
RS
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      /* Install the expanded form in I3 (recorded for undo).  */
      SUBST (*loc, set);

      /* Strip wrappers to find the register (or memory) actually
	 being stored into.  */
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation. Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		  || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		      && ! REG_USERVAR_P (inner_dest)
		      && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			  || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			      && i3 != 0
			      && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  /* Only one set-and-used register may be recorded.  */
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      /* Validate every element of the PARALLEL recursively.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
1426\f
14a774a9
RK
1427/* Return 1 if X is an arithmetic expression that contains a multiplication
1428 and division. We don't count multiplications by powers of two here. */
1429
1430static int
1431contains_muldiv (x)
1432 rtx x;
1433{
1434 switch (GET_CODE (x))
1435 {
1436 case MOD: case DIV: case UMOD: case UDIV:
1437 return 1;
1438
1439 case MULT:
1440 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1441 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1442 default:
1443 switch (GET_RTX_CLASS (GET_CODE (x)))
1444 {
1445 case 'c': case '<': case '2':
1446 return contains_muldiv (XEXP (x, 0))
1447 || contains_muldiv (XEXP (x, 1));
1448
1449 case '1':
1450 return contains_muldiv (XEXP (x, 0));
1451
1452 default:
1453 return 0;
1454 }
1455 }
1456}
1457\f
230d793d
RS
1458/* Try to combine the insns I1 and I2 into I3.
1459 Here I1 and I2 appear earlier than I3.
1460 I1 can be zero; then we combine just I2 into I3.
663522cb 1461
230d793d
RS
1462 It we are combining three insns and the resulting insn is not recognized,
1463 try splitting it into two insns. If that happens, I2 and I3 are retained
1464 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1465 are pseudo-deleted.
1466
663522cb 1467 Return 0 if the combination does not work. Then nothing is changed.
abe6e52f 1468 If we did the combination, return the insn at which combine should
663522cb
KH
1469 resume scanning.
1470
44a76fc8
AG
1471 Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1472 new direct jump instruction. */
230d793d
RS
1473
1474static rtx
44a76fc8 1475try_combine (i3, i2, i1, new_direct_jump_p)
230d793d 1476 register rtx i3, i2, i1;
44a76fc8 1477 register int *new_direct_jump_p;
230d793d 1478{
02359929 1479 /* New patterns for I3 and I2, respectively. */
230d793d
RS
1480 rtx newpat, newi2pat = 0;
1481 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1482 int added_sets_1, added_sets_2;
1483 /* Total number of SETs to put into I3. */
1484 int total_sets;
1485 /* Nonzero is I2's body now appears in I3. */
1486 int i2_is_used;
1487 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1488 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1489 /* Contains I3 if the destination of I3 is used in its source, which means
1490 that the old life of I3 is being killed. If that usage is placed into
1491 I2 and not in I3, a REG_DEAD note must be made. */
1492 rtx i3dest_killed = 0;
1493 /* SET_DEST and SET_SRC of I2 and I1. */
1494 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1495 /* PATTERN (I2), or a copy of it in certain cases. */
1496 rtx i2pat;
1497 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1498 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1499 int i1_feeds_i3 = 0;
1500 /* Notes that must be added to REG_NOTES in I3 and I2. */
1501 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1502 /* Notes that we substituted I3 into I2 instead of the normal case. */
1503 int i3_subst_into_i2 = 0;
df7d75de
RK
1504 /* Notes that I1, I2 or I3 is a MULT operation. */
1505 int have_mult = 0;
230d793d
RS
1506
1507 int maxreg;
1508 rtx temp;
1509 register rtx link;
1510 int i;
1511
1512 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1513 This can occur when flow deletes an insn that it has merged into an
1514 auto-increment address. We also can't do anything if I3 has a
1515 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1516 libcall. */
1517
2c3c49de 1518 if (! INSN_P (i3) || ! INSN_P (i2) || (i1 && ! INSN_P (i1))
ec35104c
JL
1519#if 0
1520 /* ??? This gives worse code, and appears to be unnecessary, since no
1521 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1522 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1523#endif
663522cb 1524 )
230d793d
RS
1525 return 0;
1526
1527 combine_attempts++;
230d793d
RS
1528 undobuf.other_insn = 0;
1529
6e25d159
RK
1530 /* Reset the hard register usage information. */
1531 CLEAR_HARD_REG_SET (newpat_used_regs);
1532
230d793d
RS
1533 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1534 code below, set I1 to be the earlier of the two insns. */
1535 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1536 temp = i1, i1 = i2, i2 = temp;
1537
abe6e52f 1538 added_links_insn = 0;
137e889e 1539
230d793d
RS
1540 /* First check for one important special-case that the code below will
1541 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1542 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1543 we may be able to replace that destination with the destination of I3.
1544 This occurs in the common code where we compute both a quotient and
1545 remainder into a structure, in which case we want to do the computation
1546 directly into the structure to avoid register-register copies.
1547
1548 We make very conservative checks below and only try to handle the
1549 most common cases of this. For example, we only handle the case
1550 where I2 and I3 are adjacent to avoid making difficult register
1551 usage tests. */
1552
1553 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1554 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1555 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
f95182a4 1556 && (! SMALL_REGISTER_CLASSES
e9a25f70
JL
1557 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1558 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1559 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
230d793d
RS
1560 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1561 && GET_CODE (PATTERN (i2)) == PARALLEL
1562 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1563 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1564 below would need to check what is inside (and reg_overlap_mentioned_p
1565 doesn't support those codes anyway). Don't allow those destinations;
1566 the resulting insn isn't likely to be recognized anyway. */
1567 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1568 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1569 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1570 SET_DEST (PATTERN (i3)))
1571 && next_real_insn (i2) == i3)
5089e22e
RS
1572 {
1573 rtx p2 = PATTERN (i2);
1574
1575 /* Make sure that the destination of I3,
1576 which we are going to substitute into one output of I2,
1577 is not used within another output of I2. We must avoid making this:
1578 (parallel [(set (mem (reg 69)) ...)
1579 (set (reg 69) ...)])
1580 which is not well-defined as to order of actions.
1581 (Besides, reload can't handle output reloads for this.)
1582
1583 The problem can also happen if the dest of I3 is a memory ref,
1584 if another dest in I2 is an indirect memory ref. */
1585 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1586 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1587 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1588 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1589 SET_DEST (XVECEXP (p2, 0, i))))
1590 break;
230d793d 1591
5089e22e
RS
1592 if (i == XVECLEN (p2, 0))
1593 for (i = 0; i < XVECLEN (p2, 0); i++)
481c7efa
FS
1594 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1595 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1596 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
5089e22e
RS
1597 {
1598 combine_merges++;
230d793d 1599
5089e22e
RS
1600 subst_insn = i3;
1601 subst_low_cuid = INSN_CUID (i2);
230d793d 1602
c4e861e8 1603 added_sets_2 = added_sets_1 = 0;
5089e22e 1604 i2dest = SET_SRC (PATTERN (i3));
230d793d 1605
5089e22e
RS
1606 /* Replace the dest in I2 with our dest and make the resulting
1607 insn the new pattern for I3. Then skip to where we
1608 validate the pattern. Everything was set up above. */
663522cb 1609 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
5089e22e
RS
1610 SET_DEST (PATTERN (i3)));
1611
1612 newpat = p2;
176c9e6b 1613 i3_subst_into_i2 = 1;
5089e22e
RS
1614 goto validate_replacement;
1615 }
1616 }
230d793d 1617
667c1c2c
RK
1618 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1619 one of those words to another constant, merge them by making a new
1620 constant. */
1621 if (i1 == 0
1622 && (temp = single_set (i2)) != 0
1623 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1624 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1625 && GET_CODE (SET_DEST (temp)) == REG
1626 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1627 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1628 && GET_CODE (PATTERN (i3)) == SET
1629 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1630 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1631 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1632 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1633 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1634 {
1635 HOST_WIDE_INT lo, hi;
1636
1637 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1638 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1639 else
1640 {
1641 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1642 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1643 }
1644
1645 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1646 lo = INTVAL (SET_SRC (PATTERN (i3)));
1647 else
1648 hi = INTVAL (SET_SRC (PATTERN (i3)));
1649
1650 combine_merges++;
1651 subst_insn = i3;
1652 subst_low_cuid = INSN_CUID (i2);
1653 added_sets_2 = added_sets_1 = 0;
1654 i2dest = SET_DEST (temp);
1655
1656 SUBST (SET_SRC (temp),
1657 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1658
1659 newpat = PATTERN (i2);
1660 i3_subst_into_i2 = 1;
1661 goto validate_replacement;
1662 }
1663
230d793d
RS
1664#ifndef HAVE_cc0
1665 /* If we have no I1 and I2 looks like:
1666 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1667 (set Y OP)])
1668 make up a dummy I1 that is
1669 (set Y OP)
1670 and change I2 to be
1671 (set (reg:CC X) (compare:CC Y (const_int 0)))
1672
1673 (We can ignore any trailing CLOBBERs.)
1674
1675 This undoes a previous combination and allows us to match a branch-and-
1676 decrement insn. */
1677
1678 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1679 && XVECLEN (PATTERN (i2), 0) >= 2
1680 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1681 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1682 == MODE_CC)
1683 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1684 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1685 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1686 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1687 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1688 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1689 {
663522cb 1690 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
230d793d
RS
1691 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1692 break;
1693
1694 if (i == 1)
1695 {
1696 /* We make I1 with the same INSN_UID as I2. This gives it
1697 the same INSN_CUID for value tracking. Our fake I1 will
1698 never appear in the insn stream so giving it the same INSN_UID
1699 as I2 will not cause a problem. */
1700
0d9641d1 1701 subst_prev_insn = i1
38a448ca
RH
1702 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1703 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1704 NULL_RTX);
230d793d
RS
1705
1706 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1707 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1708 SET_DEST (PATTERN (i1)));
1709 }
1710 }
1711#endif
1712
1713 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1714 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1715 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1716 {
1717 undo_all ();
1718 return 0;
1719 }
1720
1721 /* Record whether I2DEST is used in I2SRC and similarly for the other
1722 cases. Knowing this will help in register status updating below. */
1723 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1724 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1725 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1726
916f14f1 1727 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1728 in I2SRC. */
1729 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1730
1731 /* Ensure that I3's pattern can be the destination of combines. */
1732 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1733 i1 && i2dest_in_i1src && i1_feeds_i3,
1734 &i3dest_killed))
1735 {
1736 undo_all ();
1737 return 0;
1738 }
1739
df7d75de
RK
1740 /* See if any of the insns is a MULT operation. Unless one is, we will
1741 reject a combination that is, since it must be slower. Be conservative
1742 here. */
1743 if (GET_CODE (i2src) == MULT
1744 || (i1 != 0 && GET_CODE (i1src) == MULT)
1745 || (GET_CODE (PATTERN (i3)) == SET
1746 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1747 have_mult = 1;
1748
230d793d
RS
1749 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1750 We used to do this EXCEPT in one case: I3 has a post-inc in an
1751 output operand. However, that exception can give rise to insns like
1752 mov r3,(r3)+
1753 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1754 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1755
1756#if 0
1757 if (!(GET_CODE (PATTERN (i3)) == SET
1758 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1759 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1760 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1761 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1762 /* It's not the exception. */
1763#endif
1764#ifdef AUTO_INC_DEC
1765 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1766 if (REG_NOTE_KIND (link) == REG_INC
1767 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1768 || (i1 != 0
1769 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1770 {
1771 undo_all ();
1772 return 0;
1773 }
1774#endif
1775
1776 /* See if the SETs in I1 or I2 need to be kept around in the merged
1777 instruction: whenever the value set there is still needed past I3.
1778 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1779
1780 For the SET in I1, we have two cases: If I1 and I2 independently
1781 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1782 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1783 in I1 needs to be kept around unless I1DEST dies or is set in either
1784 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1785 I1DEST. If so, we know I1 feeds into I2. */
1786
1787 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1788
1789 added_sets_1
1790 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1791 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1792
1793 /* If the set in I2 needs to be kept around, we must make a copy of
1794 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1795 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1796 an already-substituted copy. This also prevents making self-referential
1797 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1798 I2DEST. */
1799
1800 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1801 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1802 : PATTERN (i2));
1803
1804 if (added_sets_2)
1805 i2pat = copy_rtx (i2pat);
1806
1807 combine_merges++;
1808
1809 /* Substitute in the latest insn for the regs set by the earlier ones. */
1810
1811 maxreg = max_reg_num ();
1812
1813 subst_insn = i3;
230d793d
RS
1814
1815 /* It is possible that the source of I2 or I1 may be performing an
1816 unneeded operation, such as a ZERO_EXTEND of something that is known
1817 to have the high part zero. Handle that case by letting subst look at
1818 the innermost one of them.
1819
1820 Another way to do this would be to have a function that tries to
1821 simplify a single insn instead of merging two or more insns. We don't
1822 do this because of the potential of infinite loops and because
1823 of the potential extra memory required. However, doing it the way
1824 we are is a bit of a kludge and doesn't catch all cases.
1825
1826 But only do this if -fexpensive-optimizations since it slows things down
1827 and doesn't usually win. */
1828
1829 if (flag_expensive_optimizations)
1830 {
1831 /* Pass pc_rtx so no substitutions are done, just simplifications.
1832 The cases that we are interested in here do not involve the few
1833 cases were is_replaced is checked. */
1834 if (i1)
d0ab8cd3
RK
1835 {
1836 subst_low_cuid = INSN_CUID (i1);
1837 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1838 }
230d793d 1839 else
d0ab8cd3
RK
1840 {
1841 subst_low_cuid = INSN_CUID (i2);
1842 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1843 }
230d793d 1844
241cea85 1845 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1846 }
1847
1848#ifndef HAVE_cc0
1849 /* Many machines that don't use CC0 have insns that can both perform an
1850 arithmetic operation and set the condition code. These operations will
1851 be represented as a PARALLEL with the first element of the vector
1852 being a COMPARE of an arithmetic operation with the constant zero.
1853 The second element of the vector will set some pseudo to the result
1854 of the same arithmetic operation. If we simplify the COMPARE, we won't
1855 match such a pattern and so will generate an extra insn. Here we test
1856 for this case, where both the comparison and the operation result are
1857 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1858 I2SRC. Later we will make the PARALLEL that contains I2. */
1859
1860 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1861 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1862 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1863 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1864 {
081f5e7e 1865#ifdef EXTRA_CC_MODES
230d793d
RS
1866 rtx *cc_use;
1867 enum machine_mode compare_mode;
081f5e7e 1868#endif
230d793d
RS
1869
1870 newpat = PATTERN (i3);
1871 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1872
1873 i2_is_used = 1;
1874
1875#ifdef EXTRA_CC_MODES
1876 /* See if a COMPARE with the operand we substituted in should be done
1877 with the mode that is currently being used. If not, do the same
1878 processing we do in `subst' for a SET; namely, if the destination
1879 is used only once, try to replace it with a register of the proper
1880 mode and also replace the COMPARE. */
1881 if (undobuf.other_insn == 0
1882 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1883 &undobuf.other_insn))
77fa0940
RK
1884 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1885 i2src, const0_rtx))
230d793d
RS
1886 != GET_MODE (SET_DEST (newpat))))
1887 {
770ae6cc 1888 unsigned int regno = REGNO (SET_DEST (newpat));
38a448ca 1889 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1890
1891 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1892 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1893 && ! REG_USERVAR_P (SET_DEST (newpat))))
1894 {
1895 if (regno >= FIRST_PSEUDO_REGISTER)
1896 SUBST (regno_reg_rtx[regno], new_dest);
1897
1898 SUBST (SET_DEST (newpat), new_dest);
1899 SUBST (XEXP (*cc_use, 0), new_dest);
1900 SUBST (SET_SRC (newpat),
1901 gen_rtx_combine (COMPARE, compare_mode,
1902 i2src, const0_rtx));
1903 }
1904 else
1905 undobuf.other_insn = 0;
1906 }
663522cb 1907#endif
230d793d
RS
1908 }
1909 else
1910#endif
1911 {
1912 n_occurrences = 0; /* `subst' counts here */
1913
1914 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1915 need to make a unique copy of I2SRC each time we substitute it
1916 to avoid self-referential rtl. */
1917
d0ab8cd3 1918 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1919 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1920 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1921 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1922
1923 /* Record whether i2's body now appears within i3's body. */
1924 i2_is_used = n_occurrences;
1925 }
1926
1927 /* If we already got a failure, don't try to do more. Otherwise,
1928 try to substitute in I1 if we have it. */
1929
1930 if (i1 && GET_CODE (newpat) != CLOBBER)
1931 {
1932 /* Before we can do this substitution, we must redo the test done
1933 above (see detailed comments there) that ensures that I1DEST
0f41302f 1934 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1935
5f4f0e22
CH
1936 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1937 0, NULL_PTR))
230d793d
RS
1938 {
1939 undo_all ();
1940 return 0;
1941 }
1942
1943 n_occurrences = 0;
d0ab8cd3 1944 subst_low_cuid = INSN_CUID (i1);
230d793d 1945 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1946 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1947 }
1948
916f14f1
RK
1949 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1950 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1951 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1952 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1953 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1954 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1955 > 1))
230d793d
RS
1956 /* Fail if we tried to make a new register (we used to abort, but there's
1957 really no reason to). */
1958 || max_reg_num () != maxreg
1959 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1960 || GET_CODE (newpat) == CLOBBER
1961 /* Fail if this new pattern is a MULT and we didn't have one before
1962 at the outer level. */
1963 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1964 && ! have_mult))
230d793d
RS
1965 {
1966 undo_all ();
1967 return 0;
1968 }
1969
1970 /* If the actions of the earlier insns must be kept
1971 in addition to substituting them into the latest one,
1972 we must make a new PARALLEL for the latest insn
1973 to hold additional the SETs. */
1974
1975 if (added_sets_1 || added_sets_2)
1976 {
1977 combine_extras++;
1978
1979 if (GET_CODE (newpat) == PARALLEL)
1980 {
1981 rtvec old = XVEC (newpat, 0);
1982 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 1983 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
59888de2 1984 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
230d793d
RS
1985 sizeof (old->elem[0]) * old->num_elem);
1986 }
1987 else
1988 {
1989 rtx old = newpat;
1990 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 1991 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
1992 XVECEXP (newpat, 0, 0) = old;
1993 }
1994
1995 if (added_sets_1)
1996 XVECEXP (newpat, 0, --total_sets)
1997 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 1998 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
1999
2000 if (added_sets_2)
c5c76735
JL
2001 {
2002 /* If there is no I1, use I2's body as is. We used to also not do
2003 the subst call below if I2 was substituted into I3,
2004 but that could lose a simplification. */
2005 if (i1 == 0)
2006 XVECEXP (newpat, 0, --total_sets) = i2pat;
2007 else
2008 /* See comment where i2pat is assigned. */
2009 XVECEXP (newpat, 0, --total_sets)
2010 = subst (i2pat, i1dest, i1src, 0, 0);
2011 }
230d793d
RS
2012 }
2013
2014 /* We come here when we are replacing a destination in I2 with the
2015 destination of I3. */
2016 validate_replacement:
2017
6e25d159
RK
2018 /* Note which hard regs this insn has as inputs. */
2019 mark_used_regs_combine (newpat);
2020
230d793d 2021 /* Is the result of combination a valid instruction? */
8e2f6e35 2022 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2023
2024 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2025 the second SET's destination is a register that is unused. In that case,
2026 we just need the first SET. This can occur when simplifying a divmod
2027 insn. We *must* test for this case here because the code below that
2028 splits two independent SETs doesn't handle this case correctly when it
2029 updates the register status. Also check the case where the first
2030 SET's destination is unused. That would not cause incorrect code, but
2031 does cause an unneeded insn to remain. */
2032
2033 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2034 && XVECLEN (newpat, 0) == 2
2035 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2036 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2037 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2038 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2039 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2040 && asm_noperands (newpat) < 0)
2041 {
2042 newpat = XVECEXP (newpat, 0, 0);
8e2f6e35 2043 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2044 }
2045
2046 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2047 && XVECLEN (newpat, 0) == 2
2048 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2049 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2050 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2051 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2052 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2053 && asm_noperands (newpat) < 0)
2054 {
2055 newpat = XVECEXP (newpat, 0, 1);
8e2f6e35 2056 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2057 }
2058
2059 /* If we were combining three insns and the result is a simple SET
2060 with no ASM_OPERANDS that wasn't recognized, try to split it into two
663522cb 2061 insns. There are two ways to do this. It can be split using a
916f14f1
RK
2062 machine-specific method (like when you have an addition of a large
2063 constant) or by combine in the function find_split_point. */
2064
230d793d
RS
2065 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2066 && asm_noperands (newpat) < 0)
2067 {
916f14f1 2068 rtx m_split, *split;
42495ca0 2069 rtx ni2dest = i2dest;
916f14f1
RK
2070
2071 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
2072 use I2DEST as a scratch register will help. In the latter case,
2073 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
2074
2075 m_split = split_insns (newpat, i3);
a70c61d9
JW
2076
2077 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2078 inputs of NEWPAT. */
2079
2080 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2081 possible to try that as a scratch reg. This would require adding
2082 more code to make it work though. */
2083
2084 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
2085 {
2086 /* If I2DEST is a hard register or the only use of a pseudo,
2087 we can change its mode. */
2088 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 2089 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 2090 && GET_CODE (i2dest) == REG
42495ca0 2091 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2092 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 2093 && ! REG_USERVAR_P (i2dest))))
38a448ca 2094 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
c5c76735
JL
2095 REGNO (i2dest));
2096
2097 m_split = split_insns (gen_rtx_PARALLEL
2098 (VOIDmode,
2099 gen_rtvec (2, newpat,
2100 gen_rtx_CLOBBER (VOIDmode,
2101 ni2dest))),
2102 i3);
42495ca0 2103 }
916f14f1 2104
d340408c
RH
2105 if (m_split && GET_CODE (m_split) != SEQUENCE)
2106 {
2107 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2108 if (insn_code_number >= 0)
2109 newpat = m_split;
2110 }
2111 else if (m_split && GET_CODE (m_split) == SEQUENCE
2112 && XVECLEN (m_split, 0) == 2
2113 && (next_real_insn (i2) == i3
2114 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2115 INSN_CUID (i2))))
916f14f1 2116 {
1a26b032 2117 rtx i2set, i3set;
d0ab8cd3 2118 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 2119 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 2120
e4ba89be
RK
2121 i3set = single_set (XVECEXP (m_split, 0, 1));
2122 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 2123
42495ca0
RK
2124 /* In case we changed the mode of I2DEST, replace it in the
2125 pseudo-register table here. We can't do it above in case this
2126 code doesn't get executed and we do a split the other way. */
2127
2128 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2129 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2130
8e2f6e35 2131 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
2132
2133 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
2134 register status, so don't use these insns. If I2's destination
2135 is used between I2 and I3, we also can't use these insns. */
1a26b032 2136
9cc96794
RK
2137 if (i2_code_number >= 0 && i2set && i3set
2138 && (next_real_insn (i2) == i3
2139 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
8e2f6e35
BS
2140 insn_code_number = recog_for_combine (&newi3pat, i3,
2141 &new_i3_notes);
d0ab8cd3
RK
2142 if (insn_code_number >= 0)
2143 newpat = newi3pat;
2144
c767f54b 2145 /* It is possible that both insns now set the destination of I3.
22609cbf 2146 If so, we must show an extra use of it. */
c767f54b 2147
393de53f
RK
2148 if (insn_code_number >= 0)
2149 {
2150 rtx new_i3_dest = SET_DEST (i3set);
2151 rtx new_i2_dest = SET_DEST (i2set);
2152
2153 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2154 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2155 || GET_CODE (new_i3_dest) == SUBREG)
2156 new_i3_dest = XEXP (new_i3_dest, 0);
2157
d4096689
RK
2158 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2159 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2160 || GET_CODE (new_i2_dest) == SUBREG)
2161 new_i2_dest = XEXP (new_i2_dest, 0);
2162
393de53f
RK
2163 if (GET_CODE (new_i3_dest) == REG
2164 && GET_CODE (new_i2_dest) == REG
2165 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 2166 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 2167 }
916f14f1 2168 }
230d793d
RS
2169
2170 /* If we can split it and use I2DEST, go ahead and see if that
2171 helps things be recognized. Verify that none of the registers
2172 are set between I2 and I3. */
d0ab8cd3 2173 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
2174#ifdef HAVE_cc0
2175 && GET_CODE (i2dest) == REG
2176#endif
2177 /* We need I2DEST in the proper mode. If it is a hard register
2178 or the only use of a pseudo, we can change its mode. */
2179 && (GET_MODE (*split) == GET_MODE (i2dest)
2180 || GET_MODE (*split) == VOIDmode
2181 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2182 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
2183 && ! REG_USERVAR_P (i2dest)))
2184 && (next_real_insn (i2) == i3
2185 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2186 /* We can't overwrite I2DEST if its value is still used by
2187 NEWPAT. */
2188 && ! reg_referenced_p (i2dest, newpat))
2189 {
2190 rtx newdest = i2dest;
df7d75de
RK
2191 enum rtx_code split_code = GET_CODE (*split);
2192 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
2193
2194 /* Get NEWDEST as a register in the proper mode. We have already
2195 validated that we can do this. */
df7d75de 2196 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2197 {
38a448ca 2198 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
2199
2200 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2201 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2202 }
2203
2204 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2205 an ASHIFT. This can occur if it was inside a PLUS and hence
2206 appeared to be a memory address. This is a kludge. */
df7d75de 2207 if (split_code == MULT
230d793d
RS
2208 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2209 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823
JW
2210 {
2211 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2212 XEXP (*split, 0), GEN_INT (i)));
2213 /* Update split_code because we may not have a multiply
2214 anymore. */
2215 split_code = GET_CODE (*split);
2216 }
230d793d
RS
2217
2218#ifdef INSN_SCHEDULING
2219 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2220 be written as a ZERO_EXTEND. */
df7d75de
RK
2221 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2222 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
230d793d
RS
2223 XEXP (*split, 0)));
2224#endif
2225
2226 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2227 SUBST (*split, newdest);
8e2f6e35 2228 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
df7d75de
RK
2229
2230 /* If the split point was a MULT and we didn't have one before,
2231 don't use one now. */
2232 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 2233 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2234 }
2235 }
2236
2237 /* Check for a case where we loaded from memory in a narrow mode and
2238 then sign extended it, but we need both registers. In that case,
2239 we have a PARALLEL with both loads from the same memory location.
2240 We can split this into a load from memory followed by a register-register
2241 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2242 eliminate the copy.
2243
2244 We cannot do this if the destination of the second assignment is
2245 a register that we have already assumed is zero-extended. Similarly
2246 for a SUBREG of such a register. */
230d793d
RS
2247
2248 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2249 && GET_CODE (newpat) == PARALLEL
2250 && XVECLEN (newpat, 0) == 2
2251 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2252 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2253 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2254 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2255 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2256 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2257 INSN_CUID (i2))
2258 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2259 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2260 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2261 (GET_CODE (temp) == REG
2262 && reg_nonzero_bits[REGNO (temp)] != 0
2263 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2264 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2265 && (reg_nonzero_bits[REGNO (temp)]
2266 != GET_MODE_MASK (word_mode))))
2267 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2268 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2269 (GET_CODE (temp) == REG
2270 && reg_nonzero_bits[REGNO (temp)] != 0
2271 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2272 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2273 && (reg_nonzero_bits[REGNO (temp)]
2274 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2275 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2276 SET_SRC (XVECEXP (newpat, 0, 1)))
2277 && ! find_reg_note (i3, REG_UNUSED,
2278 SET_DEST (XVECEXP (newpat, 0, 0))))
2279 {
472fbdd1
RK
2280 rtx ni2dest;
2281
230d793d 2282 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2283 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2284 newpat = XVECEXP (newpat, 0, 1);
2285 SUBST (SET_SRC (newpat),
472fbdd1 2286 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 2287 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2288
230d793d 2289 if (i2_code_number >= 0)
8e2f6e35 2290 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
2291
2292 if (insn_code_number >= 0)
2293 {
2294 rtx insn;
2295 rtx link;
2296
2297 /* If we will be able to accept this, we have made a change to the
2298 destination of I3. This can invalidate a LOG_LINKS pointing
2299 to I3. No other part of combine.c makes such a transformation.
2300
2301 The new I3 will have a destination that was previously the
2302 destination of I1 or I2 and which was used in i2 or I3. Call
2303 distribute_links to make a LOG_LINK from the next use of
2304 that destination. */
2305
2306 PATTERN (i3) = newpat;
38a448ca 2307 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2308
2309 /* I3 now uses what used to be its destination and which is
2310 now I2's destination. That means we need a LOG_LINK from
2311 I3 to I2. But we used to have one, so we still will.
2312
2313 However, some later insn might be using I2's dest and have
2314 a LOG_LINK pointing at I3. We must remove this link.
2315 The simplest way to remove the link is to point it at I1,
2316 which we know will be a NOTE. */
2317
2318 for (insn = NEXT_INSN (i3);
0d4d42c3 2319 insn && (this_basic_block == n_basic_blocks - 1
3b413743 2320 || insn != BLOCK_HEAD (this_basic_block + 1));
5089e22e
RS
2321 insn = NEXT_INSN (insn))
2322 {
2c3c49de 2323 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2324 {
2325 for (link = LOG_LINKS (insn); link;
2326 link = XEXP (link, 1))
2327 if (XEXP (link, 0) == i3)
2328 XEXP (link, 0) = i1;
2329
2330 break;
2331 }
2332 }
2333 }
230d793d 2334 }
663522cb 2335
230d793d
RS
2336 /* Similarly, check for a case where we have a PARALLEL of two independent
2337 SETs but we started with three insns. In this case, we can do the sets
2338 as two separate insns. This case occurs when some SET allows two
2339 other insns to combine, but the destination of that SET is still live. */
2340
2341 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2342 && GET_CODE (newpat) == PARALLEL
2343 && XVECLEN (newpat, 0) == 2
2344 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2345 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2346 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2347 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2348 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2349 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2350 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2351 INSN_CUID (i2))
2352 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2353 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2354 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2355 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2356 XVECEXP (newpat, 0, 0))
2357 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
14a774a9
RK
2358 XVECEXP (newpat, 0, 1))
2359 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2360 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
230d793d 2361 {
e9a25f70
JL
2362 /* Normally, it doesn't matter which of the two is done first,
2363 but it does if one references cc0. In that case, it has to
2364 be first. */
2365#ifdef HAVE_cc0
2366 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2367 {
2368 newi2pat = XVECEXP (newpat, 0, 0);
2369 newpat = XVECEXP (newpat, 0, 1);
2370 }
2371 else
2372#endif
2373 {
2374 newi2pat = XVECEXP (newpat, 0, 1);
2375 newpat = XVECEXP (newpat, 0, 0);
2376 }
230d793d 2377
8e2f6e35 2378 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2379
230d793d 2380 if (i2_code_number >= 0)
8e2f6e35 2381 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2382 }
2383
2384 /* If it still isn't recognized, fail and change things back the way they
2385 were. */
2386 if ((insn_code_number < 0
2387 /* Is the result a reasonable ASM_OPERANDS? */
2388 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2389 {
2390 undo_all ();
2391 return 0;
2392 }
2393
2394 /* If we had to change another insn, make sure it is valid also. */
2395 if (undobuf.other_insn)
2396 {
230d793d
RS
2397 rtx other_pat = PATTERN (undobuf.other_insn);
2398 rtx new_other_notes;
2399 rtx note, next;
2400
6e25d159
RK
2401 CLEAR_HARD_REG_SET (newpat_used_regs);
2402
8e2f6e35
BS
2403 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2404 &new_other_notes);
230d793d
RS
2405
2406 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2407 {
2408 undo_all ();
2409 return 0;
2410 }
2411
2412 PATTERN (undobuf.other_insn) = other_pat;
2413
2414 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2415 are still valid. Then add any non-duplicate notes added by
2416 recog_for_combine. */
2417 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2418 {
2419 next = XEXP (note, 1);
2420
2421 if (REG_NOTE_KIND (note) == REG_UNUSED
2422 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2423 {
2424 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2425 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2426
2427 remove_note (undobuf.other_insn, note);
2428 }
230d793d
RS
2429 }
2430
1a26b032
RK
2431 for (note = new_other_notes; note; note = XEXP (note, 1))
2432 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2433 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2434
230d793d 2435 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2436 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d 2437 }
5ef17dd2 2438#ifdef HAVE_cc0
663522cb 2439 /* If I2 is the setter CC0 and I3 is the user CC0 then check whether
5ef17dd2
CC
2440 they are adjacent to each other or not. */
2441 {
2442 rtx p = prev_nonnote_insn (i3);
663522cb
KH
2443 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2444 && sets_cc0_p (newi2pat))
5ef17dd2 2445 {
663522cb
KH
2446 undo_all ();
2447 return 0;
5ef17dd2 2448 }
663522cb
KH
2449 }
2450#endif
230d793d 2451
663522cb 2452 /* We now know that we can do this combination. Merge the insns and
230d793d
RS
2453 update the status of registers and LOG_LINKS. */
2454
2455 {
2456 rtx i3notes, i2notes, i1notes = 0;
2457 rtx i3links, i2links, i1links = 0;
2458 rtx midnotes = 0;
770ae6cc 2459 unsigned int regno;
ff3467a9
JW
2460 /* Compute which registers we expect to eliminate. newi2pat may be setting
2461 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2462 same as i3dest, in which case newi2pat may be setting i1dest. */
2463 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2464 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2465 ? 0 : i2dest);
ff3467a9
JW
2466 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2467 || (newi2pat && reg_set_p (i1dest, newi2pat))
2468 ? 0 : i1dest);
230d793d
RS
2469
2470 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2471 clear them. */
2472 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2473 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2474 if (i1)
2475 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2476
2477 /* Ensure that we do not have something that should not be shared but
2478 occurs multiple times in the new insns. Check this by first
5089e22e 2479 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2480
2481 reset_used_flags (i3notes);
2482 reset_used_flags (i2notes);
2483 reset_used_flags (i1notes);
2484 reset_used_flags (newpat);
2485 reset_used_flags (newi2pat);
2486 if (undobuf.other_insn)
2487 reset_used_flags (PATTERN (undobuf.other_insn));
2488
2489 i3notes = copy_rtx_if_shared (i3notes);
2490 i2notes = copy_rtx_if_shared (i2notes);
2491 i1notes = copy_rtx_if_shared (i1notes);
2492 newpat = copy_rtx_if_shared (newpat);
2493 newi2pat = copy_rtx_if_shared (newi2pat);
2494 if (undobuf.other_insn)
2495 reset_used_flags (PATTERN (undobuf.other_insn));
2496
2497 INSN_CODE (i3) = insn_code_number;
2498 PATTERN (i3) = newpat;
2499 if (undobuf.other_insn)
2500 INSN_CODE (undobuf.other_insn) = other_code_number;
2501
2502 /* We had one special case above where I2 had more than one set and
2503 we replaced a destination of one of those sets with the destination
2504 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2505 in this basic block. Note that this (expensive) case is rare.
2506
2507 Also, in this case, we must pretend that all REG_NOTEs for I2
2508 actually came from I3, so that REG_UNUSED notes from I2 will be
2509 properly handled. */
2510
f85cf636 2511 if (i3_subst_into_i2 && GET_CODE (PATTERN (i2)) == PARALLEL)
176c9e6b 2512 {
e6770d3c
R
2513 if (GET_CODE (PATTERN (i2)) == PARALLEL)
2514 {
2515 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2516 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2517 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2518 && ! find_reg_note (i2, REG_UNUSED,
2519 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2520 for (temp = NEXT_INSN (i2);
2521 temp && (this_basic_block == n_basic_blocks - 1
2522 || BLOCK_HEAD (this_basic_block) != temp);
2523 temp = NEXT_INSN (temp))
2c3c49de 2524 if (temp != i3 && INSN_P (temp))
e6770d3c
R
2525 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2526 if (XEXP (link, 0) == i2)
2527 XEXP (link, 0) = i3;
2528 }
176c9e6b
JW
2529
2530 if (i3notes)
2531 {
2532 rtx link = i3notes;
2533 while (XEXP (link, 1))
2534 link = XEXP (link, 1);
2535 XEXP (link, 1) = i2notes;
2536 }
2537 else
2538 i3notes = i2notes;
2539 i2notes = 0;
2540 }
230d793d
RS
2541
2542 LOG_LINKS (i3) = 0;
2543 REG_NOTES (i3) = 0;
2544 LOG_LINKS (i2) = 0;
2545 REG_NOTES (i2) = 0;
2546
2547 if (newi2pat)
2548 {
2549 INSN_CODE (i2) = i2_code_number;
2550 PATTERN (i2) = newi2pat;
2551 }
2552 else
2553 {
2554 PUT_CODE (i2, NOTE);
2555 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2556 NOTE_SOURCE_FILE (i2) = 0;
2557 }
2558
2559 if (i1)
2560 {
2561 LOG_LINKS (i1) = 0;
2562 REG_NOTES (i1) = 0;
2563 PUT_CODE (i1, NOTE);
2564 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2565 NOTE_SOURCE_FILE (i1) = 0;
2566 }
2567
2568 /* Get death notes for everything that is now used in either I3 or
663522cb 2569 I2 and used to die in a previous insn. If we built two new
6eb12cef
RK
2570 patterns, move from I1 to I2 then I2 to I3 so that we get the
2571 proper movement on registers that I2 modifies. */
230d793d 2572
230d793d 2573 if (newi2pat)
6eb12cef
RK
2574 {
2575 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2576 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2577 }
2578 else
2579 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2580 i3, &midnotes);
230d793d
RS
2581
2582 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2583 if (i3notes)
5f4f0e22
CH
2584 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2585 elim_i2, elim_i1);
230d793d 2586 if (i2notes)
5f4f0e22
CH
2587 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2588 elim_i2, elim_i1);
230d793d 2589 if (i1notes)
5f4f0e22
CH
2590 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2591 elim_i2, elim_i1);
230d793d 2592 if (midnotes)
5f4f0e22
CH
2593 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2594 elim_i2, elim_i1);
230d793d
RS
2595
2596 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2597 know these are REG_UNUSED and want them to go to the desired insn,
663522cb 2598 so we always pass it as i3. We have not counted the notes in
1a26b032
RK
2599 reg_n_deaths yet, so we need to do so now. */
2600
230d793d 2601 if (newi2pat && new_i2_notes)
1a26b032
RK
2602 {
2603 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2604 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2605 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2606
1a26b032
RK
2607 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2608 }
2609
230d793d 2610 if (new_i3_notes)
1a26b032
RK
2611 {
2612 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2613 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2614 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2615
1a26b032
RK
2616 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2617 }
230d793d
RS
2618
2619 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2620 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2621 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2622 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2623 Show an additional death due to the REG_DEAD note we make here. If
2624 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2625
230d793d 2626 if (i3dest_killed)
1a26b032
RK
2627 {
2628 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2629 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2630
e9a25f70 2631 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2632 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2633 NULL_RTX),
ff3467a9 2634 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2635 else
38a448ca
RH
2636 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2637 NULL_RTX),
e9a25f70 2638 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2639 elim_i2, elim_i1);
1a26b032 2640 }
58c8c593 2641
230d793d 2642 if (i2dest_in_i2src)
58c8c593 2643 {
1a26b032 2644 if (GET_CODE (i2dest) == REG)
b1f21e0a 2645 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2646
58c8c593 2647 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2648 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2649 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2650 else
38a448ca 2651 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2652 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2653 NULL_RTX, NULL_RTX);
2654 }
2655
230d793d 2656 if (i1dest_in_i1src)
58c8c593 2657 {
1a26b032 2658 if (GET_CODE (i1dest) == REG)
b1f21e0a 2659 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2660
58c8c593 2661 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2662 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2663 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2664 else
38a448ca 2665 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2666 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2667 NULL_RTX, NULL_RTX);
2668 }
230d793d
RS
2669
2670 distribute_links (i3links);
2671 distribute_links (i2links);
2672 distribute_links (i1links);
2673
2674 if (GET_CODE (i2dest) == REG)
2675 {
d0ab8cd3
RK
2676 rtx link;
2677 rtx i2_insn = 0, i2_val = 0, set;
2678
2679 /* The insn that used to set this register doesn't exist, and
2680 this life of the register may not exist either. See if one of
663522cb 2681 I3's links points to an insn that sets I2DEST. If it does,
d0ab8cd3
RK
2682 that is now the last known value for I2DEST. If we don't update
2683 this and I2 set the register to a value that depended on its old
230d793d
RS
2684 contents, we will get confused. If this insn is used, thing
2685 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2686
2687 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2688 if ((set = single_set (XEXP (link, 0))) != 0
2689 && rtx_equal_p (i2dest, SET_DEST (set)))
2690 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2691
2692 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2693
2694 /* If the reg formerly set in I2 died only once and that was in I3,
2695 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2696 if (! added_sets_2
2697 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2698 && ! i2dest_in_i2src)
230d793d
RS
2699 {
2700 regno = REGNO (i2dest);
b1f21e0a 2701 REG_N_SETS (regno)--;
230d793d
RS
2702 }
2703 }
2704
2705 if (i1 && GET_CODE (i1dest) == REG)
2706 {
d0ab8cd3
RK
2707 rtx link;
2708 rtx i1_insn = 0, i1_val = 0, set;
2709
2710 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2711 if ((set = single_set (XEXP (link, 0))) != 0
2712 && rtx_equal_p (i1dest, SET_DEST (set)))
2713 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2714
2715 record_value_for_reg (i1dest, i1_insn, i1_val);
2716
230d793d 2717 regno = REGNO (i1dest);
5af91171 2718 if (! added_sets_1 && ! i1dest_in_i1src)
770ae6cc 2719 REG_N_SETS (regno)--;
230d793d
RS
2720 }
2721
951553af 2722 /* Update reg_nonzero_bits et al for any changes that may have been made
663522cb 2723 to this insn. The order of set_nonzero_bits_and_sign_copies() is
5fb7c247 2724 important. Because newi2pat can affect nonzero_bits of newpat */
22609cbf 2725 if (newi2pat)
84832317 2726 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
5fb7c247 2727 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
22609cbf 2728
44a76fc8
AG
2729 /* Set new_direct_jump_p if a new return or simple jump instruction
2730 has been created.
2731
663522cb 2732 If I3 is now an unconditional jump, ensure that it has a
230d793d 2733 BARRIER following it since it may have initially been a
381ee8af 2734 conditional jump. It may also be the last nonnote insn. */
663522cb 2735
7f1c097d 2736 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
44a76fc8
AG
2737 {
2738 *new_direct_jump_p = 1;
230d793d 2739
44a76fc8
AG
2740 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2741 || GET_CODE (temp) != BARRIER)
2742 emit_barrier_after (i3);
2743 }
230d793d
RS
2744 }
2745
2746 combine_successes++;
e7749837 2747 undo_commit ();
230d793d 2748
bcd49eb7
JW
2749 /* Clear this here, so that subsequent get_last_value calls are not
2750 affected. */
2751 subst_prev_insn = NULL_RTX;
2752
abe6e52f
RK
2753 if (added_links_insn
2754 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2755 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2756 return added_links_insn;
2757 else
2758 return newi2pat ? i2 : i3;
230d793d
RS
2759}
2760\f
2761/* Undo all the modifications recorded in undobuf. */
2762
2763static void
2764undo_all ()
2765{
241cea85
RK
2766 struct undo *undo, *next;
2767
2768 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2769 {
241cea85
RK
2770 next = undo->next;
2771 if (undo->is_int)
2772 *undo->where.i = undo->old_contents.i;
7c046e4e 2773 else
241cea85
RK
2774 *undo->where.r = undo->old_contents.r;
2775
2776 undo->next = undobuf.frees;
2777 undobuf.frees = undo;
7c046e4e 2778 }
230d793d 2779
845fc875 2780 undobuf.undos = undobuf.previous_undos = 0;
bcd49eb7
JW
2781
2782 /* Clear this here, so that subsequent get_last_value calls are not
2783 affected. */
2784 subst_prev_insn = NULL_RTX;
230d793d 2785}
e7749837
RH
2786
2787/* We've committed to accepting the changes we made. Move all
2788 of the undos to the free list. */
2789
2790static void
2791undo_commit ()
2792{
2793 struct undo *undo, *next;
2794
2795 for (undo = undobuf.undos; undo; undo = next)
2796 {
2797 next = undo->next;
2798 undo->next = undobuf.frees;
2799 undobuf.frees = undo;
2800 }
2801 undobuf.undos = undobuf.previous_undos = 0;
2802}
2803
230d793d
RS
2804\f
2805/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2806 where we have an arithmetic expression and return that point. LOC will
2807 be inside INSN.
230d793d
RS
2808
2809 try_combine will call this function to see if an insn can be split into
2810 two insns. */
2811
2812static rtx *
d0ab8cd3 2813find_split_point (loc, insn)
230d793d 2814 rtx *loc;
d0ab8cd3 2815 rtx insn;
230d793d
RS
2816{
2817 rtx x = *loc;
2818 enum rtx_code code = GET_CODE (x);
2819 rtx *split;
770ae6cc
RK
2820 unsigned HOST_WIDE_INT len = 0;
2821 HOST_WIDE_INT pos = 0;
2822 int unsignedp = 0;
6a651371 2823 rtx inner = NULL_RTX;
230d793d
RS
2824
2825 /* First special-case some codes. */
2826 switch (code)
2827 {
2828 case SUBREG:
2829#ifdef INSN_SCHEDULING
2830 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2831 point. */
2832 if (GET_CODE (SUBREG_REG (x)) == MEM)
2833 return loc;
2834#endif
d0ab8cd3 2835 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2836
230d793d 2837 case MEM:
916f14f1 2838#ifdef HAVE_lo_sum
230d793d
RS
2839 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2840 using LO_SUM and HIGH. */
2841 if (GET_CODE (XEXP (x, 0)) == CONST
2842 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2843 {
2844 SUBST (XEXP (x, 0),
2845 gen_rtx_combine (LO_SUM, Pmode,
2846 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2847 XEXP (x, 0)));
2848 return &XEXP (XEXP (x, 0), 0);
2849 }
230d793d
RS
2850#endif
2851
916f14f1
RK
2852 /* If we have a PLUS whose second operand is a constant and the
2853 address is not valid, perhaps will can split it up using
2854 the machine-specific way to split large constants. We use
ddd5a7c1 2855 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2856 it will not remain in the result. */
2857 if (GET_CODE (XEXP (x, 0)) == PLUS
2858 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2859 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2860 {
2861 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
38a448ca 2862 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
916f14f1
RK
2863 subst_insn);
2864
2865 /* This should have produced two insns, each of which sets our
2866 placeholder. If the source of the second is a valid address,
2867 we can make put both sources together and make a split point
2868 in the middle. */
2869
2870 if (seq && XVECLEN (seq, 0) == 2
2871 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2872 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2873 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2874 && ! reg_mentioned_p (reg,
2875 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2876 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2877 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2878 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2879 && memory_address_p (GET_MODE (x),
2880 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2881 {
2882 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2883 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2884
2885 /* Replace the placeholder in SRC2 with SRC1. If we can
2886 find where in SRC2 it was placed, that can become our
2887 split point and we can replace this address with SRC2.
2888 Just try two obvious places. */
2889
2890 src2 = replace_rtx (src2, reg, src1);
2891 split = 0;
2892 if (XEXP (src2, 0) == src1)
2893 split = &XEXP (src2, 0);
2894 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2895 && XEXP (XEXP (src2, 0), 0) == src1)
2896 split = &XEXP (XEXP (src2, 0), 0);
2897
2898 if (split)
2899 {
2900 SUBST (XEXP (x, 0), src2);
2901 return split;
2902 }
2903 }
663522cb 2904
1a26b032
RK
2905 /* If that didn't work, perhaps the first operand is complex and
2906 needs to be computed separately, so make a split point there.
2907 This will occur on machines that just support REG + CONST
2908 and have a constant moved through some previous computation. */
2909
2910 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2911 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2912 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2913 == 'o')))
2914 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2915 }
2916 break;
2917
230d793d
RS
2918 case SET:
2919#ifdef HAVE_cc0
2920 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2921 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2922 we need to put the operand into a register. So split at that
2923 point. */
2924
2925 if (SET_DEST (x) == cc0_rtx
2926 && GET_CODE (SET_SRC (x)) != COMPARE
2927 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2928 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2929 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2930 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2931 return &SET_SRC (x);
2932#endif
2933
2934 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2935 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2936 if (split && split != &SET_SRC (x))
2937 return split;
2938
041d7180
JL
2939 /* See if we can split SET_DEST as it stands. */
2940 split = find_split_point (&SET_DEST (x), insn);
2941 if (split && split != &SET_DEST (x))
2942 return split;
2943
230d793d
RS
2944 /* See if this is a bitfield assignment with everything constant. If
2945 so, this is an IOR of an AND, so split it into that. */
2946 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2947 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2948 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2949 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2950 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2951 && GET_CODE (SET_SRC (x)) == CONST_INT
2952 && ((INTVAL (XEXP (SET_DEST (x), 1))
2953 + INTVAL (XEXP (SET_DEST (x), 2)))
2954 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2955 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2956 {
770ae6cc
RK
2957 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
2958 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
2959 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
230d793d
RS
2960 rtx dest = XEXP (SET_DEST (x), 0);
2961 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2962 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d 2963
f76b9db2
ILT
2964 if (BITS_BIG_ENDIAN)
2965 pos = GET_MODE_BITSIZE (mode) - len - pos;
230d793d 2966
770ae6cc 2967 if (src == mask)
230d793d 2968 SUBST (SET_SRC (x),
5f4f0e22 2969 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2970 else
2971 SUBST (SET_SRC (x),
2972 gen_binary (IOR, mode,
663522cb
KH
2973 gen_binary (AND, mode, dest,
2974 GEN_INT (~(mask << pos)
5f4f0e22
CH
2975 & GET_MODE_MASK (mode))),
2976 GEN_INT (src << pos)));
230d793d
RS
2977
2978 SUBST (SET_DEST (x), dest);
2979
d0ab8cd3 2980 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2981 if (split && split != &SET_SRC (x))
2982 return split;
2983 }
2984
2985 /* Otherwise, see if this is an operation that we can split into two.
2986 If so, try to split that. */
2987 code = GET_CODE (SET_SRC (x));
2988
2989 switch (code)
2990 {
d0ab8cd3
RK
2991 case AND:
2992 /* If we are AND'ing with a large constant that is only a single
2993 bit and the result is only being used in a context where we
2994 need to know if it is zero or non-zero, replace it with a bit
2995 extraction. This will avoid the large constant, which might
2996 have taken more than one insn to make. If the constant were
2997 not a valid argument to the AND but took only one insn to make,
2998 this is no worse, but if it took more than one insn, it will
2999 be better. */
3000
3001 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3002 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3003 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3004 && GET_CODE (SET_DEST (x)) == REG
3005 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
3006 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3007 && XEXP (*split, 0) == SET_DEST (x)
3008 && XEXP (*split, 1) == const0_rtx)
3009 {
76184def
DE
3010 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3011 XEXP (SET_SRC (x), 0),
3012 pos, NULL_RTX, 1, 1, 0, 0);
3013 if (extraction != 0)
3014 {
3015 SUBST (SET_SRC (x), extraction);
3016 return find_split_point (loc, insn);
3017 }
d0ab8cd3
RK
3018 }
3019 break;
3020
1a6ec070
RK
3021 case NE:
3022 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3023 is known to be on, this can be converted into a NEG of a shift. */
3024 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3025 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4eb2cb10 3026 && 1 <= (pos = exact_log2
1a6ec070
RK
3027 (nonzero_bits (XEXP (SET_SRC (x), 0),
3028 GET_MODE (XEXP (SET_SRC (x), 0))))))
3029 {
3030 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3031
3032 SUBST (SET_SRC (x),
3033 gen_rtx_combine (NEG, mode,
3034 gen_rtx_combine (LSHIFTRT, mode,
3035 XEXP (SET_SRC (x), 0),
4eb2cb10 3036 GEN_INT (pos))));
1a6ec070
RK
3037
3038 split = find_split_point (&SET_SRC (x), insn);
3039 if (split && split != &SET_SRC (x))
3040 return split;
3041 }
3042 break;
3043
230d793d
RS
3044 case SIGN_EXTEND:
3045 inner = XEXP (SET_SRC (x), 0);
101c1a3d
JL
3046
3047 /* We can't optimize if either mode is a partial integer
3048 mode as we don't know how many bits are significant
3049 in those modes. */
3050 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3051 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3052 break;
3053
230d793d
RS
3054 pos = 0;
3055 len = GET_MODE_BITSIZE (GET_MODE (inner));
3056 unsignedp = 0;
3057 break;
3058
3059 case SIGN_EXTRACT:
3060 case ZERO_EXTRACT:
3061 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3062 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3063 {
3064 inner = XEXP (SET_SRC (x), 0);
3065 len = INTVAL (XEXP (SET_SRC (x), 1));
3066 pos = INTVAL (XEXP (SET_SRC (x), 2));
3067
f76b9db2
ILT
3068 if (BITS_BIG_ENDIAN)
3069 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
230d793d
RS
3070 unsignedp = (code == ZERO_EXTRACT);
3071 }
3072 break;
e9a25f70
JL
3073
3074 default:
3075 break;
230d793d
RS
3076 }
3077
3078 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3079 {
3080 enum machine_mode mode = GET_MODE (SET_SRC (x));
3081
d0ab8cd3
RK
3082 /* For unsigned, we have a choice of a shift followed by an
3083 AND or two shifts. Use two shifts for field sizes where the
3084 constant might be too large. We assume here that we can
3085 always at least get 8-bit constants in an AND insn, which is
3086 true for every current RISC. */
3087
3088 if (unsignedp && len <= 8)
230d793d
RS
3089 {
3090 SUBST (SET_SRC (x),
3091 gen_rtx_combine
3092 (AND, mode,
3093 gen_rtx_combine (LSHIFTRT, mode,
3094 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
3095 GEN_INT (pos)),
3096 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 3097
d0ab8cd3 3098 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3099 if (split && split != &SET_SRC (x))
3100 return split;
3101 }
3102 else
3103 {
3104 SUBST (SET_SRC (x),
3105 gen_rtx_combine
d0ab8cd3 3106 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
3107 gen_rtx_combine (ASHIFT, mode,
3108 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
3109 GEN_INT (GET_MODE_BITSIZE (mode)
3110 - len - pos)),
3111 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 3112
d0ab8cd3 3113 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3114 if (split && split != &SET_SRC (x))
3115 return split;
3116 }
3117 }
3118
3119 /* See if this is a simple operation with a constant as the second
3120 operand. It might be that this constant is out of range and hence
3121 could be used as a split point. */
3122 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3123 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3124 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3125 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3126 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3127 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3128 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3129 == 'o'))))
3130 return &XEXP (SET_SRC (x), 1);
3131
3132 /* Finally, see if this is a simple operation with its first operand
3133 not in a register. The operation might require this operand in a
3134 register, so return it as a split point. We can always do this
3135 because if the first operand were another operation, we would have
3136 already found it as a split point. */
3137 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3138 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3139 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3140 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3141 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3142 return &XEXP (SET_SRC (x), 0);
3143
3144 return 0;
3145
3146 case AND:
3147 case IOR:
3148 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3149 it is better to write this as (not (ior A B)) so we can split it.
3150 Similarly for IOR. */
3151 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3152 {
3153 SUBST (*loc,
3154 gen_rtx_combine (NOT, GET_MODE (x),
3155 gen_rtx_combine (code == IOR ? AND : IOR,
3156 GET_MODE (x),
3157 XEXP (XEXP (x, 0), 0),
3158 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 3159 return find_split_point (loc, insn);
230d793d
RS
3160 }
3161
3162 /* Many RISC machines have a large set of logical insns. If the
3163 second operand is a NOT, put it first so we will try to split the
3164 other operand first. */
3165 if (GET_CODE (XEXP (x, 1)) == NOT)
3166 {
3167 rtx tem = XEXP (x, 0);
3168 SUBST (XEXP (x, 0), XEXP (x, 1));
3169 SUBST (XEXP (x, 1), tem);
3170 }
3171 break;
e9a25f70
JL
3172
3173 default:
3174 break;
230d793d
RS
3175 }
3176
3177 /* Otherwise, select our actions depending on our rtx class. */
3178 switch (GET_RTX_CLASS (code))
3179 {
3180 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3181 case '3':
d0ab8cd3 3182 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
3183 if (split)
3184 return split;
0f41302f 3185 /* ... fall through ... */
230d793d
RS
3186 case '2':
3187 case 'c':
3188 case '<':
d0ab8cd3 3189 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
3190 if (split)
3191 return split;
0f41302f 3192 /* ... fall through ... */
230d793d
RS
3193 case '1':
3194 /* Some machines have (and (shift ...) ...) insns. If X is not
3195 an AND, but XEXP (X, 0) is, use it as our split point. */
3196 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3197 return &XEXP (x, 0);
3198
d0ab8cd3 3199 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
3200 if (split)
3201 return split;
3202 return loc;
3203 }
3204
3205 /* Otherwise, we don't have a split point. */
3206 return 0;
3207}
3208\f
3209/* Throughout X, replace FROM with TO, and return the result.
3210 The result is TO if X is FROM;
3211 otherwise the result is X, but its contents may have been modified.
3212 If they were modified, a record was made in undobuf so that
3213 undo_all will (among other things) return X to its original state.
3214
3215 If the number of changes necessary is too much to record to undo,
3216 the excess changes are not made, so the result is invalid.
3217 The changes already made can still be undone.
3218 undobuf.num_undo is incremented for such changes, so by testing that
3219 the caller can tell whether the result is valid.
3220
3221 `n_occurrences' is incremented each time FROM is replaced.
663522cb 3222
230d793d
RS
3223 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3224
5089e22e 3225 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
3226 by copying if `n_occurrences' is non-zero. */
3227
3228static rtx
3229subst (x, from, to, in_dest, unique_copy)
3230 register rtx x, from, to;
3231 int in_dest;
3232 int unique_copy;
3233{
f24ad0e4 3234 register enum rtx_code code = GET_CODE (x);
230d793d 3235 enum machine_mode op0_mode = VOIDmode;
6f7d635c 3236 register const char *fmt;
8079805d
RK
3237 register int len, i;
3238 rtx new;
230d793d
RS
3239
3240/* Two expressions are equal if they are identical copies of a shared
3241 RTX or if they are both registers with the same register number
3242 and mode. */
3243
3244#define COMBINE_RTX_EQUAL_P(X,Y) \
3245 ((X) == (Y) \
3246 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3247 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3248
3249 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3250 {
3251 n_occurrences++;
3252 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3253 }
3254
3255 /* If X and FROM are the same register but different modes, they will
663522cb 3256 not have been seen as equal above. However, flow.c will make a
230d793d
RS
3257 LOG_LINKS entry for that case. If we do nothing, we will try to
3258 rerecognize our original insn and, when it succeeds, we will
3259 delete the feeding insn, which is incorrect.
3260
3261 So force this insn not to match in this (rare) case. */
3262 if (! in_dest && code == REG && GET_CODE (from) == REG
3263 && REGNO (x) == REGNO (from))
38a448ca 3264 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
3265
3266 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3267 of which may contain things that can be combined. */
3268 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3269 return x;
3270
3271 /* It is possible to have a subexpression appear twice in the insn.
3272 Suppose that FROM is a register that appears within TO.
3273 Then, after that subexpression has been scanned once by `subst',
3274 the second time it is scanned, TO may be found. If we were
3275 to scan TO here, we would find FROM within it and create a
3276 self-referent rtl structure which is completely wrong. */
3277 if (COMBINE_RTX_EQUAL_P (x, to))
3278 return to;
3279
4f4b3679
RH
3280 /* Parallel asm_operands need special attention because all of the
3281 inputs are shared across the arms. Furthermore, unsharing the
3282 rtl results in recognition failures. Failure to handle this case
3283 specially can result in circular rtl.
3284
3285 Solve this by doing a normal pass across the first entry of the
3286 parallel, and only processing the SET_DESTs of the subsequent
3287 entries. Ug. */
3288
3289 if (code == PARALLEL
3290 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3291 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
230d793d 3292 {
4f4b3679
RH
3293 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3294
3295 /* If this substitution failed, this whole thing fails. */
3296 if (GET_CODE (new) == CLOBBER
3297 && XEXP (new, 0) == const0_rtx)
3298 return new;
3299
3300 SUBST (XVECEXP (x, 0, 0), new);
3301
3302 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
230d793d 3303 {
4f4b3679 3304 rtx dest = SET_DEST (XVECEXP (x, 0, i));
663522cb 3305
4f4b3679
RH
3306 if (GET_CODE (dest) != REG
3307 && GET_CODE (dest) != CC0
3308 && GET_CODE (dest) != PC)
230d793d 3309 {
4f4b3679 3310 new = subst (dest, from, to, 0, unique_copy);
230d793d 3311
4f4b3679
RH
3312 /* If this substitution failed, this whole thing fails. */
3313 if (GET_CODE (new) == CLOBBER
3314 && XEXP (new, 0) == const0_rtx)
3315 return new;
230d793d 3316
4f4b3679 3317 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
230d793d
RS
3318 }
3319 }
4f4b3679
RH
3320 }
3321 else
3322 {
3323 len = GET_RTX_LENGTH (code);
3324 fmt = GET_RTX_FORMAT (code);
3325
3326 /* We don't need to process a SET_DEST that is a register, CC0,
3327 or PC, so set up to skip this common case. All other cases
3328 where we want to suppress replacing something inside a
3329 SET_SRC are handled via the IN_DEST operand. */
3330 if (code == SET
3331 && (GET_CODE (SET_DEST (x)) == REG
3332 || GET_CODE (SET_DEST (x)) == CC0
3333 || GET_CODE (SET_DEST (x)) == PC))
3334 fmt = "ie";
3335
3336 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3337 constant. */
3338 if (fmt[0] == 'e')
3339 op0_mode = GET_MODE (XEXP (x, 0));
3340
3341 for (i = 0; i < len; i++)
230d793d 3342 {
4f4b3679 3343 if (fmt[i] == 'E')
230d793d 3344 {
4f4b3679
RH
3345 register int j;
3346 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3347 {
3348 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3349 {
3350 new = (unique_copy && n_occurrences
3351 ? copy_rtx (to) : to);
3352 n_occurrences++;
3353 }
3354 else
3355 {
3356 new = subst (XVECEXP (x, i, j), from, to, 0,
3357 unique_copy);
3358
3359 /* If this substitution failed, this whole thing
3360 fails. */
3361 if (GET_CODE (new) == CLOBBER
3362 && XEXP (new, 0) == const0_rtx)
3363 return new;
3364 }
3365
3366 SUBST (XVECEXP (x, i, j), new);
3367 }
3368 }
3369 else if (fmt[i] == 'e')
3370 {
3371 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3372 {
3373 /* In general, don't install a subreg involving two
3374 modes not tieable. It can worsen register
3375 allocation, and can even make invalid reload
3376 insns, since the reg inside may need to be copied
3377 from in the outside mode, and that may be invalid
3378 if it is an fp reg copied in integer mode.
3379
3380 We allow two exceptions to this: It is valid if
3381 it is inside another SUBREG and the mode of that
3382 SUBREG and the mode of the inside of TO is
3383 tieable and it is valid if X is a SET that copies
3384 FROM to CC0. */
3385
3386 if (GET_CODE (to) == SUBREG
3387 && ! MODES_TIEABLE_P (GET_MODE (to),
3388 GET_MODE (SUBREG_REG (to)))
3389 && ! (code == SUBREG
3390 && MODES_TIEABLE_P (GET_MODE (x),
3391 GET_MODE (SUBREG_REG (to))))
42301240 3392#ifdef HAVE_cc0
4f4b3679 3393 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
42301240 3394#endif
4f4b3679
RH
3395 )
3396 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
42301240 3397
02188693 3398#ifdef CLASS_CANNOT_CHANGE_MODE
ed8afe3a
GK
3399 if (code == SUBREG
3400 && GET_CODE (to) == REG
3401 && REGNO (to) < FIRST_PSEUDO_REGISTER
3402 && (TEST_HARD_REG_BIT
02188693 3403 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
ed8afe3a 3404 REGNO (to)))
02188693
RH
3405 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3406 GET_MODE (x)))
ed8afe3a
GK
3407 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3408#endif
3409
4f4b3679
RH
3410 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3411 n_occurrences++;
3412 }
3413 else
3414 /* If we are in a SET_DEST, suppress most cases unless we
3415 have gone inside a MEM, in which case we want to
3416 simplify the address. We assume here that things that
3417 are actually part of the destination have their inner
663522cb 3418 parts in the first expression. This is true for SUBREG,
4f4b3679
RH
3419 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3420 things aside from REG and MEM that should appear in a
3421 SET_DEST. */
3422 new = subst (XEXP (x, i), from, to,
3423 (((in_dest
3424 && (code == SUBREG || code == STRICT_LOW_PART
3425 || code == ZERO_EXTRACT))
3426 || code == SET)
3427 && i == 0), unique_copy);
3428
3429 /* If we found that we will have to reject this combination,
3430 indicate that by returning the CLOBBER ourselves, rather than
3431 an expression containing it. This will speed things up as
3432 well as prevent accidents where two CLOBBERs are considered
3433 to be equal, thus producing an incorrect simplification. */
3434
3435 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3436 return new;
3437
3438 SUBST (XEXP (x, i), new);
230d793d 3439 }
230d793d
RS
3440 }
3441 }
3442
8079805d
RK
3443 /* Try to simplify X. If the simplification changed the code, it is likely
3444 that further simplification will help, so loop, but limit the number
3445 of repetitions that will be performed. */
3446
3447 for (i = 0; i < 4; i++)
3448 {
3449 /* If X is sufficiently simple, don't bother trying to do anything
3450 with it. */
3451 if (code != CONST_INT && code != REG && code != CLOBBER)
31ec4e5e 3452 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
d0ab8cd3 3453
8079805d
RK
3454 if (GET_CODE (x) == code)
3455 break;
d0ab8cd3 3456
8079805d 3457 code = GET_CODE (x);
eeb43d32 3458
8079805d
RK
3459 /* We no longer know the original mode of operand 0 since we
 3460 have changed the form of X. */
3461 op0_mode = VOIDmode;
3462 }
eeb43d32 3463
8079805d
RK
3464 return x;
3465}
3466\f
3467/* Simplify X, a piece of RTL. We just operate on the expression at the
3468 outer level; call `subst' to simplify recursively. Return the new
3469 expression.
3470
3471 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3472 will be the iteration even if an expression with a code different from
3473 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3474
8079805d 3475static rtx
31ec4e5e 3476combine_simplify_rtx (x, op0_mode, last, in_dest)
8079805d
RK
3477 rtx x;
3478 enum machine_mode op0_mode;
3479 int last;
3480 int in_dest;
3481{
3482 enum rtx_code code = GET_CODE (x);
3483 enum machine_mode mode = GET_MODE (x);
3484 rtx temp;
3485 int i;
d0ab8cd3 3486
230d793d
RS
3487 /* If this is a commutative operation, put a constant last and a complex
3488 expression first. We don't need to do this for comparisons here. */
3489 if (GET_RTX_CLASS (code) == 'c'
3490 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3491 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3492 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3493 || (GET_CODE (XEXP (x, 0)) == SUBREG
3494 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3495 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3496 {
3497 temp = XEXP (x, 0);
3498 SUBST (XEXP (x, 0), XEXP (x, 1));
3499 SUBST (XEXP (x, 1), temp);
3500 }
3501
22609cbf
RK
3502 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3503 sign extension of a PLUS with a constant, reverse the order of the sign
 3504 extension and the addition. Note that this is not the same as the original
3505 code, but overflow is undefined for signed values. Also note that the
3506 PLUS will have been partially moved "inside" the sign-extension, so that
3507 the first operand of X will really look like:
3508 (ashiftrt (plus (ashift A C4) C5) C4).
3509 We convert this to
3510 (plus (ashiftrt (ashift A C4) C2) C4)
3511 and replace the first operand of X with that expression. Later parts
3512 of this function may simplify the expression further.
3513
3514 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3515 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3516 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3517
3518 We do this to simplify address expressions. */
3519
3520 if ((code == PLUS || code == MINUS || code == MULT)
3521 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3522 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3523 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3524 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3525 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3526 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3527 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3528 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3529 XEXP (XEXP (XEXP (x, 0), 0), 1),
3530 XEXP (XEXP (x, 0), 1))) != 0)
3531 {
3532 rtx new
3533 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3534 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3535 INTVAL (XEXP (XEXP (x, 0), 1)));
3536
3537 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3538 INTVAL (XEXP (XEXP (x, 0), 1)));
3539
3540 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3541 }
3542
663522cb 3543 /* If this is a simple operation applied to an IF_THEN_ELSE, try
d0ab8cd3 3544 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3545 things. Check for cases where both arms are testing the same
3546 condition.
3547
3548 Don't do anything if all operands are very simple. */
3549
3550 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3551 || GET_RTX_CLASS (code) == '<')
3552 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3553 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3554 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3555 == 'o')))
3556 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3557 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3558 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3559 == 'o')))))
3560 || (GET_RTX_CLASS (code) == '1'
3561 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3562 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3563 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3564 == 'o'))))))
d0ab8cd3 3565 {
abe6e52f
RK
3566 rtx cond, true, false;
3567
3568 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3569 if (cond != 0
3570 /* If everything is a comparison, what we have is highly unlikely
3571 to be simpler, so don't use it. */
3572 && ! (GET_RTX_CLASS (code) == '<'
3573 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3574 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3575 {
3576 rtx cop1 = const0_rtx;
3577 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3578
15448afc
RK
3579 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3580 return x;
3581
663522cb 3582 /* Simplify the alternative arms; this may collapse the true and
9210df58
RK
3583 false arms to store-flag values. */
3584 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3585 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3586
085f1714
RH
3587 /* If true and false are not general_operands, an if_then_else
3588 is unlikely to be simpler. */
3589 if (general_operand (true, VOIDmode)
3590 && general_operand (false, VOIDmode))
3591 {
3592 /* Restarting if we generate a store-flag expression will cause
3593 us to loop. Just drop through in this case. */
3594
3595 /* If the result values are STORE_FLAG_VALUE and zero, we can
3596 just make the comparison operation. */
3597 if (true == const_true_rtx && false == const0_rtx)
3598 x = gen_binary (cond_code, mode, cond, cop1);
3599 else if (true == const0_rtx && false == const_true_rtx)
3600 x = gen_binary (reverse_condition (cond_code),
3601 mode, cond, cop1);
3602
3603 /* Likewise, we can make the negate of a comparison operation
3604 if the result values are - STORE_FLAG_VALUE and zero. */
3605 else if (GET_CODE (true) == CONST_INT
3606 && INTVAL (true) == - STORE_FLAG_VALUE
3607 && false == const0_rtx)
3608 x = gen_unary (NEG, mode, mode,
3609 gen_binary (cond_code, mode, cond, cop1));
3610 else if (GET_CODE (false) == CONST_INT
3611 && INTVAL (false) == - STORE_FLAG_VALUE
3612 && true == const0_rtx)
3613 x = gen_unary (NEG, mode, mode,
663522cb 3614 gen_binary (reverse_condition (cond_code),
085f1714
RH
3615 mode, cond, cop1));
3616 else
3617 return gen_rtx_IF_THEN_ELSE (mode,
3618 gen_binary (cond_code, VOIDmode,
3619 cond, cop1),
3620 true, false);
5109d49f 3621
085f1714
RH
3622 code = GET_CODE (x);
3623 op0_mode = VOIDmode;
3624 }
abe6e52f 3625 }
d0ab8cd3
RK
3626 }
3627
230d793d
RS
3628 /* Try to fold this expression in case we have constants that weren't
3629 present before. */
3630 temp = 0;
3631 switch (GET_RTX_CLASS (code))
3632 {
3633 case '1':
3634 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3635 break;
3636 case '<':
47b1e19b
JH
3637 {
3638 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3639 if (cmp_mode == VOIDmode)
3640 cmp_mode = GET_MODE (XEXP (x, 1));
3641 temp = simplify_relational_operation (code, cmp_mode,
3642 XEXP (x, 0), XEXP (x, 1));
3643 }
77fa0940 3644#ifdef FLOAT_STORE_FLAG_VALUE
12530dbe
RH
3645 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3646 {
3647 if (temp == const0_rtx)
3648 temp = CONST0_RTX (mode);
3649 else
3650 temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3651 }
77fa0940 3652#endif
230d793d
RS
3653 break;
3654 case 'c':
3655 case '2':
3656 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3657 break;
3658 case 'b':
3659 case '3':
3660 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3661 XEXP (x, 1), XEXP (x, 2));
3662 break;
3663 }
3664
3665 if (temp)
d0ab8cd3 3666 x = temp, code = GET_CODE (temp);
230d793d 3667
230d793d 3668 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3669 if (code == PLUS || code == MINUS
3670 || code == AND || code == IOR || code == XOR)
230d793d
RS
3671 {
3672 x = apply_distributive_law (x);
3673 code = GET_CODE (x);
3674 }
3675
3676 /* If CODE is an associative operation not otherwise handled, see if we
3677 can associate some operands. This can win if they are constants or
 3678 if they are logically related (i.e. (a & b) & a). */
3679 if ((code == PLUS || code == MINUS
3680 || code == MULT || code == AND || code == IOR || code == XOR
3681 || code == DIV || code == UDIV
3682 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3683 && INTEGRAL_MODE_P (mode))
230d793d
RS
3684 {
3685 if (GET_CODE (XEXP (x, 0)) == code)
3686 {
3687 rtx other = XEXP (XEXP (x, 0), 0);
3688 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3689 rtx inner_op1 = XEXP (x, 1);
3690 rtx inner;
663522cb 3691
230d793d
RS
3692 /* Make sure we pass the constant operand if any as the second
3693 one if this is a commutative operation. */
3694 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3695 {
3696 rtx tem = inner_op0;
3697 inner_op0 = inner_op1;
3698 inner_op1 = tem;
3699 }
3700 inner = simplify_binary_operation (code == MINUS ? PLUS
3701 : code == DIV ? MULT
3702 : code == UDIV ? MULT
3703 : code,
3704 mode, inner_op0, inner_op1);
3705
3706 /* For commutative operations, try the other pair if that one
3707 didn't simplify. */
3708 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3709 {
3710 other = XEXP (XEXP (x, 0), 1);
3711 inner = simplify_binary_operation (code, mode,
3712 XEXP (XEXP (x, 0), 0),
3713 XEXP (x, 1));
3714 }
3715
3716 if (inner)
8079805d 3717 return gen_binary (code, mode, other, inner);
230d793d
RS
3718 }
3719 }
3720
3721 /* A little bit of algebraic simplification here. */
3722 switch (code)
3723 {
3724 case MEM:
3725 /* Ensure that our address has any ASHIFTs converted to MULT in case
3726 address-recognizing predicates are called later. */
3727 temp = make_compound_operation (XEXP (x, 0), MEM);
3728 SUBST (XEXP (x, 0), temp);
3729 break;
3730
3731 case SUBREG:
3732 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3733 is paradoxical. If we can't do that safely, then it becomes
3734 something nonsensical so that this combination won't take place. */
3735
3736 if (GET_CODE (SUBREG_REG (x)) == MEM
3737 && (GET_MODE_SIZE (mode)
3738 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3739 {
3740 rtx inner = SUBREG_REG (x);
3741 int endian_offset = 0;
3742 /* Don't change the mode of the MEM
3743 if that would change the meaning of the address. */
3744 if (MEM_VOLATILE_P (SUBREG_REG (x))
3745 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3746 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3747
f76b9db2
ILT
3748 if (BYTES_BIG_ENDIAN)
3749 {
3750 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3751 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3752 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3753 endian_offset -= (UNITS_PER_WORD
3754 - GET_MODE_SIZE (GET_MODE (inner)));
3755 }
230d793d
RS
3756 /* Note if the plus_constant doesn't make a valid address
3757 then this combination won't be accepted. */
38a448ca
RH
3758 x = gen_rtx_MEM (mode,
3759 plus_constant (XEXP (inner, 0),
3760 (SUBREG_WORD (x) * UNITS_PER_WORD
3761 + endian_offset)));
c6df88cb 3762 MEM_COPY_ATTRIBUTES (x, inner);
230d793d
RS
3763 return x;
3764 }
3765
3766 /* If we are in a SET_DEST, these other cases can't apply. */
3767 if (in_dest)
3768 return x;
3769
3770 /* Changing mode twice with SUBREG => just change it once,
3771 or not at all if changing back to starting mode. */
3772 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3773 {
3774 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3775 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3776 return SUBREG_REG (SUBREG_REG (x));
3777
3778 SUBST_INT (SUBREG_WORD (x),
3779 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3780 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3781 }
3782
3783 /* SUBREG of a hard register => just change the register number
3784 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3785 suppress this combination. If the hard register is the stack,
3786 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3787
3788 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3789 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3790 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3791#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3792 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3793#endif
26ecfc76
RK
3794#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3795 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3796#endif
3797 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3798 {
3799 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3800 mode))
38a448ca
RH
3801 return gen_rtx_REG (mode,
3802 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3803 else
38a448ca 3804 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d
RS
3805 }
3806
3807 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3808 word and low-order part. Only do this if we are narrowing
3809 the constant; if it is being widened, we have no idea what
3810 the extra bits will have been set to. */
230d793d
RS
3811
3812 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3813 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3814 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
230d793d
RS
3815 && GET_MODE_CLASS (mode) == MODE_INT)
3816 {
3817 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3818 0, op0_mode);
230d793d
RS
3819 if (temp)
3820 return temp;
3821 }
663522cb 3822
19808e22
RS
3823 /* If we want a subreg of a constant, at offset 0,
3824 take the low bits. On a little-endian machine, that's
3825 always valid. On a big-endian machine, it's valid
3c99d5ff 3826 only if the constant's mode fits in one word. Note that we
61b1bece 3827 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3c99d5ff
RK
3828 if (CONSTANT_P (SUBREG_REG (x))
3829 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3830 || ! WORDS_BIG_ENDIAN)
3831 ? SUBREG_WORD (x) == 0
3832 : (SUBREG_WORD (x)
3833 == ((GET_MODE_SIZE (op0_mode)
3834 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3835 / UNITS_PER_WORD)))
f82da7d2 3836 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3837 && (! WORDS_BIG_ENDIAN
3838 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3839 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3840
b65c1b5b
RK
3841 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3842 since we are saying that the high bits don't matter. */
3843 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3844 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
54f3b5c2
R
3845 {
3846 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3847 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
3848 return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode);
3849 return SUBREG_REG (x);
3850 }
b65c1b5b 3851
87e3e0c1
RK
3852 /* Note that we cannot do any narrowing for non-constants since
3853 we might have been counting on using the fact that some bits were
3854 zero. We now do this in the SET. */
3855
230d793d
RS
3856 break;
3857
3858 case NOT:
3859 /* (not (plus X -1)) can become (neg X). */
3860 if (GET_CODE (XEXP (x, 0)) == PLUS
3861 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3862 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3863
3864 /* Similarly, (not (neg X)) is (plus X -1). */
3865 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3866 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3867 constm1_rtx);
230d793d 3868
663522cb 3869 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
d0ab8cd3
RK
3870 if (GET_CODE (XEXP (x, 0)) == XOR
3871 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3872 && (temp = simplify_unary_operation (NOT, mode,
3873 XEXP (XEXP (x, 0), 1),
3874 mode)) != 0)
787745f5 3875 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
663522cb 3876
230d793d
RS
3877 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3878 other than 1, but that is not valid. We could do a similar
3879 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3880 but this doesn't seem common enough to bother with. */
3881 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3882 && XEXP (XEXP (x, 0), 0) == const1_rtx)
38a448ca
RH
3883 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3884 XEXP (XEXP (x, 0), 1));
663522cb 3885
230d793d
RS
3886 if (GET_CODE (XEXP (x, 0)) == SUBREG
3887 && subreg_lowpart_p (XEXP (x, 0))
3888 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3889 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3890 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3891 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3892 {
3893 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3894
38a448ca
RH
3895 x = gen_rtx_ROTATE (inner_mode,
3896 gen_unary (NOT, inner_mode, inner_mode,
3897 const1_rtx),
3898 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3899 return gen_lowpart_for_combine (mode, x);
230d793d 3900 }
663522cb 3901
0802d516
RK
3902 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3903 reversing the comparison code if valid. */
3904 if (STORE_FLAG_VALUE == -1
3905 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3906 && reversible_comparison_p (XEXP (x, 0)))
3907 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3908 mode, XEXP (XEXP (x, 0), 0),
3909 XEXP (XEXP (x, 0), 1));
500c518b
RK
3910
3911 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3912 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3913 perform the above simplification. */
500c518b 3914
0802d516 3915 if (STORE_FLAG_VALUE == -1
500c518b 3916 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
37ac53d9 3917 && XEXP (x, 1) == const1_rtx
500c518b
RK
3918 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3919 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3920 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3921
3922 /* Apply De Morgan's laws to reduce number of patterns for machines
3923 with negating logical insns (and-not, nand, etc.). If result has
3924 only one NOT, put it first, since that is how the patterns are
3925 coded. */
3926
3927 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3928 {
663522cb 3929 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
5bd60ce6 3930 enum machine_mode op_mode;
230d793d 3931
5bd60ce6
RH
3932 op_mode = GET_MODE (in1);
3933 in1 = gen_unary (NOT, op_mode, op_mode, in1);
230d793d 3934
5bd60ce6
RH
3935 op_mode = GET_MODE (in2);
3936 if (op_mode == VOIDmode)
3937 op_mode = mode;
3938 in2 = gen_unary (NOT, op_mode, op_mode, in2);
663522cb 3939
5bd60ce6 3940 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
663522cb
KH
3941 {
3942 rtx tem = in2;
3943 in2 = in1; in1 = tem;
3944 }
3945
3946 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3947 mode, in1, in2);
3948 }
230d793d
RS
3949 break;
3950
3951 case NEG:
3952 /* (neg (plus X 1)) can become (not X). */
3953 if (GET_CODE (XEXP (x, 0)) == PLUS
3954 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3955 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3956
3957 /* Similarly, (neg (not X)) is (plus X 1). */
3958 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3959 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3960
230d793d
RS
3961 /* (neg (minus X Y)) can become (minus Y X). */
3962 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3963 && (! FLOAT_MODE_P (mode)
0f41302f 3964 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3965 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3966 || flag_fast_math))
8079805d
RK
3967 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3968 XEXP (XEXP (x, 0), 0));
230d793d 3969
0f41302f 3970 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3971 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3972 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3973 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3974
230d793d
RS
3975 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3976 if we can then eliminate the NEG (e.g.,
3977 if the operand is a constant). */
3978
3979 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3980 {
3981 temp = simplify_unary_operation (NEG, mode,
3982 XEXP (XEXP (x, 0), 0), mode);
3983 if (temp)
3984 {
3985 SUBST (XEXP (XEXP (x, 0), 0), temp);
3986 return XEXP (x, 0);
3987 }
3988 }
3989
3990 temp = expand_compound_operation (XEXP (x, 0));
3991
3992 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3993 replaced by (lshiftrt X C). This will convert
3994 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3995
3996 if (GET_CODE (temp) == ASHIFTRT
3997 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3998 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3999 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4000 INTVAL (XEXP (temp, 1)));
230d793d 4001
951553af 4002 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
4003 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4004 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
4005 (sign_extract X 1 Y). But only do this if TEMP isn't a register
4006 or a SUBREG of one since we'd be making the expression more
4007 complex if it was just a register. */
4008
4009 if (GET_CODE (temp) != REG
4010 && ! (GET_CODE (temp) == SUBREG
4011 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 4012 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
4013 {
4014 rtx temp1 = simplify_shift_const
5f4f0e22
CH
4015 (NULL_RTX, ASHIFTRT, mode,
4016 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
4017 GET_MODE_BITSIZE (mode) - 1 - i),
4018 GET_MODE_BITSIZE (mode) - 1 - i);
4019
4020 /* If all we did was surround TEMP with the two shifts, we
4021 haven't improved anything, so don't use it. Otherwise,
4022 we are better off with TEMP1. */
4023 if (GET_CODE (temp1) != ASHIFTRT
4024 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4025 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 4026 return temp1;
230d793d
RS
4027 }
4028 break;
4029
2ca9ae17 4030 case TRUNCATE:
e30fb98f
JL
4031 /* We can't handle truncation to a partial integer mode here
4032 because we don't know the real bitsize of the partial
4033 integer mode. */
4034 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4035 break;
4036
80608e27
JL
4037 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4038 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4039 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
4040 SUBST (XEXP (x, 0),
4041 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4042 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
4043
4044 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
4045 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4046 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4047 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4048 return XEXP (XEXP (x, 0), 0);
4049
4050 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4051 (OP:SI foo:SI) if OP is NEG or ABS. */
4052 if ((GET_CODE (XEXP (x, 0)) == ABS
4053 || GET_CODE (XEXP (x, 0)) == NEG)
4054 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4055 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4056 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4057 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
4058 XEXP (XEXP (XEXP (x, 0), 0), 0));
4059
4060 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4061 (truncate:SI x). */
4062 if (GET_CODE (XEXP (x, 0)) == SUBREG
4063 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4064 && subreg_lowpart_p (XEXP (x, 0)))
4065 return SUBREG_REG (XEXP (x, 0));
4066
4067 /* If we know that the value is already truncated, we can
14a774a9
RK
4068 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4069 is nonzero for the corresponding modes. But don't do this
4070 for an (LSHIFTRT (MULT ...)) since this will cause problems
4071 with the umulXi3_highpart patterns. */
6a992214
JL
4072 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4073 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4074 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
14a774a9
RK
4075 >= GET_MODE_BITSIZE (mode) + 1
4076 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4077 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
0f13a422
ILT
4078 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4079
4080 /* A truncate of a comparison can be replaced with a subreg if
4081 STORE_FLAG_VALUE permits. This is like the previous test,
4082 but it works even if the comparison is done in a mode larger
4083 than HOST_BITS_PER_WIDE_INT. */
4084 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4085 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
663522cb 4086 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
0f13a422
ILT
4087 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4088
4089 /* Similarly, a truncate of a register whose value is a
4090 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4091 permits. */
4092 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 4093 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
0f13a422
ILT
4094 && (temp = get_last_value (XEXP (x, 0)))
4095 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4096 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4097
2ca9ae17
JW
4098 break;
4099
230d793d
RS
4100 case FLOAT_TRUNCATE:
4101 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4102 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4103 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
663522cb 4104 return XEXP (XEXP (x, 0), 0);
4635f748
RK
4105
4106 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4107 (OP:SF foo:SF) if OP is NEG or ABS. */
4108 if ((GET_CODE (XEXP (x, 0)) == ABS
4109 || GET_CODE (XEXP (x, 0)) == NEG)
4110 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4111 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
4112 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
4113 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
4114
4115 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4116 is (float_truncate:SF x). */
4117 if (GET_CODE (XEXP (x, 0)) == SUBREG
4118 && subreg_lowpart_p (XEXP (x, 0))
4119 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4120 return SUBREG_REG (XEXP (x, 0));
663522cb 4121 break;
230d793d
RS
4122
4123#ifdef HAVE_cc0
4124 case COMPARE:
4125 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4126 using cc0, in which case we want to leave it as a COMPARE
4127 so we can distinguish it from a register-register-copy. */
4128 if (XEXP (x, 1) == const0_rtx)
4129 return XEXP (x, 0);
4130
4131 /* In IEEE floating point, x-0 is not the same as x. */
4132 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
4133 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
4134 || flag_fast_math)
230d793d
RS
4135 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4136 return XEXP (x, 0);
4137 break;
4138#endif
4139
4140 case CONST:
4141 /* (const (const X)) can become (const X). Do it this way rather than
4142 returning the inner CONST since CONST can be shared with a
4143 REG_EQUAL note. */
4144 if (GET_CODE (XEXP (x, 0)) == CONST)
4145 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4146 break;
4147
4148#ifdef HAVE_lo_sum
4149 case LO_SUM:
4150 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4151 can add in an offset. find_split_point will split this address up
4152 again if it doesn't match. */
4153 if (GET_CODE (XEXP (x, 0)) == HIGH
4154 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4155 return XEXP (x, 1);
4156 break;
4157#endif
4158
4159 case PLUS:
4160 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4161 outermost. That's because that's the way indexed addresses are
4162 supposed to appear. This code used to check many more cases, but
4163 they are now checked elsewhere. */
4164 if (GET_CODE (XEXP (x, 0)) == PLUS
4165 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4166 return gen_binary (PLUS, mode,
4167 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4168 XEXP (x, 1)),
4169 XEXP (XEXP (x, 0), 1));
4170
4171 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4172 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4173 bit-field and can be replaced by either a sign_extend or a
e6380233
JL
4174 sign_extract. The `and' may be a zero_extend and the two
4175 <c>, -<c> constants may be reversed. */
230d793d
RS
4176 if (GET_CODE (XEXP (x, 0)) == XOR
4177 && GET_CODE (XEXP (x, 1)) == CONST_INT
4178 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
663522cb 4179 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
e6380233
JL
4180 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4181 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5f4f0e22 4182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
4183 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4184 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4185 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 4186 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
4187 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4188 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
770ae6cc 4189 == (unsigned int) i + 1))))
8079805d
RK
4190 return simplify_shift_const
4191 (NULL_RTX, ASHIFTRT, mode,
4192 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4193 XEXP (XEXP (XEXP (x, 0), 0), 0),
4194 GET_MODE_BITSIZE (mode) - (i + 1)),
4195 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 4196
bc0776c6
RK
4197 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4198 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4199 is 1. This produces better code than the alternative immediately
4200 below. */
4201 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4202 && reversible_comparison_p (XEXP (x, 0))
4203 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4204 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 4205 return
0c1c8ea6 4206 gen_unary (NEG, mode, mode,
8079805d
RK
4207 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
4208 mode, XEXP (XEXP (x, 0), 0),
4209 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
4210
4211 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
4212 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4213 the bitsize of the mode - 1. This allows simplification of
4214 "a = (b & 8) == 0;" */
4215 if (XEXP (x, 1) == constm1_rtx
4216 && GET_CODE (XEXP (x, 0)) != REG
4217 && ! (GET_CODE (XEXP (x,0)) == SUBREG
4218 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 4219 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
4220 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4221 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4222 gen_rtx_combine (XOR, mode,
4223 XEXP (x, 0), const1_rtx),
4224 GET_MODE_BITSIZE (mode) - 1),
4225 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
4226
4227 /* If we are adding two things that have no bits in common, convert
4228 the addition into an IOR. This will often be further simplified,
4229 for example in cases like ((a & 1) + (a & 2)), which can
4230 become a & 3. */
4231
ac49a949 4232 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
4233 && (nonzero_bits (XEXP (x, 0), mode)
4234 & nonzero_bits (XEXP (x, 1), mode)) == 0)
085f1714
RH
4235 {
4236 /* Try to simplify the expression further. */
4237 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4238 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4239
4240 /* If we could, great. If not, do not go ahead with the IOR
4241 replacement, since PLUS appears in many special purpose
4242 address arithmetic instructions. */
4243 if (GET_CODE (temp) != CLOBBER && temp != tor)
4244 return temp;
4245 }
230d793d
RS
4246 break;
4247
4248 case MINUS:
0802d516
RK
4249 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4250 by reversing the comparison code if valid. */
4251 if (STORE_FLAG_VALUE == 1
4252 && XEXP (x, 0) == const1_rtx
5109d49f
RK
4253 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4254 && reversible_comparison_p (XEXP (x, 1)))
663522cb
KH
4255 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))), mode,
4256 XEXP (XEXP (x, 1), 0),
4257 XEXP (XEXP (x, 1), 1));
5109d49f 4258
230d793d
RS
4259 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4260 (and <foo> (const_int pow2-1)) */
4261 if (GET_CODE (XEXP (x, 1)) == AND
4262 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
663522cb 4263 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
230d793d 4264 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d 4265 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
663522cb 4266 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
4267
4268 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4269 integers. */
4270 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
4271 return gen_binary (MINUS, mode,
4272 gen_binary (MINUS, mode, XEXP (x, 0),
4273 XEXP (XEXP (x, 1), 0)),
4274 XEXP (XEXP (x, 1), 1));
230d793d
RS
4275 break;
4276
4277 case MULT:
4278 /* If we have (mult (plus A B) C), apply the distributive law and then
4279 the inverse distributive law to see if things simplify. This
4280 occurs mostly in addresses, often when unrolling loops. */
4281
4282 if (GET_CODE (XEXP (x, 0)) == PLUS)
4283 {
4284 x = apply_distributive_law
4285 (gen_binary (PLUS, mode,
4286 gen_binary (MULT, mode,
4287 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4288 gen_binary (MULT, mode,
3749f4ca
BS
4289 XEXP (XEXP (x, 0), 1),
4290 copy_rtx (XEXP (x, 1)))));
230d793d
RS
4291
4292 if (GET_CODE (x) != MULT)
8079805d 4293 return x;
230d793d 4294 }
230d793d
RS
4295 break;
4296
4297 case UDIV:
4298 /* If this is a divide by a power of two, treat it as a shift if
4299 its first operand is a shift. */
4300 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4301 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4302 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4303 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4304 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4305 || GET_CODE (XEXP (x, 0)) == ROTATE
4306 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 4307 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
4308 break;
4309
4310 case EQ: case NE:
4311 case GT: case GTU: case GE: case GEU:
4312 case LT: case LTU: case LE: case LEU:
4313 /* If the first operand is a condition code, we can't do anything
4314 with it. */
4315 if (GET_CODE (XEXP (x, 0)) == COMPARE
4316 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4317#ifdef HAVE_cc0
4318 && XEXP (x, 0) != cc0_rtx
4319#endif
663522cb 4320 ))
230d793d
RS
4321 {
4322 rtx op0 = XEXP (x, 0);
4323 rtx op1 = XEXP (x, 1);
4324 enum rtx_code new_code;
4325
4326 if (GET_CODE (op0) == COMPARE)
4327 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4328
4329 /* Simplify our comparison, if possible. */
4330 new_code = simplify_comparison (code, &op0, &op1);
4331
230d793d 4332 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4333 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4334 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4335 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4336 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4337 (plus X 1).
4338
4339 Remove any ZERO_EXTRACT we made when thinking this was a
4340 comparison. It may now be simpler to use, e.g., an AND. If a
4341 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4342 the call to make_compound_operation in the SET case. */
4343
0802d516
RK
4344 if (STORE_FLAG_VALUE == 1
4345 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
a191f0ee
RH
4346 && op1 == const0_rtx
4347 && mode == GET_MODE (op0)
4348 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4349 return gen_lowpart_for_combine (mode,
4350 expand_compound_operation (op0));
5109d49f 4351
0802d516
RK
4352 else if (STORE_FLAG_VALUE == 1
4353 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4354 && op1 == const0_rtx
a191f0ee 4355 && mode == GET_MODE (op0)
5109d49f
RK
4356 && (num_sign_bit_copies (op0, mode)
4357 == GET_MODE_BITSIZE (mode)))
4358 {
4359 op0 = expand_compound_operation (op0);
0c1c8ea6 4360 return gen_unary (NEG, mode, mode,
8079805d 4361 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4362 }
4363
0802d516
RK
4364 else if (STORE_FLAG_VALUE == 1
4365 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4366 && op1 == const0_rtx
a191f0ee 4367 && mode == GET_MODE (op0)
5109d49f 4368 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4369 {
4370 op0 = expand_compound_operation (op0);
8079805d
RK
4371 return gen_binary (XOR, mode,
4372 gen_lowpart_for_combine (mode, op0),
4373 const1_rtx);
5109d49f 4374 }
818b11b9 4375
0802d516
RK
4376 else if (STORE_FLAG_VALUE == 1
4377 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4378 && op1 == const0_rtx
a191f0ee 4379 && mode == GET_MODE (op0)
5109d49f
RK
4380 && (num_sign_bit_copies (op0, mode)
4381 == GET_MODE_BITSIZE (mode)))
4382 {
4383 op0 = expand_compound_operation (op0);
8079805d 4384 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4385 }
230d793d 4386
5109d49f
RK
4387 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4388 those above. */
0802d516
RK
4389 if (STORE_FLAG_VALUE == -1
4390 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4391 && op1 == const0_rtx
5109d49f
RK
4392 && (num_sign_bit_copies (op0, mode)
4393 == GET_MODE_BITSIZE (mode)))
4394 return gen_lowpart_for_combine (mode,
4395 expand_compound_operation (op0));
4396
0802d516
RK
4397 else if (STORE_FLAG_VALUE == -1
4398 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4399 && op1 == const0_rtx
a191f0ee 4400 && mode == GET_MODE (op0)
5109d49f
RK
4401 && nonzero_bits (op0, mode) == 1)
4402 {
4403 op0 = expand_compound_operation (op0);
0c1c8ea6 4404 return gen_unary (NEG, mode, mode,
8079805d 4405 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4406 }
4407
0802d516
RK
4408 else if (STORE_FLAG_VALUE == -1
4409 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4410 && op1 == const0_rtx
a191f0ee 4411 && mode == GET_MODE (op0)
5109d49f
RK
4412 && (num_sign_bit_copies (op0, mode)
4413 == GET_MODE_BITSIZE (mode)))
230d793d 4414 {
818b11b9 4415 op0 = expand_compound_operation (op0);
0c1c8ea6 4416 return gen_unary (NOT, mode, mode,
8079805d 4417 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4418 }
4419
4420 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4421 else if (STORE_FLAG_VALUE == -1
4422 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4423 && op1 == const0_rtx
a191f0ee 4424 && mode == GET_MODE (op0)
5109d49f
RK
4425 && nonzero_bits (op0, mode) == 1)
4426 {
4427 op0 = expand_compound_operation (op0);
8079805d 4428 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4429 }
230d793d
RS
4430
4431 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4432 one bit that might be nonzero, we can convert (ne x 0) to
4433 (ashift x c) where C puts the bit in the sign bit. Remove any
4434 AND with STORE_FLAG_VALUE when we are done, since we are only
4435 going to test the sign bit. */
3f508eca 4436 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4437 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4438 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4439 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
230d793d
RS
4440 && op1 == const0_rtx
4441 && mode == GET_MODE (op0)
5109d49f 4442 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4443 {
818b11b9
RK
4444 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4445 expand_compound_operation (op0),
230d793d
RS
4446 GET_MODE_BITSIZE (mode) - 1 - i);
4447 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4448 return XEXP (x, 0);
4449 else
4450 return x;
4451 }
4452
4453 /* If the code changed, return a whole new comparison. */
4454 if (new_code != code)
4455 return gen_rtx_combine (new_code, mode, op0, op1);
4456
663522cb 4457 /* Otherwise, keep this operation, but maybe change its operands.
230d793d
RS
4458 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4459 SUBST (XEXP (x, 0), op0);
4460 SUBST (XEXP (x, 1), op1);
4461 }
4462 break;
663522cb 4463
230d793d 4464 case IF_THEN_ELSE:
8079805d 4465 return simplify_if_then_else (x);
9210df58 4466
8079805d
RK
4467 case ZERO_EXTRACT:
4468 case SIGN_EXTRACT:
4469 case ZERO_EXTEND:
4470 case SIGN_EXTEND:
0f41302f 4471 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4472 if (in_dest)
4473 return x;
d0ab8cd3 4474
8079805d 4475 return expand_compound_operation (x);
d0ab8cd3 4476
8079805d
RK
4477 case SET:
4478 return simplify_set (x);
1a26b032 4479
8079805d
RK
4480 case AND:
4481 case IOR:
4482 case XOR:
4483 return simplify_logical (x, last);
d0ab8cd3 4484
663522cb 4485 case ABS:
8079805d
RK
4486 /* (abs (neg <foo>)) -> (abs <foo>) */
4487 if (GET_CODE (XEXP (x, 0)) == NEG)
4488 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4489
b472527b
JL
4490 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4491 do nothing. */
4492 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4493 break;
f40421ce 4494
8079805d
RK
4495 /* If operand is something known to be positive, ignore the ABS. */
4496 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4497 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4498 <= HOST_BITS_PER_WIDE_INT)
4499 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4500 & ((HOST_WIDE_INT) 1
4501 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4502 == 0)))
4503 return XEXP (x, 0);
1a26b032 4504
8079805d
RK
4505 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4506 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4507 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4508
8079805d 4509 break;
1a26b032 4510
8079805d
RK
4511 case FFS:
4512 /* (ffs (*_extend <X>)) = (ffs <X>) */
4513 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4514 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4515 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4516 break;
1a26b032 4517
8079805d
RK
4518 case FLOAT:
4519 /* (float (sign_extend <X>)) = (float <X>). */
4520 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4521 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4522 break;
1a26b032 4523
8079805d
RK
4524 case ASHIFT:
4525 case LSHIFTRT:
4526 case ASHIFTRT:
4527 case ROTATE:
4528 case ROTATERT:
4529 /* If this is a shift by a constant amount, simplify it. */
4530 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
663522cb 4531 return simplify_shift_const (x, code, mode, XEXP (x, 0),
8079805d
RK
4532 INTVAL (XEXP (x, 1)));
4533
4534#ifdef SHIFT_COUNT_TRUNCATED
4535 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4536 SUBST (XEXP (x, 1),
4537 force_to_mode (XEXP (x, 1), GET_MODE (x),
663522cb 4538 ((HOST_WIDE_INT) 1
8079805d
RK
4539 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4540 - 1,
4541 NULL_RTX, 0));
4542#endif
4543
4544 break;
e9a25f70 4545
82be40f7
BS
4546 case VEC_SELECT:
4547 {
4548 rtx op0 = XEXP (x, 0);
4549 rtx op1 = XEXP (x, 1);
4550 int len;
4551
4552 if (GET_CODE (op1) != PARALLEL)
4553 abort ();
4554 len = XVECLEN (op1, 0);
4555 if (len == 1
4556 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4557 && GET_CODE (op0) == VEC_CONCAT)
4558 {
4559 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4560
4561 /* Try to find the element in the VEC_CONCAT. */
4562 for (;;)
4563 {
4564 if (GET_MODE (op0) == GET_MODE (x))
4565 return op0;
4566 if (GET_CODE (op0) == VEC_CONCAT)
4567 {
4568 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4569 if (op0_size < offset)
4570 op0 = XEXP (op0, 0);
4571 else
4572 {
4573 offset -= op0_size;
4574 op0 = XEXP (op0, 1);
4575 }
4576 }
4577 else
4578 break;
4579 }
4580 }
4581 }
4582
4583 break;
4584
e9a25f70
JL
4585 default:
4586 break;
8079805d
RK
4587 }
4588
4589 return x;
4590}
4591\f
4592/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4593
8079805d
RK
4594static rtx
4595simplify_if_then_else (x)
4596 rtx x;
4597{
4598 enum machine_mode mode = GET_MODE (x);
4599 rtx cond = XEXP (x, 0);
4600 rtx true = XEXP (x, 1);
4601 rtx false = XEXP (x, 2);
4602 enum rtx_code true_code = GET_CODE (cond);
4603 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4604 rtx temp;
4605 int i;
4606
0f41302f 4607 /* Simplify storing of the truth value. */
8079805d
RK
4608 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4609 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
663522cb 4610
0f41302f 4611 /* Also when the truth value has to be reversed. */
8079805d
RK
4612 if (comparison_p && reversible_comparison_p (cond)
4613 && true == const0_rtx && false == const_true_rtx)
4614 return gen_binary (reverse_condition (true_code),
4615 mode, XEXP (cond, 0), XEXP (cond, 1));
4616
4617 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4618 in it is being compared against certain values. Get the true and false
4619 comparisons and see if that says anything about the value of each arm. */
4620
4621 if (comparison_p && reversible_comparison_p (cond)
4622 && GET_CODE (XEXP (cond, 0)) == REG)
4623 {
4624 HOST_WIDE_INT nzb;
4625 rtx from = XEXP (cond, 0);
4626 enum rtx_code false_code = reverse_condition (true_code);
4627 rtx true_val = XEXP (cond, 1);
4628 rtx false_val = true_val;
4629 int swapped = 0;
9210df58 4630
8079805d 4631 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4632
8079805d 4633 if (false_code == EQ)
1a26b032 4634 {
8079805d
RK
4635 swapped = 1, true_code = EQ, false_code = NE;
4636 temp = true, true = false, false = temp;
4637 }
5109d49f 4638
8079805d
RK
4639 /* If we are comparing against zero and the expression being tested has
4640 only a single bit that might be nonzero, that is its value when it is
4641 not equal to zero. Similarly if it is known to be -1 or 0. */
4642
4643 if (true_code == EQ && true_val == const0_rtx
4644 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4645 false_code = EQ, false_val = GEN_INT (nzb);
4646 else if (true_code == EQ && true_val == const0_rtx
4647 && (num_sign_bit_copies (from, GET_MODE (from))
4648 == GET_MODE_BITSIZE (GET_MODE (from))))
4649 false_code = EQ, false_val = constm1_rtx;
4650
4651 /* Now simplify an arm if we know the value of the register in the
4652 branch and it is used in the arm. Be careful due to the potential
4653 of locally-shared RTL. */
4654
4655 if (reg_mentioned_p (from, true))
4656 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4657 pc_rtx, pc_rtx, 0, 0);
4658 if (reg_mentioned_p (from, false))
4659 false = subst (known_cond (copy_rtx (false), false_code,
4660 from, false_val),
4661 pc_rtx, pc_rtx, 0, 0);
4662
4663 SUBST (XEXP (x, 1), swapped ? false : true);
4664 SUBST (XEXP (x, 2), swapped ? true : false);
4665
4666 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4667 }
5109d49f 4668
8079805d
RK
4669 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4670 reversed, do so to avoid needing two sets of patterns for
4671 subtract-and-branch insns. Similarly if we have a constant in the true
4672 arm, the false arm is the same as the first operand of the comparison, or
4673 the false arm is more complicated than the true arm. */
4674
4675 if (comparison_p && reversible_comparison_p (cond)
663522cb 4676 && (true == pc_rtx
8079805d
RK
4677 || (CONSTANT_P (true)
4678 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4679 || true == const0_rtx
4680 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4681 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4682 || (GET_CODE (true) == SUBREG
4683 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4684 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4685 || reg_mentioned_p (true, false)
4686 || rtx_equal_p (false, XEXP (cond, 0))))
4687 {
4688 true_code = reverse_condition (true_code);
4689 SUBST (XEXP (x, 0),
4690 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4691 XEXP (cond, 1)));
5109d49f 4692
8079805d
RK
4693 SUBST (XEXP (x, 1), false);
4694 SUBST (XEXP (x, 2), true);
1a26b032 4695
8079805d 4696 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4697
0f41302f 4698 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4699 true_code = GET_CODE (cond);
4700 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4701 }
abe6e52f 4702
8079805d 4703 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4704
8079805d
RK
4705 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4706 return true;
1a26b032 4707
5be669c7
RK
4708 /* Convert a == b ? b : a to "a". */
4709 if (true_code == EQ && ! side_effects_p (cond)
6ff85fd8 4710 && (! FLOAT_MODE_P (mode) || flag_fast_math)
5be669c7
RK
4711 && rtx_equal_p (XEXP (cond, 0), false)
4712 && rtx_equal_p (XEXP (cond, 1), true))
4713 return false;
4714 else if (true_code == NE && ! side_effects_p (cond)
6ff85fd8 4715 && (! FLOAT_MODE_P (mode) || flag_fast_math)
5be669c7
RK
4716 && rtx_equal_p (XEXP (cond, 0), true)
4717 && rtx_equal_p (XEXP (cond, 1), false))
4718 return true;
4719
8079805d
RK
4720 /* Look for cases where we have (abs x) or (neg (abs X)). */
4721
4722 if (GET_MODE_CLASS (mode) == MODE_INT
4723 && GET_CODE (false) == NEG
4724 && rtx_equal_p (true, XEXP (false, 0))
4725 && comparison_p
4726 && rtx_equal_p (true, XEXP (cond, 0))
4727 && ! side_effects_p (true))
4728 switch (true_code)
4729 {
4730 case GT:
4731 case GE:
0c1c8ea6 4732 return gen_unary (ABS, mode, mode, true);
8079805d
RK
4733 case LT:
4734 case LE:
0c1c8ea6 4735 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
e9a25f70
JL
4736 default:
4737 break;
8079805d
RK
4738 }
4739
4740 /* Look for MIN or MAX. */
4741
34c8be72 4742 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
8079805d
RK
4743 && comparison_p
4744 && rtx_equal_p (XEXP (cond, 0), true)
4745 && rtx_equal_p (XEXP (cond, 1), false)
4746 && ! side_effects_p (cond))
4747 switch (true_code)
4748 {
4749 case GE:
4750 case GT:
4751 return gen_binary (SMAX, mode, true, false);
4752 case LE:
4753 case LT:
4754 return gen_binary (SMIN, mode, true, false);
4755 case GEU:
4756 case GTU:
4757 return gen_binary (UMAX, mode, true, false);
4758 case LEU:
4759 case LTU:
4760 return gen_binary (UMIN, mode, true, false);
e9a25f70
JL
4761 default:
4762 break;
8079805d 4763 }
663522cb 4764
8079805d
RK
4765 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4766 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4767 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4768 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4769 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4770 neither 1 or -1, but it isn't worth checking for. */
8079805d 4771
0802d516
RK
4772 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4773 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d
RK
4774 {
4775 rtx t = make_compound_operation (true, SET);
4776 rtx f = make_compound_operation (false, SET);
4777 rtx cond_op0 = XEXP (cond, 0);
4778 rtx cond_op1 = XEXP (cond, 1);
6a651371 4779 enum rtx_code op = NIL, extend_op = NIL;
8079805d 4780 enum machine_mode m = mode;
6a651371 4781 rtx z = 0, c1 = NULL_RTX;
8079805d 4782
8079805d
RK
4783 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4784 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4785 || GET_CODE (t) == ASHIFT
4786 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4787 && rtx_equal_p (XEXP (t, 0), f))
4788 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4789
4790 /* If an identity-zero op is commutative, check whether there
0f41302f 4791 would be a match if we swapped the operands. */
8079805d
RK
4792 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4793 || GET_CODE (t) == XOR)
4794 && rtx_equal_p (XEXP (t, 1), f))
4795 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4796 else if (GET_CODE (t) == SIGN_EXTEND
4797 && (GET_CODE (XEXP (t, 0)) == PLUS
4798 || GET_CODE (XEXP (t, 0)) == MINUS
4799 || GET_CODE (XEXP (t, 0)) == IOR
4800 || GET_CODE (XEXP (t, 0)) == XOR
4801 || GET_CODE (XEXP (t, 0)) == ASHIFT
4802 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4803 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4804 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4805 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4806 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4807 && (num_sign_bit_copies (f, GET_MODE (f))
4808 > (GET_MODE_BITSIZE (mode)
4809 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4810 {
4811 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4812 extend_op = SIGN_EXTEND;
4813 m = GET_MODE (XEXP (t, 0));
1a26b032 4814 }
8079805d
RK
4815 else if (GET_CODE (t) == SIGN_EXTEND
4816 && (GET_CODE (XEXP (t, 0)) == PLUS
4817 || GET_CODE (XEXP (t, 0)) == IOR
4818 || GET_CODE (XEXP (t, 0)) == XOR)
4819 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4820 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4821 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4822 && (num_sign_bit_copies (f, GET_MODE (f))
4823 > (GET_MODE_BITSIZE (mode)
4824 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4825 {
4826 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4827 extend_op = SIGN_EXTEND;
4828 m = GET_MODE (XEXP (t, 0));
4829 }
4830 else if (GET_CODE (t) == ZERO_EXTEND
4831 && (GET_CODE (XEXP (t, 0)) == PLUS
4832 || GET_CODE (XEXP (t, 0)) == MINUS
4833 || GET_CODE (XEXP (t, 0)) == IOR
4834 || GET_CODE (XEXP (t, 0)) == XOR
4835 || GET_CODE (XEXP (t, 0)) == ASHIFT
4836 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4837 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4838 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4839 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4840 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4841 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4842 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4843 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
8079805d
RK
4844 == 0))
4845 {
4846 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4847 extend_op = ZERO_EXTEND;
4848 m = GET_MODE (XEXP (t, 0));
4849 }
4850 else if (GET_CODE (t) == ZERO_EXTEND
4851 && (GET_CODE (XEXP (t, 0)) == PLUS
4852 || GET_CODE (XEXP (t, 0)) == IOR
4853 || GET_CODE (XEXP (t, 0)) == XOR)
4854 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4855 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4856 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4857 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4858 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4859 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
8079805d
RK
4860 == 0))
4861 {
4862 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4863 extend_op = ZERO_EXTEND;
4864 m = GET_MODE (XEXP (t, 0));
4865 }
663522cb 4866
8079805d
RK
4867 if (z)
4868 {
4869 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4870 pc_rtx, pc_rtx, 0, 0);
4871 temp = gen_binary (MULT, m, temp,
4872 gen_binary (MULT, m, c1, const_true_rtx));
4873 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4874 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4875
4876 if (extend_op != NIL)
0c1c8ea6 4877 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4878
4879 return temp;
4880 }
4881 }
224eeff2 4882
8079805d
RK
4883 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4884 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4885 negation of a single bit, we can convert this operation to a shift. We
4886 can actually do this more generally, but it doesn't seem worth it. */
4887
4888 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4889 && false == const0_rtx && GET_CODE (true) == CONST_INT
4890 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4891 && (i = exact_log2 (INTVAL (true))) >= 0)
4892 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4893 == GET_MODE_BITSIZE (mode))
663522cb 4894 && (i = exact_log2 (-INTVAL (true))) >= 0)))
8079805d
RK
4895 return
4896 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4897 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4898
8079805d
RK
4899 return x;
4900}
4901\f
4902/* Simplify X, a SET expression. Return the new expression. */
230d793d 4903
8079805d
RK
4904static rtx
4905simplify_set (x)
4906 rtx x;
4907{
4908 rtx src = SET_SRC (x);
4909 rtx dest = SET_DEST (x);
4910 enum machine_mode mode
4911 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4912 rtx other_insn;
4913 rtx *cc_use;
4914
4915 /* (set (pc) (return)) gets written as (return). */
4916 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4917 return src;
230d793d 4918
87e3e0c1
RK
4919 /* Now that we know for sure which bits of SRC we are using, see if we can
4920 simplify the expression for the object knowing that we only need the
4921 low-order bits. */
4922
4923 if (GET_MODE_CLASS (mode) == MODE_INT)
c5c76735 4924 {
e8dc6d50 4925 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
c5c76735
JL
4926 SUBST (SET_SRC (x), src);
4927 }
87e3e0c1 4928
8079805d
RK
4929 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4930 the comparison result and try to simplify it unless we already have used
4931 undobuf.other_insn. */
4932 if ((GET_CODE (src) == COMPARE
230d793d 4933#ifdef HAVE_cc0
8079805d 4934 || dest == cc0_rtx
230d793d 4935#endif
8079805d
RK
4936 )
4937 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4938 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4939 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4940 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4941 {
4942 enum rtx_code old_code = GET_CODE (*cc_use);
4943 enum rtx_code new_code;
4944 rtx op0, op1;
4945 int other_changed = 0;
4946 enum machine_mode compare_mode = GET_MODE (dest);
4947
4948 if (GET_CODE (src) == COMPARE)
4949 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4950 else
4951 op0 = src, op1 = const0_rtx;
230d793d 4952
8079805d
RK
4953 /* Simplify our comparison, if possible. */
4954 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4955
c141a106 4956#ifdef EXTRA_CC_MODES
8079805d
RK
4957 /* If this machine has CC modes other than CCmode, check to see if we
4958 need to use a different CC mode here. */
4959 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4960#endif /* EXTRA_CC_MODES */
230d793d 4961
c141a106 4962#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4963 /* If the mode changed, we have to change SET_DEST, the mode in the
4964 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4965 a hard register, just build new versions with the proper mode. If it
4966 is a pseudo, we lose unless it is only time we set the pseudo, in
4967 which case we can safely change its mode. */
4968 if (compare_mode != GET_MODE (dest))
4969 {
770ae6cc 4970 unsigned int regno = REGNO (dest);
38a448ca 4971 rtx new_dest = gen_rtx_REG (compare_mode, regno);
8079805d
RK
4972
4973 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 4974 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 4975 {
8079805d
RK
4976 if (regno >= FIRST_PSEUDO_REGISTER)
4977 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4978
8079805d
RK
4979 SUBST (SET_DEST (x), new_dest);
4980 SUBST (XEXP (*cc_use, 0), new_dest);
4981 other_changed = 1;
230d793d 4982
8079805d 4983 dest = new_dest;
230d793d 4984 }
8079805d 4985 }
230d793d
RS
4986#endif
4987
8079805d
RK
4988 /* If the code changed, we have to build a new comparison in
4989 undobuf.other_insn. */
4990 if (new_code != old_code)
4991 {
4992 unsigned HOST_WIDE_INT mask;
4993
4994 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4995 dest, const0_rtx));
4996
4997 /* If the only change we made was to change an EQ into an NE or
4998 vice versa, OP0 has only one bit that might be nonzero, and OP1
4999 is zero, check if changing the user of the condition code will
5000 produce a valid insn. If it won't, we can keep the original code
5001 in that insn by surrounding our operation with an XOR. */
5002
5003 if (((old_code == NE && new_code == EQ)
5004 || (old_code == EQ && new_code == NE))
5005 && ! other_changed && op1 == const0_rtx
5006 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5007 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 5008 {
8079805d 5009 rtx pat = PATTERN (other_insn), note = 0;
230d793d 5010
8e2f6e35 5011 if ((recog_for_combine (&pat, other_insn, &note) < 0
8079805d
RK
5012 && ! check_asm_operands (pat)))
5013 {
5014 PUT_CODE (*cc_use, old_code);
5015 other_insn = 0;
230d793d 5016
8079805d 5017 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 5018 }
230d793d
RS
5019 }
5020
8079805d
RK
5021 other_changed = 1;
5022 }
5023
5024 if (other_changed)
5025 undobuf.other_insn = other_insn;
230d793d
RS
5026
5027#ifdef HAVE_cc0
8079805d
RK
5028 /* If we are now comparing against zero, change our source if
5029 needed. If we do not use cc0, we always have a COMPARE. */
5030 if (op1 == const0_rtx && dest == cc0_rtx)
5031 {
5032 SUBST (SET_SRC (x), op0);
5033 src = op0;
5034 }
5035 else
230d793d
RS
5036#endif
5037
8079805d
RK
5038 /* Otherwise, if we didn't previously have a COMPARE in the
5039 correct mode, we need one. */
5040 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5041 {
5042 SUBST (SET_SRC (x),
5043 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
5044 src = SET_SRC (x);
230d793d
RS
5045 }
5046 else
5047 {
8079805d
RK
5048 /* Otherwise, update the COMPARE if needed. */
5049 SUBST (XEXP (src, 0), op0);
5050 SUBST (XEXP (src, 1), op1);
230d793d 5051 }
8079805d
RK
5052 }
5053 else
5054 {
5055 /* Get SET_SRC in a form where we have placed back any
5056 compound expressions. Then do the checks below. */
5057 src = make_compound_operation (src, SET);
5058 SUBST (SET_SRC (x), src);
5059 }
230d793d 5060
8079805d
RK
5061 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5062 and X being a REG or (subreg (reg)), we may be able to convert this to
663522cb 5063 (set (subreg:m2 x) (op)).
df62f951 5064
8079805d
RK
5065 We can always do this if M1 is narrower than M2 because that means that
5066 we only care about the low bits of the result.
df62f951 5067
8079805d 5068 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
9ec36da5 5069 perform a narrower operation than requested since the high-order bits will
8079805d
RK
5070 be undefined. On machine where it is defined, this transformation is safe
5071 as long as M1 and M2 have the same number of words. */
663522cb 5072
8079805d
RK
5073 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5074 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5075 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5076 / UNITS_PER_WORD)
5077 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5078 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 5079#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
5080 && (GET_MODE_SIZE (GET_MODE (src))
5081 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 5082#endif
02188693 5083#ifdef CLASS_CANNOT_CHANGE_MODE
f507a070
RK
5084 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5085 && (TEST_HARD_REG_BIT
02188693 5086 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
f507a070 5087 REGNO (dest)))
02188693
RH
5088 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5089 GET_MODE (SUBREG_REG (src))))
663522cb 5090#endif
8079805d
RK
5091 && (GET_CODE (dest) == REG
5092 || (GET_CODE (dest) == SUBREG
5093 && GET_CODE (SUBREG_REG (dest)) == REG)))
5094 {
5095 SUBST (SET_DEST (x),
5096 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5097 dest));
5098 SUBST (SET_SRC (x), SUBREG_REG (src));
5099
5100 src = SET_SRC (x), dest = SET_DEST (x);
5101 }
df62f951 5102
8baf60bb 5103#ifdef LOAD_EXTEND_OP
8079805d
RK
5104 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5105 would require a paradoxical subreg. Replace the subreg with a
0f41302f 5106 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
5107
5108 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5109 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5110 && SUBREG_WORD (src) == 0
5111 && (GET_MODE_SIZE (GET_MODE (src))
5112 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5113 && GET_CODE (SUBREG_REG (src)) == MEM)
5114 {
5115 SUBST (SET_SRC (x),
5116 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5117 GET_MODE (src), XEXP (src, 0)));
5118
5119 src = SET_SRC (x);
5120 }
230d793d
RS
5121#endif
5122
8079805d
RK
5123 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5124 are comparing an item known to be 0 or -1 against 0, use a logical
5125 operation instead. Check for one of the arms being an IOR of the other
5126 arm with some value. We compute three terms to be IOR'ed together. In
5127 practice, at most two will be nonzero. Then we do the IOR's. */
5128
5129 if (GET_CODE (dest) != PC
5130 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 5131 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
5132 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5133 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 5134 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
5135#ifdef HAVE_conditional_move
5136 && ! can_conditionally_move_p (GET_MODE (src))
5137#endif
8079805d
RK
5138 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5139 GET_MODE (XEXP (XEXP (src, 0), 0)))
5140 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5141 && ! side_effects_p (src))
5142 {
5143 rtx true = (GET_CODE (XEXP (src, 0)) == NE
5144 ? XEXP (src, 1) : XEXP (src, 2));
5145 rtx false = (GET_CODE (XEXP (src, 0)) == NE
5146 ? XEXP (src, 2) : XEXP (src, 1));
5147 rtx term1 = const0_rtx, term2, term3;
5148
5149 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
5150 term1 = false, true = XEXP (true, 1), false = const0_rtx;
5151 else if (GET_CODE (true) == IOR
5152 && rtx_equal_p (XEXP (true, 1), false))
5153 term1 = false, true = XEXP (true, 0), false = const0_rtx;
5154 else if (GET_CODE (false) == IOR
5155 && rtx_equal_p (XEXP (false, 0), true))
5156 term1 = true, false = XEXP (false, 1), true = const0_rtx;
5157 else if (GET_CODE (false) == IOR
5158 && rtx_equal_p (XEXP (false, 1), true))
5159 term1 = true, false = XEXP (false, 0), true = const0_rtx;
5160
5161 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
5162 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 5163 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
5164 XEXP (XEXP (src, 0), 0)),
5165 false);
5166
5167 SUBST (SET_SRC (x),
5168 gen_binary (IOR, GET_MODE (src),
5169 gen_binary (IOR, GET_MODE (src), term1, term2),
5170 term3));
5171
5172 src = SET_SRC (x);
5173 }
230d793d 5174
c5c76735
JL
5175#ifdef HAVE_conditional_arithmetic
5176 /* If we have conditional arithmetic and the operand of a SET is
5177 a conditional expression, replace this with an IF_THEN_ELSE.
5178 We can either have a conditional expression or a MULT of that expression
5179 with a constant. */
5180 if ((GET_RTX_CLASS (GET_CODE (src)) == '1'
5181 || GET_RTX_CLASS (GET_CODE (src)) == '2'
5182 || GET_RTX_CLASS (GET_CODE (src)) == 'c')
5183 && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<'
5184 || (GET_CODE (XEXP (src, 0)) == MULT
5185 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<'
5186 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT)))
5187 {
5188 rtx cond = XEXP (src, 0);
5189 rtx true_val = const1_rtx;
5190 rtx false_arm, true_arm;
5191
5192 if (GET_CODE (cond) == MULT)
5193 {
5194 true_val = XEXP (cond, 1);
5195 cond = XEXP (cond, 0);
5196 }
5197
5198 if (GET_RTX_CLASS (GET_CODE (src)) == '1')
5199 {
5200 true_arm = gen_unary (GET_CODE (src), GET_MODE (src),
5201 GET_MODE (XEXP (src, 0)), true_val);
5202 false_arm = gen_unary (GET_CODE (src), GET_MODE (src),
5203 GET_MODE (XEXP (src, 0)), const0_rtx);
5204 }
5205 else
5206 {
5207 true_arm = gen_binary (GET_CODE (src), GET_MODE (src),
5208 true_val, XEXP (src, 1));
5209 false_arm = gen_binary (GET_CODE (src), GET_MODE (src),
5210 const0_rtx, XEXP (src, 1));
5211 }
5212
5213 /* Canonicalize if true_arm is the simpler one. */
5214 if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o'
5215 && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o'
5216 && reversible_comparison_p (cond))
5217 {
5218 rtx temp = true_arm;
5219
5220 true_arm = false_arm;
5221 false_arm = temp;
5222
5223 cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)),
5224 GET_MODE (cond), XEXP (cond, 0),
5225 XEXP (cond, 1));
5226 }
5227
5228 src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src),
5229 gen_rtx_combine (GET_CODE (cond), VOIDmode,
5230 XEXP (cond, 0),
5231 XEXP (cond, 1)),
5232 true_arm, false_arm);
5233 SUBST (SET_SRC (x), src);
5234 }
5235#endif
5236
246e00f2
RK
5237 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5238 whole thing fail. */
5239 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5240 return src;
5241 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5242 return dest;
5243 else
5244 /* Convert this into a field assignment operation, if possible. */
5245 return make_field_assignment (x);
8079805d
RK
5246}
5247\f
5248/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
5249 result. LAST is nonzero if this is the last retry. */
5250
5251static rtx
5252simplify_logical (x, last)
5253 rtx x;
5254 int last;
5255{
5256 enum machine_mode mode = GET_MODE (x);
5257 rtx op0 = XEXP (x, 0);
5258 rtx op1 = XEXP (x, 1);
5259
5260 switch (GET_CODE (x))
5261 {
230d793d 5262 case AND:
663522cb 5263 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
8079805d
RK
5264 insn (and may simplify more). */
5265 if (GET_CODE (op0) == XOR
5266 && rtx_equal_p (XEXP (op0, 0), op1)
5267 && ! side_effects_p (op1))
0c1c8ea6
RK
5268 x = gen_binary (AND, mode,
5269 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
5270
5271 if (GET_CODE (op0) == XOR
5272 && rtx_equal_p (XEXP (op0, 1), op1)
5273 && ! side_effects_p (op1))
0c1c8ea6
RK
5274 x = gen_binary (AND, mode,
5275 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d 5276
663522cb 5277 /* Similarly for (~(A ^ B)) & A. */
8079805d
RK
5278 if (GET_CODE (op0) == NOT
5279 && GET_CODE (XEXP (op0, 0)) == XOR
5280 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5281 && ! side_effects_p (op1))
5282 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5283
5284 if (GET_CODE (op0) == NOT
5285 && GET_CODE (XEXP (op0, 0)) == XOR
5286 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5287 && ! side_effects_p (op1))
5288 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5289
2e8f9abf
DM
5290 /* We can call simplify_and_const_int only if we don't lose
5291 any (sign) bits when converting INTVAL (op1) to
5292 "unsigned HOST_WIDE_INT". */
5293 if (GET_CODE (op1) == CONST_INT
5294 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5295 || INTVAL (op1) > 0))
230d793d 5296 {
8079805d 5297 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
5298
5299 /* If we have (ior (and (X C1) C2)) and the next restart would be
5300 the last, simplify this by making C1 as small as possible
0f41302f 5301 and then exit. */
8079805d
RK
5302 if (last
5303 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5304 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5305 && GET_CODE (op1) == CONST_INT)
5306 return gen_binary (IOR, mode,
5307 gen_binary (AND, mode, XEXP (op0, 0),
5308 GEN_INT (INTVAL (XEXP (op0, 1))
663522cb 5309 & ~INTVAL (op1))), op1);
230d793d
RS
5310
5311 if (GET_CODE (x) != AND)
8079805d 5312 return x;
0e32506c 5313
663522cb 5314 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
0e32506c
RK
5315 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5316 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
5317 }
5318
5319 /* Convert (A | B) & A to A. */
8079805d
RK
5320 if (GET_CODE (op0) == IOR
5321 && (rtx_equal_p (XEXP (op0, 0), op1)
5322 || rtx_equal_p (XEXP (op0, 1), op1))
5323 && ! side_effects_p (XEXP (op0, 0))
5324 && ! side_effects_p (XEXP (op0, 1)))
5325 return op1;
230d793d 5326
d0ab8cd3 5327 /* In the following group of tests (and those in case IOR below),
230d793d
RS
5328 we start with some combination of logical operations and apply
5329 the distributive law followed by the inverse distributive law.
5330 Most of the time, this results in no change. However, if some of
5331 the operands are the same or inverses of each other, simplifications
5332 will result.
5333
5334 For example, (and (ior A B) (not B)) can occur as the result of
5335 expanding a bit field assignment. When we apply the distributive
5336 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
663522cb 5337 which then simplifies to (and (A (not B))).
230d793d 5338
8079805d 5339 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
5340 the inverse distributive law to see if things simplify. */
5341
8079805d 5342 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
5343 {
5344 x = apply_distributive_law
8079805d
RK
5345 (gen_binary (GET_CODE (op0), mode,
5346 gen_binary (AND, mode, XEXP (op0, 0), op1),
3749f4ca
BS
5347 gen_binary (AND, mode, XEXP (op0, 1),
5348 copy_rtx (op1))));
230d793d 5349 if (GET_CODE (x) != AND)
8079805d 5350 return x;
230d793d
RS
5351 }
5352
8079805d
RK
5353 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5354 return apply_distributive_law
5355 (gen_binary (GET_CODE (op1), mode,
5356 gen_binary (AND, mode, XEXP (op1, 0), op0),
3749f4ca
BS
5357 gen_binary (AND, mode, XEXP (op1, 1),
5358 copy_rtx (op0))));
230d793d
RS
5359
5360 /* Similarly, taking advantage of the fact that
5361 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5362
8079805d
RK
5363 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5364 return apply_distributive_law
5365 (gen_binary (XOR, mode,
5366 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
3749f4ca
BS
5367 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5368 XEXP (op1, 1))));
663522cb 5369
8079805d
RK
5370 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5371 return apply_distributive_law
5372 (gen_binary (XOR, mode,
5373 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
3749f4ca 5374 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
230d793d
RS
5375 break;
5376
5377 case IOR:
951553af 5378 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 5379 if (GET_CODE (op1) == CONST_INT
ac49a949 5380 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 5381 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
8079805d 5382 return op1;
d0ab8cd3 5383
230d793d 5384 /* Convert (A & B) | A to A. */
8079805d
RK
5385 if (GET_CODE (op0) == AND
5386 && (rtx_equal_p (XEXP (op0, 0), op1)
5387 || rtx_equal_p (XEXP (op0, 1), op1))
5388 && ! side_effects_p (XEXP (op0, 0))
5389 && ! side_effects_p (XEXP (op0, 1)))
5390 return op1;
230d793d
RS
5391
5392 /* If we have (ior (and A B) C), apply the distributive law and then
5393 the inverse distributive law to see if things simplify. */
5394
8079805d 5395 if (GET_CODE (op0) == AND)
230d793d
RS
5396 {
5397 x = apply_distributive_law
5398 (gen_binary (AND, mode,
8079805d 5399 gen_binary (IOR, mode, XEXP (op0, 0), op1),
3749f4ca
BS
5400 gen_binary (IOR, mode, XEXP (op0, 1),
5401 copy_rtx (op1))));
230d793d
RS
5402
5403 if (GET_CODE (x) != IOR)
8079805d 5404 return x;
230d793d
RS
5405 }
5406
8079805d 5407 if (GET_CODE (op1) == AND)
230d793d
RS
5408 {
5409 x = apply_distributive_law
5410 (gen_binary (AND, mode,
8079805d 5411 gen_binary (IOR, mode, XEXP (op1, 0), op0),
3749f4ca
BS
5412 gen_binary (IOR, mode, XEXP (op1, 1),
5413 copy_rtx (op0))));
230d793d
RS
5414
5415 if (GET_CODE (x) != IOR)
8079805d 5416 return x;
230d793d
RS
5417 }
5418
5419 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5420 mode size to (rotate A CX). */
5421
8079805d
RK
5422 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5423 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5424 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5425 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5426 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5427 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 5428 == GET_MODE_BITSIZE (mode)))
38a448ca
RH
5429 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5430 (GET_CODE (op0) == ASHIFT
5431 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 5432
71923da7
RK
5433 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5434 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5435 does not affect any of the bits in OP1, it can really be done
5436 as a PLUS and we can associate. We do this by seeing if OP1
5437 can be safely shifted left C bits. */
5438 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5439 && GET_CODE (XEXP (op0, 0)) == PLUS
5440 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5441 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5442 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5443 {
5444 int count = INTVAL (XEXP (op0, 1));
5445 HOST_WIDE_INT mask = INTVAL (op1) << count;
5446
5447 if (mask >> count == INTVAL (op1)
5448 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5449 {
5450 SUBST (XEXP (XEXP (op0, 0), 1),
5451 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5452 return op0;
5453 }
5454 }
230d793d
RS
5455 break;
5456
5457 case XOR:
79e8185c
JH
5458 /* If we are XORing two things that have no bits in common,
5459 convert them into an IOR. This helps to detect rotation encoded
5460 using those methods and possibly other simplifications. */
5461
5462 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5463 && (nonzero_bits (op0, mode)
5464 & nonzero_bits (op1, mode)) == 0)
5465 return (gen_binary (IOR, mode, op0, op1));
5466
230d793d
RS
5467 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5468 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5469 (NOT y). */
5470 {
5471 int num_negated = 0;
230d793d 5472
8079805d
RK
5473 if (GET_CODE (op0) == NOT)
5474 num_negated++, op0 = XEXP (op0, 0);
5475 if (GET_CODE (op1) == NOT)
5476 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
5477
5478 if (num_negated == 2)
5479 {
8079805d
RK
5480 SUBST (XEXP (x, 0), op0);
5481 SUBST (XEXP (x, 1), op1);
230d793d
RS
5482 }
5483 else if (num_negated == 1)
0c1c8ea6 5484 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
5485 }
5486
5487 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5488 correspond to a machine insn or result in further simplifications
5489 if B is a constant. */
5490
8079805d
RK
5491 if (GET_CODE (op0) == AND
5492 && rtx_equal_p (XEXP (op0, 1), op1)
5493 && ! side_effects_p (op1))
0c1c8ea6
RK
5494 return gen_binary (AND, mode,
5495 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 5496 op1);
230d793d 5497
8079805d
RK
5498 else if (GET_CODE (op0) == AND
5499 && rtx_equal_p (XEXP (op0, 0), op1)
5500 && ! side_effects_p (op1))
0c1c8ea6
RK
5501 return gen_binary (AND, mode,
5502 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 5503 op1);
230d793d 5504
230d793d 5505 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
5506 comparison if STORE_FLAG_VALUE is 1. */
5507 if (STORE_FLAG_VALUE == 1
5508 && op1 == const1_rtx
8079805d
RK
5509 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5510 && reversible_comparison_p (op0))
5511 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5512 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
5513
5514 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5515 is (lt foo (const_int 0)), so we can perform the above
0802d516 5516 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5517
0802d516
RK
5518 if (STORE_FLAG_VALUE == 1
5519 && op1 == const1_rtx
8079805d
RK
5520 && GET_CODE (op0) == LSHIFTRT
5521 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5522 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5523 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
5524
5525 /* (xor (comparison foo bar) (const_int sign-bit))
5526 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5527 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5528 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 5529 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
5530 && op1 == const_true_rtx
5531 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5532 && reversible_comparison_p (op0))
5533 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5534 mode, XEXP (op0, 0), XEXP (op0, 1));
0918eca0 5535
230d793d 5536 break;
e9a25f70
JL
5537
5538 default:
5539 abort ();
230d793d
RS
5540 }
5541
5542 return x;
5543}
5544\f
5545/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5546 operations" because they can be replaced with two more basic operations.
5547 ZERO_EXTEND is also considered "compound" because it can be replaced with
5548 an AND operation, which is simpler, though only one operation.
5549
5550 The function expand_compound_operation is called with an rtx expression
663522cb 5551 and will convert it to the appropriate shifts and AND operations,
230d793d
RS
5552 simplifying at each stage.
5553
5554 The function make_compound_operation is called to convert an expression
5555 consisting of shifts and ANDs into the equivalent compound expression.
5556 It is the inverse of this function, loosely speaking. */
5557
5558static rtx
5559expand_compound_operation (x)
5560 rtx x;
5561{
770ae6cc 5562 unsigned HOST_WIDE_INT pos = 0, len;
230d793d 5563 int unsignedp = 0;
770ae6cc 5564 unsigned int modewidth;
230d793d
RS
5565 rtx tem;
5566
5567 switch (GET_CODE (x))
5568 {
5569 case ZERO_EXTEND:
5570 unsignedp = 1;
5571 case SIGN_EXTEND:
75473182
RS
5572 /* We can't necessarily use a const_int for a multiword mode;
5573 it depends on implicitly extending the value.
5574 Since we don't know the right way to extend it,
5575 we can't tell whether the implicit way is right.
5576
5577 Even for a mode that is no wider than a const_int,
5578 we can't win, because we need to sign extend one of its bits through
5579 the rest of it, and we don't know which bit. */
230d793d 5580 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5581 return x;
230d793d 5582
8079805d
RK
5583 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5584 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5585 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5586 reloaded. If not for that, MEM's would very rarely be safe.
5587
5588 Reject MODEs bigger than a word, because we might not be able
5589 to reference a two-register group starting with an arbitrary register
5590 (and currently gen_lowpart might crash for a SUBREG). */
663522cb 5591
8079805d 5592 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5593 return x;
5594
5595 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5596 /* If the inner object has VOIDmode (the only way this can happen
5597 is if it is a ASM_OPERANDS), we can't do anything since we don't
5598 know how much masking to do. */
5599 if (len == 0)
5600 return x;
5601
5602 break;
5603
5604 case ZERO_EXTRACT:
5605 unsignedp = 1;
5606 case SIGN_EXTRACT:
5607 /* If the operand is a CLOBBER, just return it. */
5608 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5609 return XEXP (x, 0);
5610
5611 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5612 || GET_CODE (XEXP (x, 2)) != CONST_INT
5613 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5614 return x;
5615
5616 len = INTVAL (XEXP (x, 1));
5617 pos = INTVAL (XEXP (x, 2));
5618
5619 /* If this goes outside the object being extracted, replace the object
5620 with a (use (mem ...)) construct that only combine understands
5621 and is used only for this purpose. */
5622 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5623 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5624
f76b9db2
ILT
5625 if (BITS_BIG_ENDIAN)
5626 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5627
230d793d
RS
5628 break;
5629
5630 default:
5631 return x;
5632 }
0f808b6f
JH
5633 /* Convert sign extension to zero extension, if we know that the high
5634 bit is not set, as this is easier to optimize. It will be converted
5635 back to cheaper alternative in make_extraction. */
5636 if (GET_CODE (x) == SIGN_EXTEND
5637 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5638 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
663522cb 5639 & ~(((unsigned HOST_WIDE_INT)
0f808b6f
JH
5640 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5641 >> 1))
5642 == 0)))
5643 {
5644 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5645 return expand_compound_operation (temp);
5646 }
230d793d 5647
0f13a422
ILT
5648 /* We can optimize some special cases of ZERO_EXTEND. */
5649 if (GET_CODE (x) == ZERO_EXTEND)
5650 {
5651 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5652 know that the last value didn't have any inappropriate bits
5653 set. */
5654 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5655 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5656 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5657 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
663522cb 5658 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5659 return XEXP (XEXP (x, 0), 0);
5660
5661 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5662 if (GET_CODE (XEXP (x, 0)) == SUBREG
5663 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5664 && subreg_lowpart_p (XEXP (x, 0))
5665 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5666 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
663522cb 5667 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5668 return SUBREG_REG (XEXP (x, 0));
5669
5670 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5671 is a comparison and STORE_FLAG_VALUE permits. This is like
5672 the first case, but it works even when GET_MODE (x) is larger
5673 than HOST_WIDE_INT. */
5674 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5675 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5676 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5677 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5678 <= HOST_BITS_PER_WIDE_INT)
5679 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5680 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5681 return XEXP (XEXP (x, 0), 0);
5682
5683 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5684 if (GET_CODE (XEXP (x, 0)) == SUBREG
5685 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5686 && subreg_lowpart_p (XEXP (x, 0))
5687 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5688 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5689 <= HOST_BITS_PER_WIDE_INT)
5690 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5691 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5692 return SUBREG_REG (XEXP (x, 0));
5693
0f13a422
ILT
5694 }
5695
230d793d
RS
5696 /* If we reach here, we want to return a pair of shifts. The inner
5697 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5698 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5699 logical depending on the value of UNSIGNEDP.
5700
5701 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5702 converted into an AND of a shift.
5703
5704 We must check for the case where the left shift would have a negative
5705 count. This can happen in a case like (x >> 31) & 255 on machines
5706 that can't shift by a constant. On those machines, we would first
663522cb 5707 combine the shift with the AND to produce a variable-position
230d793d
RS
5708 extraction. Then the constant of 31 would be substituted in to produce
5709 a such a position. */
5710
5711 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
770ae6cc 5712 if (modewidth + len >= pos)
5f4f0e22 5713 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5714 GET_MODE (x),
5f4f0e22
CH
5715 simplify_shift_const (NULL_RTX, ASHIFT,
5716 GET_MODE (x),
230d793d
RS
5717 XEXP (x, 0),
5718 modewidth - pos - len),
5719 modewidth - len);
5720
5f4f0e22
CH
5721 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5722 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5723 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5724 GET_MODE (x),
5725 XEXP (x, 0), pos),
5f4f0e22 5726 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5727 else
5728 /* Any other cases we can't handle. */
5729 return x;
230d793d
RS
5730
5731 /* If we couldn't do this for some reason, return the original
5732 expression. */
5733 if (GET_CODE (tem) == CLOBBER)
5734 return x;
5735
5736 return tem;
5737}
5738\f
5739/* X is a SET which contains an assignment of one object into
5740 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5741 or certain SUBREGS). If possible, convert it into a series of
5742 logical operations.
5743
5744 We half-heartedly support variable positions, but do not at all
5745 support variable lengths. */
5746
static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;			/* Object containing the field being assigned.  */
  rtx pos;			/* Always counts from low bit.  */
  int len;			/* Width of the field, in bits.  */
  rtx mask;			/* CONST_INT with the low LEN bits set.  */
  enum machine_mode compute_mode;	/* Mode in which the logical ops are built.  */

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      /* Case 1: (strict_low_part (subreg ...)) -- the field is the whole
	 inner mode of the SUBREG, positioned at the subreg's word offset.  */
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
	}
      /* Case 2: (zero_extract OBJ LEN POS) with constant LEN.  POS may
	 still be a variable rtx; only the length must be constant.  */
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  /* ZERO_EXTRACT positions follow BITS_BIG_ENDIAN; convert POS to
	     count from the low bit, as the shift-building code below
	     expects.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       /* We need SUBREGs to compute nonzero_bits properly.  */
	       && nonzero_sign_valid
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  continue;
	}
      else
	break;

      /* Strip any low-part SUBREGs so we operate on the real object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-integral modes.  */
      if (! INTEGRAL_MODE_P (compute_mode))
	{
	  enum machine_mode imode;

	  /* Something is probably seriously wrong if this matches.  */
	  if (! FLOAT_MODE_P (compute_mode))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
	  if (imode == BLKmode)
	    break;

	  compute_mode = imode;
	  inner = gen_lowpart_for_combine (imode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.
	 A wider field can't be expressed as a host-word constant, so give
	 up in that case.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.

	 The result is:  DEST = (DEST & ~(MASK << POS))
			      | ((SRC & MASK) << POS)
	 i.e. clear the field, then OR in the masked source.  */
      x = gen_rtx_SET
	(VOIDmode, copy_rtx (inner),
	 gen_binary (IOR, compute_mode,
		     gen_binary (AND, compute_mode,
				 gen_unary (NOT, compute_mode,
					    compute_mode,
					    gen_binary (ASHIFT,
							compute_mode,
							mask, pos)),
				 inner),
		     gen_binary (ASHIFT, compute_mode,
				 gen_binary (AND, compute_mode,
					     gen_lowpart_for_combine
					     (compute_mode, SET_SRC (x)),
					     mask),
				 pos)));
    }

  return x;
}
5870\f
8999a12e
RK
5871/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5872 it is an RTX that represents a variable starting position; otherwise,
5873 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5874
5875 INNER may be a USE. This will occur when we started with a bitfield
5876 that went outside the boundary of the object in memory, which is
5877 allowed on most machines. To isolate this case, we produce a USE
5878 whose mode is wide enough and surround the MEM with it. The only
5879 code that understands the USE is this routine. If it is not removed,
5880 it will cause the resulting insn not to match.
5881
663522cb 5882 UNSIGNEDP is non-zero for an unsigned reference and zero for a
230d793d
RS
5883 signed reference.
5884
5885 IN_DEST is non-zero if this is a reference in the destination of a
5886 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5887 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5888 be used.
5889
5890 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5891 ZERO_EXTRACT should be built even for bits starting at bit 0.
5892
76184def
DE
5893 MODE is the desired mode of the result (if IN_DEST == 0).
5894
5895 The result is an RTX for the extraction or NULL_RTX if the target
5896 can't handle it. */
230d793d
RS
5897
static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     HOST_WIDE_INT pos;
     rtx pos_rtx;
     unsigned HOST_WIDE_INT len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  /* The integer mode exactly LEN bits wide, or BLKmode if none exists.  */
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  /* POS before any BITS_BIG_ENDIAN conversion below.  */
  HOST_WIDE_INT orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  /* Canonicalize: a constant position is carried in POS, not POS_RTX.  */
  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
	  MEM_COPY_ATTRIBUTES (new, inner);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    new = gen_rtx_SUBREG (tmode, inner,
				  (WORDS_BIG_ENDIAN
				   && (GET_MODE_SIZE (inner_mode)
				       > UNITS_PER_WORD)
				   ? (((GET_MODE_SIZE (inner_mode)
					- GET_MODE_SIZE (tmode))
				       / UNITS_PER_WORD)
				      - pos / BITS_PER_WORD)
				   : pos / BITS_PER_WORD));
	  else
	    new = inner;
	}
      else
	/* Neither MEM nor REG: mask the value down to the low LEN bits
	   in TMODE.  */
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      if (mode == tmode)
	return new;

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (new, tmode)
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (tmode))
		       >> 1))
		  == 0)))
	{
	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
	    return temp;
	  return temp1;
	}

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			       mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte && GET_CODE (inner) == MEM
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  Each of the following #ifdef sections
     pulls the operand modes from the target's bit-field insn pattern
     (insv for stores, extzv/extv for unsigned/signed loads), falling
     back to word_mode when the pattern leaves a mode unspecified.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_insv].operand[0].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extzv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			     GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      /* Mixed bit/byte endianness: count the offset from the other end.  */
      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	{
	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
				    plus_constant (XEXP (inner, 0), offset));

	  MEM_COPY_ATTRIBUTES (newmem, inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode. If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      /* Mask to just the bits the extraction will read; with a variable
	 or too-wide position, keep every bit.  */
      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
				<< orig_pos),
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert extraction to cheaper one - either
	 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
	 cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
	    temp = temp1;
	}
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
6294\f
71923da7
RK
6295/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6296 with any other operations in X. Return X without that shift if so. */
6297
6298static rtx
6299extract_left_shift (x, count)
6300 rtx x;
6301 int count;
6302{
6303 enum rtx_code code = GET_CODE (x);
6304 enum machine_mode mode = GET_MODE (x);
6305 rtx tem;
6306
6307 switch (code)
6308 {
6309 case ASHIFT:
6310 /* This is the shift itself. If it is wide enough, we will return
6311 either the value being shifted if the shift count is equal to
6312 COUNT or a shift for the difference. */
6313 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6314 && INTVAL (XEXP (x, 1)) >= count)
6315 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6316 INTVAL (XEXP (x, 1)) - count);
6317 break;
6318
6319 case NEG: case NOT:
6320 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 6321 return gen_unary (code, mode, mode, tem);
71923da7
RK
6322
6323 break;
6324
6325 case PLUS: case IOR: case XOR: case AND:
6326 /* If we can safely shift this constant and we find the inner shift,
6327 make a new operation. */
6328 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 6329 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7 6330 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
663522cb 6331 return gen_binary (code, mode, tem,
71923da7
RK
6332 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6333
6334 break;
663522cb 6335
e9a25f70
JL
6336 default:
6337 break;
71923da7
RK
6338 }
6339
6340 return 0;
6341}
6342\f
230d793d
RS
6343/* Look at the expression rooted at X. Look for expressions
6344 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6345 Form these expressions.
6346
6347 Return the new rtx, usually just X.
6348
6349 Also, for machines like the Vax that don't have logical shift insns,
6350 try to convert logical to arithmetic shift operations in cases where
6351 they are equivalent. This undoes the canonicalizations to logical
6352 shifts done elsewhere.
6353
6354 We try, as much as possible, to re-use rtl expressions to save memory.
6355
6356 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
6357 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
6358 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
6359 or a COMPARE against zero, it is COMPARE. */
6360
6361static rtx
6362make_compound_operation (x, in_code)
6363 rtx x;
6364 enum rtx_code in_code;
6365{
6366 enum rtx_code code = GET_CODE (x);
6367 enum machine_mode mode = GET_MODE (x);
6368 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 6369 rtx rhs, lhs;
230d793d 6370 enum rtx_code next_code;
f24ad0e4 6371 int i;
230d793d 6372 rtx new = 0;
280f58ba 6373 rtx tem;
6f7d635c 6374 const char *fmt;
230d793d
RS
6375
6376 /* Select the code to be used in recursive calls. Once we are inside an
6377 address, we stay there. If we have a comparison, set to COMPARE,
6378 but once inside, go back to our default of SET. */
6379
42495ca0 6380 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
6381 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6382 && XEXP (x, 1) == const0_rtx) ? COMPARE
6383 : in_code == COMPARE ? SET : in_code);
6384
6385 /* Process depending on the code of this operation. If NEW is set
6386 non-zero, it will be returned. */
6387
6388 switch (code)
6389 {
6390 case ASHIFT:
230d793d
RS
6391 /* Convert shifts by constants into multiplications if inside
6392 an address. */
6393 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6394 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 6395 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
6396 {
6397 new = make_compound_operation (XEXP (x, 0), next_code);
6398 new = gen_rtx_combine (MULT, mode, new,
6399 GEN_INT ((HOST_WIDE_INT) 1
6400 << INTVAL (XEXP (x, 1))));
6401 }
230d793d
RS
6402 break;
6403
6404 case AND:
6405 /* If the second operand is not a constant, we can't do anything
6406 with it. */
6407 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6408 break;
6409
6410 /* If the constant is a power of two minus one and the first operand
6411 is a logical right shift, make an extraction. */
6412 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6413 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6414 {
6415 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6416 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6417 0, in_code == COMPARE);
6418 }
dfbe1b2f 6419
230d793d
RS
6420 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6421 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6422 && subreg_lowpart_p (XEXP (x, 0))
6423 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6424 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6425 {
6426 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6427 next_code);
2f99f437 6428 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
6429 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6430 0, in_code == COMPARE);
6431 }
45620ed4 6432 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
6433 else if ((GET_CODE (XEXP (x, 0)) == XOR
6434 || GET_CODE (XEXP (x, 0)) == IOR)
6435 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6436 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6437 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6438 {
6439 /* Apply the distributive law, and then try to make extractions. */
6440 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
38a448ca
RH
6441 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6442 XEXP (x, 1)),
6443 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6444 XEXP (x, 1)));
c2f9f64e
JW
6445 new = make_compound_operation (new, in_code);
6446 }
a7c99304
RK
6447
6448 /* If we are have (and (rotate X C) M) and C is larger than the number
6449 of bits in M, this is an extraction. */
6450
6451 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6452 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6453 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6454 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
6455 {
6456 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6457 new = make_extraction (mode, new,
6458 (GET_MODE_BITSIZE (mode)
6459 - INTVAL (XEXP (XEXP (x, 0), 1))),
6460 NULL_RTX, i, 1, 0, in_code == COMPARE);
6461 }
a7c99304
RK
6462
6463 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
6464 a logical shift and our mask turns off all the propagated sign
6465 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
6466 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6467 && (lshr_optab->handlers[(int) mode].insn_code
6468 == CODE_FOR_nothing)
230d793d
RS
6469 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6470 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6471 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
6472 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6473 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 6474 {
5f4f0e22 6475 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
6476
6477 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6478 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6479 SUBST (XEXP (x, 0),
280f58ba
RK
6480 gen_rtx_combine (ASHIFTRT, mode,
6481 make_compound_operation (XEXP (XEXP (x, 0), 0),
6482 next_code),
230d793d
RS
6483 XEXP (XEXP (x, 0), 1)));
6484 }
6485
6486 /* If the constant is one less than a power of two, this might be
6487 representable by an extraction even if no shift is present.
6488 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6489 we are in a COMPARE. */
6490 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6491 new = make_extraction (mode,
6492 make_compound_operation (XEXP (x, 0),
6493 next_code),
6494 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
6495
6496 /* If we are in a comparison and this is an AND with a power of two,
6497 convert this into the appropriate bit extract. */
6498 else if (in_code == COMPARE
6499 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
6500 new = make_extraction (mode,
6501 make_compound_operation (XEXP (x, 0),
6502 next_code),
6503 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
6504
6505 break;
6506
6507 case LSHIFTRT:
6508 /* If the sign bit is known to be zero, replace this with an
6509 arithmetic shift. */
d0ab8cd3
RK
6510 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6511 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 6512 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 6513 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 6514 {
280f58ba
RK
6515 new = gen_rtx_combine (ASHIFTRT, mode,
6516 make_compound_operation (XEXP (x, 0),
6517 next_code),
6518 XEXP (x, 1));
230d793d
RS
6519 break;
6520 }
6521
0f41302f 6522 /* ... fall through ... */
230d793d
RS
6523
6524 case ASHIFTRT:
71923da7
RK
6525 lhs = XEXP (x, 0);
6526 rhs = XEXP (x, 1);
6527
230d793d
RS
6528 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6529 this is a SIGN_EXTRACT. */
71923da7
RK
6530 if (GET_CODE (rhs) == CONST_INT
6531 && GET_CODE (lhs) == ASHIFT
6532 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6533 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 6534 {
71923da7 6535 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 6536 new = make_extraction (mode, new,
71923da7
RK
6537 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6538 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3 6539 code == LSHIFTRT, 0, in_code == COMPARE);
8231ad94 6540 break;
d0ab8cd3
RK
6541 }
6542
71923da7
RK
6543 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6544 If so, try to merge the shifts into a SIGN_EXTEND. We could
6545 also do this for some cases of SIGN_EXTRACT, but it doesn't
6546 seem worth the effort; the case checked for occurs on Alpha. */
663522cb 6547
71923da7
RK
6548 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6549 && ! (GET_CODE (lhs) == SUBREG
6550 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6551 && GET_CODE (rhs) == CONST_INT
6552 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6553 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6554 new = make_extraction (mode, make_compound_operation (new, next_code),
6555 0, NULL_RTX, mode_width - INTVAL (rhs),
6556 code == LSHIFTRT, 0, in_code == COMPARE);
663522cb 6557
230d793d 6558 break;
280f58ba
RK
6559
6560 case SUBREG:
6561 /* Call ourselves recursively on the inner expression. If we are
6562 narrowing the object and it has a different RTL code from
6563 what it originally did, do this SUBREG as a force_to_mode. */
6564
0a5cbff6 6565 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
6566 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6567 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6568 && subreg_lowpart_p (x))
0a5cbff6 6569 {
e8dc6d50
JH
6570 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6571 NULL_RTX, 0);
0a5cbff6
RK
6572
6573 /* If we have something other than a SUBREG, we might have
6574 done an expansion, so rerun ourselves. */
6575 if (GET_CODE (newer) != SUBREG)
6576 newer = make_compound_operation (newer, in_code);
6577
6578 return newer;
6579 }
6f28d3e9
RH
6580
6581 /* If this is a paradoxical subreg, and the new code is a sign or
6582 zero extension, omit the subreg and widen the extension. If it
6583 is a regular subreg, we can still get rid of the subreg by not
6584 widening so much, or in fact removing the extension entirely. */
6585 if ((GET_CODE (tem) == SIGN_EXTEND
6586 || GET_CODE (tem) == ZERO_EXTEND)
6587 && subreg_lowpart_p (x))
6588 {
6589 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6590 || (GET_MODE_SIZE (mode) >
6591 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6592 tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6593 else
6594 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6595 return tem;
6596 }
e9a25f70 6597 break;
663522cb 6598
e9a25f70
JL
6599 default:
6600 break;
230d793d
RS
6601 }
6602
6603 if (new)
6604 {
df62f951 6605 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
6606 code = GET_CODE (x);
6607 }
6608
6609 /* Now recursively process each operand of this operation. */
6610 fmt = GET_RTX_FORMAT (code);
6611 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6612 if (fmt[i] == 'e')
6613 {
6614 new = make_compound_operation (XEXP (x, i), next_code);
6615 SUBST (XEXP (x, i), new);
6616 }
6617
6618 return x;
6619}
6620\f
6621/* Given M see if it is a value that would select a field of bits
663522cb
KH
6622 within an item, but not the entire word. Return -1 if not.
6623 Otherwise, return the starting position of the field, where 0 is the
6624 low-order bit.
230d793d
RS
6625
6626 *PLEN is set to the length of the field. */
6627
6628static int
6629get_pos_from_mask (m, plen)
5f4f0e22 6630 unsigned HOST_WIDE_INT m;
770ae6cc 6631 unsigned HOST_WIDE_INT *plen;
230d793d
RS
6632{
6633 /* Get the bit number of the first 1 bit from the right, -1 if none. */
663522cb 6634 int pos = exact_log2 (m & -m);
d3bc8938 6635 int len;
230d793d
RS
6636
6637 if (pos < 0)
6638 return -1;
6639
6640 /* Now shift off the low-order zero bits and see if we have a power of
6641 two minus 1. */
d3bc8938 6642 len = exact_log2 ((m >> pos) + 1);
230d793d 6643
d3bc8938 6644 if (len <= 0)
230d793d
RS
6645 return -1;
6646
d3bc8938 6647 *plen = len;
230d793d
RS
6648 return pos;
6649}
6650\f
6139ff20
RK
6651/* See if X can be simplified knowing that we will only refer to it in
6652 MODE and will only refer to those bits that are nonzero in MASK.
6653 If other bits are being computed or if masking operations are done
6654 that select a superset of the bits in MASK, they can sometimes be
6655 ignored.
6656
6657 Return a possibly simplified expression, but always convert X to
6658 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f 6659
663522cb 6660 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
6661 replace X with REG.
6662
6663 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6664 are all off in X. This is used when X will be complemented, by either
180b8e4b 6665 NOT, NEG, or XOR. */
dfbe1b2f
RK
6666
6667static rtx
e3d616e3 6668force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
6669 rtx x;
6670 enum machine_mode mode;
6139ff20 6671 unsigned HOST_WIDE_INT mask;
dfbe1b2f 6672 rtx reg;
e3d616e3 6673 int just_select;
dfbe1b2f
RK
6674{
6675 enum rtx_code code = GET_CODE (x);
180b8e4b 6676 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
6677 enum machine_mode op_mode;
6678 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
6679 rtx op0, op1, temp;
6680
132d2040
RK
6681 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6682 code below will do the wrong thing since the mode of such an
663522cb 6683 expression is VOIDmode.
be3d27d6
CI
6684
6685 Also do nothing if X is a CLOBBER; this can happen if X was
6686 the return value from a call to gen_lowpart_for_combine. */
6687 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
6688 return x;
6689
6139ff20
RK
6690 /* We want to perform the operation is its present mode unless we know
6691 that the operation is valid in MODE, in which case we do the operation
6692 in MODE. */
1c75dfa4
RK
6693 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6694 && code_to_optab[(int) code] != 0
ef026f91
RS
6695 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6696 != CODE_FOR_nothing))
6697 ? mode : GET_MODE (x));
e3d616e3 6698
aa988991
RS
6699 /* It is not valid to do a right-shift in a narrower mode
6700 than the one it came in with. */
6701 if ((code == LSHIFTRT || code == ASHIFTRT)
6702 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6703 op_mode = GET_MODE (x);
ef026f91
RS
6704
6705 /* Truncate MASK to fit OP_MODE. */
6706 if (op_mode)
6707 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
6708
6709 /* When we have an arithmetic operation, or a shift whose count we
6710 do not know, we need to assume that all bit the up to the highest-order
6711 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
6712 if (op_mode)
6713 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6714 ? GET_MODE_MASK (op_mode)
729a2125
RK
6715 : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6716 - 1));
ef026f91 6717 else
663522cb 6718 fuller_mask = ~(HOST_WIDE_INT) 0;
ef026f91
RS
6719
6720 /* Determine what bits of X are guaranteed to be (non)zero. */
6721 nonzero = nonzero_bits (x, mode);
6139ff20
RK
6722
6723 /* If none of the bits in X are needed, return a zero. */
e3d616e3 6724 if (! just_select && (nonzero & mask) == 0)
6139ff20 6725 return const0_rtx;
dfbe1b2f 6726
6139ff20
RK
6727 /* If X is a CONST_INT, return a new one. Do this here since the
6728 test below will fail. */
6729 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
6730 {
6731 HOST_WIDE_INT cval = INTVAL (x) & mask;
6732 int width = GET_MODE_BITSIZE (mode);
6733
6734 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6735 number, sign extend it. */
6736 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6737 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6738 cval |= (HOST_WIDE_INT) -1 << width;
663522cb 6739
ceb7983c
RK
6740 return GEN_INT (cval);
6741 }
dfbe1b2f 6742
180b8e4b
RK
6743 /* If X is narrower than MODE and we want all the bits in X's mode, just
6744 get X in the proper mode. */
6745 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
663522cb 6746 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
dfbe1b2f
RK
6747 return gen_lowpart_for_combine (mode, x);
6748
71923da7
RK
6749 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6750 MASK are already known to be zero in X, we need not do anything. */
663522cb 6751 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6139ff20
RK
6752 return x;
6753
dfbe1b2f
RK
6754 switch (code)
6755 {
6139ff20
RK
6756 case CLOBBER:
6757 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 6758 generating something that won't match. */
6139ff20
RK
6759 return x;
6760
6139ff20
RK
6761 case USE:
6762 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6763 spanned the boundary of the MEM. If we are now masking so it is
6764 within that boundary, we don't need the USE any more. */
f76b9db2 6765 if (! BITS_BIG_ENDIAN
663522cb 6766 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 6767 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
f76b9db2 6768 break;
6139ff20 6769
dfbe1b2f
RK
6770 case SIGN_EXTEND:
6771 case ZERO_EXTEND:
6772 case ZERO_EXTRACT:
6773 case SIGN_EXTRACT:
6774 x = expand_compound_operation (x);
6775 if (GET_CODE (x) != code)
e3d616e3 6776 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
6777 break;
6778
6779 case REG:
6780 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6781 || rtx_equal_p (reg, get_last_value (x))))
6782 x = reg;
6783 break;
6784
dfbe1b2f 6785 case SUBREG:
6139ff20 6786 if (subreg_lowpart_p (x)
180b8e4b
RK
6787 /* We can ignore the effect of this SUBREG if it narrows the mode or
6788 if the constant masks to zero all the bits the mode doesn't
6789 have. */
6139ff20
RK
6790 && ((GET_MODE_SIZE (GET_MODE (x))
6791 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
6792 || (0 == (mask
6793 & GET_MODE_MASK (GET_MODE (x))
663522cb 6794 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 6795 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
6796 break;
6797
6798 case AND:
6139ff20
RK
6799 /* If this is an AND with a constant, convert it into an AND
6800 whose constant is the AND of that constant with MASK. If it
6801 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 6802
2ca9ae17 6803 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
dfbe1b2f 6804 {
6139ff20
RK
6805 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6806 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
6807
6808 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
6809 is just some low-order bits. If so, and it is MASK, we don't
6810 need it. */
dfbe1b2f
RK
6811
6812 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
e51712db 6813 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 6814 x = XEXP (x, 0);
d0ab8cd3 6815
71923da7
RK
6816 /* If it remains an AND, try making another AND with the bits
6817 in the mode mask that aren't in MASK turned on. If the
6818 constant in the AND is wide enough, this might make a
6819 cheaper constant. */
6820
6821 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
2ca9ae17
JW
6822 && GET_MODE_MASK (GET_MODE (x)) != mask
6823 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
6824 {
6825 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
663522cb 6826 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
71923da7
RK
6827 int width = GET_MODE_BITSIZE (GET_MODE (x));
6828 rtx y;
6829
6830 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6831 number, sign extend it. */
6832 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6833 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6834 cval |= (HOST_WIDE_INT) -1 << width;
6835
6836 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6837 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6838 x = y;
6839 }
6840
d0ab8cd3 6841 break;
dfbe1b2f
RK
6842 }
6843
6139ff20 6844 goto binop;
dfbe1b2f
RK
6845
6846 case PLUS:
6139ff20
RK
6847 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6848 low-order bits (as in an alignment operation) and FOO is already
6849 aligned to that boundary, mask C1 to that boundary as well.
6850 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
6851
6852 {
770ae6cc 6853 unsigned int width = GET_MODE_BITSIZE (mode);
9fa6d012
TG
6854 unsigned HOST_WIDE_INT smask = mask;
6855
6856 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6857 number, sign extend it. */
6858
6859 if (width < HOST_BITS_PER_WIDE_INT
6860 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6861 smask |= (HOST_WIDE_INT) -1 << width;
6862
6863 if (GET_CODE (XEXP (x, 1)) == CONST_INT
0e9ff885
DM
6864 && exact_log2 (- smask) >= 0)
6865 {
6866#ifdef STACK_BIAS
6867 if (STACK_BIAS
6868 && (XEXP (x, 0) == stack_pointer_rtx
6869 || XEXP (x, 0) == frame_pointer_rtx))
6870 {
663522cb
KH
6871 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6872 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6873
6874 sp_mask &= ~(sp_alignment - 1);
6875 if ((sp_mask & ~smask) == 0
6876 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
0e9ff885 6877 return force_to_mode (plus_constant (XEXP (x, 0),
663522cb 6878 ((INTVAL (XEXP (x, 1)) -
835c8e04 6879 STACK_BIAS) & smask)
0e9ff885 6880 + STACK_BIAS),
663522cb
KH
6881 mode, smask, reg, next_select);
6882 }
0e9ff885 6883#endif
663522cb
KH
6884 if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6885 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
0e9ff885 6886 return force_to_mode (plus_constant (XEXP (x, 0),
663522cb 6887 (INTVAL (XEXP (x, 1))
835c8e04
DT
6888 & smask)),
6889 mode, smask, reg, next_select);
0e9ff885 6890 }
9fa6d012 6891 }
6139ff20 6892
0f41302f 6893 /* ... fall through ... */
6139ff20 6894
dfbe1b2f 6895 case MULT:
6139ff20
RK
6896 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6897 most significant bit in MASK since carries from those bits will
6898 affect the bits we are interested in. */
6899 mask = fuller_mask;
6900 goto binop;
6901
d41638e4
RH
6902 case MINUS:
6903 /* If X is (minus C Y) where C's least set bit is larger than any bit
6904 in the mask, then we may replace with (neg Y). */
6905 if (GET_CODE (XEXP (x, 0)) == CONST_INT
0345195a
RK
6906 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6907 & -INTVAL (XEXP (x, 0))))
6908 > mask))
d41638e4
RH
6909 {
6910 x = gen_unary (NEG, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
6911 return force_to_mode (x, mode, mask, reg, next_select);
6912 }
6913
6914 /* Similarly, if C contains every bit in the mask, then we may
6915 replace with (not Y). */
6916 if (GET_CODE (XEXP (x, 0)) == CONST_INT
0345195a
RK
6917 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6918 == INTVAL (XEXP (x, 0))))
d41638e4
RH
6919 {
6920 x = gen_unary (NOT, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
6921 return force_to_mode (x, mode, mask, reg, next_select);
6922 }
6923
6924 mask = fuller_mask;
6925 goto binop;
6926
dfbe1b2f
RK
6927 case IOR:
6928 case XOR:
6139ff20
RK
6929 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6930 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6931 operation which may be a bitfield extraction. Ensure that the
6932 constant we form is not wider than the mode of X. */
6933
6934 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6935 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6936 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6937 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6938 && GET_CODE (XEXP (x, 1)) == CONST_INT
6939 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6940 + floor_log2 (INTVAL (XEXP (x, 1))))
6941 < GET_MODE_BITSIZE (GET_MODE (x)))
6942 && (INTVAL (XEXP (x, 1))
663522cb 6943 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
6944 {
6945 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
663522cb 6946 << INTVAL (XEXP (XEXP (x, 0), 1)));
6139ff20
RK
6947 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6948 XEXP (XEXP (x, 0), 0), temp);
d4d2b13f
RK
6949 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6950 XEXP (XEXP (x, 0), 1));
e3d616e3 6951 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6952 }
6953
6954 binop:
dfbe1b2f 6955 /* For most binary operations, just propagate into the operation and
6139ff20
RK
6956 change the mode if we have an operation of that mode. */
6957
e3d616e3
RK
6958 op0 = gen_lowpart_for_combine (op_mode,
6959 force_to_mode (XEXP (x, 0), mode, mask,
6960 reg, next_select));
6961 op1 = gen_lowpart_for_combine (op_mode,
6962 force_to_mode (XEXP (x, 1), mode, mask,
6963 reg, next_select));
6139ff20 6964
2dd484ed
RK
6965 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6966 MASK since OP1 might have been sign-extended but we never want
6967 to turn on extra bits, since combine might have previously relied
6968 on them being off. */
6969 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6970 && (INTVAL (op1) & mask) != 0)
6971 op1 = GEN_INT (INTVAL (op1) & mask);
663522cb 6972
6139ff20
RK
6973 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6974 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 6975 break;
dfbe1b2f
RK
6976
6977 case ASHIFT:
dfbe1b2f 6978 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
6979 However, we cannot do anything with shifts where we cannot
6980 guarantee that the counts are smaller than the size of the mode
6981 because such a count will have a different meaning in a
6139ff20 6982 wider mode. */
f6785026
RK
6983
6984 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6985 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
6986 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6987 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6988 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 6989 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026 6990 break;
663522cb 6991
6139ff20
RK
6992 /* If the shift count is a constant and we can do arithmetic in
6993 the mode of the shift, refine which bits we need. Otherwise, use the
6994 conservative form of the mask. */
6995 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6996 && INTVAL (XEXP (x, 1)) >= 0
6997 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6998 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6999 mask >>= INTVAL (XEXP (x, 1));
7000 else
7001 mask = fuller_mask;
7002
7003 op0 = gen_lowpart_for_combine (op_mode,
7004 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 7005 mask, reg, next_select));
6139ff20
RK
7006
7007 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
663522cb 7008 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 7009 break;
dfbe1b2f
RK
7010
7011 case LSHIFTRT:
1347292b
JW
7012 /* Here we can only do something if the shift count is a constant,
7013 this shift constant is valid for the host, and we can do arithmetic
7014 in OP_MODE. */
dfbe1b2f
RK
7015
7016 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 7017 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 7018 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7019 {
6139ff20 7020 rtx inner = XEXP (x, 0);
402b6c2a 7021 unsigned HOST_WIDE_INT inner_mask;
6139ff20
RK
7022
7023 /* Select the mask of the bits we need for the shift operand. */
402b6c2a 7024 inner_mask = mask << INTVAL (XEXP (x, 1));
d0ab8cd3 7025
6139ff20 7026 /* We can only change the mode of the shift if we can do arithmetic
402b6c2a
JW
7027 in the mode of the shift and INNER_MASK is no wider than the
7028 width of OP_MODE. */
6139ff20 7029 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
663522cb 7030 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
7031 op_mode = GET_MODE (x);
7032
402b6c2a 7033 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
6139ff20
RK
7034
7035 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7036 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 7037 }
6139ff20
RK
7038
7039 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7040 shift and AND produces only copies of the sign bit (C2 is one less
7041 than a power of two), we can do this with just a shift. */
7042
7043 if (GET_CODE (x) == LSHIFTRT
7044 && GET_CODE (XEXP (x, 1)) == CONST_INT
cfff35c1
JW
7045 /* The shift puts one of the sign bit copies in the least significant
7046 bit. */
6139ff20
RK
7047 && ((INTVAL (XEXP (x, 1))
7048 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7049 >= GET_MODE_BITSIZE (GET_MODE (x)))
7050 && exact_log2 (mask + 1) >= 0
cfff35c1
JW
7051 /* Number of bits left after the shift must be more than the mask
7052 needs. */
7053 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7054 <= GET_MODE_BITSIZE (GET_MODE (x)))
7055 /* Must be more sign bit copies than the mask needs. */
770ae6cc 7056 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6139ff20
RK
7057 >= exact_log2 (mask + 1)))
7058 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7059 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7060 - exact_log2 (mask + 1)));
fae2db47
JW
7061
7062 goto shiftrt;
d0ab8cd3
RK
7063
7064 case ASHIFTRT:
6139ff20
RK
7065 /* If we are just looking for the sign bit, we don't need this shift at
7066 all, even if it has a variable count. */
9bf22b75 7067 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
e51712db 7068 && (mask == ((unsigned HOST_WIDE_INT) 1
9bf22b75 7069 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
e3d616e3 7070 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
7071
7072 /* If this is a shift by a constant, get a mask that contains those bits
7073 that are not copies of the sign bit. We then have two cases: If
7074 MASK only includes those bits, this can be a logical shift, which may
7075 allow simplifications. If MASK is a single-bit field not within
7076 those bits, we are requesting a copy of the sign bit and hence can
7077 shift the sign bit to the appropriate location. */
7078
7079 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7080 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7081 {
7082 int i = -1;
7083
b69960ac
RK
7084 /* If the considered data is wider then HOST_WIDE_INT, we can't
7085 represent a mask for all its bits in a single scalar.
7086 But we only care about the lower bits, so calculate these. */
7087
6a11342f 7088 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 7089 {
663522cb 7090 nonzero = ~(HOST_WIDE_INT) 0;
b69960ac
RK
7091
7092 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7093 is the number of bits a full-width mask would have set.
7094 We need only shift if these are fewer than nonzero can
7095 hold. If not, we must keep all bits set in nonzero. */
7096
7097 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7098 < HOST_BITS_PER_WIDE_INT)
7099 nonzero >>= INTVAL (XEXP (x, 1))
7100 + HOST_BITS_PER_WIDE_INT
7101 - GET_MODE_BITSIZE (GET_MODE (x)) ;
7102 }
7103 else
7104 {
7105 nonzero = GET_MODE_MASK (GET_MODE (x));
7106 nonzero >>= INTVAL (XEXP (x, 1));
7107 }
6139ff20 7108
663522cb 7109 if ((mask & ~nonzero) == 0
6139ff20
RK
7110 || (i = exact_log2 (mask)) >= 0)
7111 {
7112 x = simplify_shift_const
7113 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7114 i < 0 ? INTVAL (XEXP (x, 1))
7115 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7116
7117 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 7118 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
7119 }
7120 }
7121
7122 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
7123 even if the shift count isn't a constant. */
7124 if (mask == 1)
7125 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7126
fae2db47
JW
7127 shiftrt:
7128
7129 /* If this is a zero- or sign-extension operation that just affects bits
4c002f29
RK
7130 we don't care about, remove it. Be sure the call above returned
7131 something that is still a shift. */
d0ab8cd3 7132
4c002f29
RK
7133 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7134 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 7135 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
7136 && (INTVAL (XEXP (x, 1))
7137 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
7138 && GET_CODE (XEXP (x, 0)) == ASHIFT
7139 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7140 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
7141 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7142 reg, next_select);
6139ff20 7143
dfbe1b2f
RK
7144 break;
7145
6139ff20
RK
7146 case ROTATE:
7147 case ROTATERT:
7148 /* If the shift count is constant and we can do computations
7149 in the mode of X, compute where the bits we care about are.
7150 Otherwise, we can't do anything. Don't change the mode of
7151 the shift or propagate MODE into the shift, though. */
7152 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7153 && INTVAL (XEXP (x, 1)) >= 0)
7154 {
7155 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7156 GET_MODE (x), GEN_INT (mask),
7157 XEXP (x, 1));
7d171a1e 7158 if (temp && GET_CODE(temp) == CONST_INT)
6139ff20
RK
7159 SUBST (XEXP (x, 0),
7160 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 7161 INTVAL (temp), reg, next_select));
6139ff20
RK
7162 }
7163 break;
663522cb 7164
dfbe1b2f 7165 case NEG:
180b8e4b
RK
7166 /* If we just want the low-order bit, the NEG isn't needed since it
7167 won't change the low-order bit. */
7168 if (mask == 1)
7169 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7170
6139ff20
RK
7171 /* We need any bits less significant than the most significant bit in
7172 MASK since carries from those bits will affect the bits we are
7173 interested in. */
7174 mask = fuller_mask;
7175 goto unop;
7176
dfbe1b2f 7177 case NOT:
6139ff20
RK
7178 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7179 same as the XOR case above. Ensure that the constant we form is not
7180 wider than the mode of X. */
7181
7182 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7183 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7184 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7185 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7186 < GET_MODE_BITSIZE (GET_MODE (x)))
7187 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7188 {
7189 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7190 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7191 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7192
e3d616e3 7193 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
7194 }
7195
f82da7d2
JW
7196 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7197 use the full mask inside the NOT. */
7198 mask = fuller_mask;
7199
6139ff20 7200 unop:
e3d616e3
RK
7201 op0 = gen_lowpart_for_combine (op_mode,
7202 force_to_mode (XEXP (x, 0), mode, mask,
7203 reg, next_select));
6139ff20 7204 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 7205 x = gen_unary (code, op_mode, op_mode, op0);
6139ff20
RK
7206 break;
7207
7208 case NE:
7209 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 7210 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 7211 which is equal to STORE_FLAG_VALUE. */
663522cb 7212 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
3aceff0d 7213 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
1a6ec070 7214 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
e3d616e3 7215 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 7216
d0ab8cd3
RK
7217 break;
7218
7219 case IF_THEN_ELSE:
7220 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7221 written in a narrower mode. We play it safe and do not do so. */
7222
7223 SUBST (XEXP (x, 1),
7224 gen_lowpart_for_combine (GET_MODE (x),
7225 force_to_mode (XEXP (x, 1), mode,
e3d616e3 7226 mask, reg, next_select)));
d0ab8cd3
RK
7227 SUBST (XEXP (x, 2),
7228 gen_lowpart_for_combine (GET_MODE (x),
7229 force_to_mode (XEXP (x, 2), mode,
e3d616e3 7230 mask, reg,next_select)));
d0ab8cd3 7231 break;
663522cb 7232
e9a25f70
JL
7233 default:
7234 break;
dfbe1b2f
RK
7235 }
7236
d0ab8cd3 7237 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
7238 return gen_lowpart_for_combine (mode, x);
7239}
7240\f
abe6e52f
RK
7241/* Return nonzero if X is an expression that has one of two values depending on
7242 whether some other value is zero or nonzero. In that case, we return the
7243 value that is being tested, *PTRUE is set to the value if the rtx being
7244 returned has a nonzero value, and *PFALSE is set to the other alternative.
7245
7246 If we return zero, we set *PTRUE and *PFALSE to X. */
7247
7248static rtx
7249if_then_else_cond (x, ptrue, pfalse)
7250 rtx x;
7251 rtx *ptrue, *pfalse;
7252{
7253 enum machine_mode mode = GET_MODE (x);
7254 enum rtx_code code = GET_CODE (x);
abe6e52f
RK
7255 rtx cond0, cond1, true0, true1, false0, false1;
7256 unsigned HOST_WIDE_INT nz;
7257
14a774a9
RK
7258 /* If we are comparing a value against zero, we are done. */
7259 if ((code == NE || code == EQ)
7260 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7261 {
e8758a3a
JL
7262 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7263 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
14a774a9
RK
7264 return XEXP (x, 0);
7265 }
7266
abe6e52f
RK
7267 /* If this is a unary operation whose operand has one of two values, apply
7268 our opcode to compute those values. */
14a774a9
RK
7269 else if (GET_RTX_CLASS (code) == '1'
7270 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
abe6e52f 7271 {
0c1c8ea6
RK
7272 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
7273 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
7274 return cond0;
7275 }
7276
3a19aabc 7277 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 7278 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
7279 else if (code == COMPARE)
7280 ;
7281
abe6e52f
RK
7282 /* If this is a binary operation, see if either side has only one of two
7283 values. If either one does or if both do and they are conditional on
7284 the same value, compute the new true and false values. */
7285 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7286 || GET_RTX_CLASS (code) == '<')
7287 {
7288 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7289 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7290
7291 if ((cond0 != 0 || cond1 != 0)
7292 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7293 {
987e845a
JW
7294 /* If if_then_else_cond returned zero, then true/false are the
7295 same rtl. We must copy one of them to prevent invalid rtl
7296 sharing. */
7297 if (cond0 == 0)
7298 true0 = copy_rtx (true0);
7299 else if (cond1 == 0)
7300 true1 = copy_rtx (true1);
7301
abe6e52f
RK
7302 *ptrue = gen_binary (code, mode, true0, true1);
7303 *pfalse = gen_binary (code, mode, false0, false1);
7304 return cond0 ? cond0 : cond1;
7305 }
9210df58 7306
9210df58 7307 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
7308 operands is zero when the other is non-zero, and vice-versa,
7309 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 7310
0802d516
RK
7311 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7312 && (code == PLUS || code == IOR || code == XOR || code == MINUS
663522cb 7313 || code == UMAX)
9210df58
RK
7314 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7315 {
7316 rtx op0 = XEXP (XEXP (x, 0), 1);
7317 rtx op1 = XEXP (XEXP (x, 1), 1);
7318
7319 cond0 = XEXP (XEXP (x, 0), 0);
7320 cond1 = XEXP (XEXP (x, 1), 0);
7321
7322 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7323 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7324 && reversible_comparison_p (cond1)
7325 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
7326 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7327 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7328 || ((swap_condition (GET_CODE (cond0))
7329 == reverse_condition (GET_CODE (cond1)))
7330 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7331 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7332 && ! side_effects_p (x))
7333 {
7334 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
663522cb
KH
7335 *pfalse = gen_binary (MULT, mode,
7336 (code == MINUS
0c1c8ea6 7337 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
7338 const_true_rtx);
7339 return cond0;
7340 }
7341 }
7342
7343 /* Similarly for MULT, AND and UMIN, except that for these the result
7344 is always zero. */
0802d516
RK
7345 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7346 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
7347 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7348 {
7349 cond0 = XEXP (XEXP (x, 0), 0);
7350 cond1 = XEXP (XEXP (x, 1), 0);
7351
7352 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7353 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7354 && reversible_comparison_p (cond1)
7355 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
7356 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7357 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7358 || ((swap_condition (GET_CODE (cond0))
7359 == reverse_condition (GET_CODE (cond1)))
7360 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7361 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7362 && ! side_effects_p (x))
7363 {
7364 *ptrue = *pfalse = const0_rtx;
7365 return cond0;
7366 }
7367 }
abe6e52f
RK
7368 }
7369
7370 else if (code == IF_THEN_ELSE)
7371 {
7372 /* If we have IF_THEN_ELSE already, extract the condition and
7373 canonicalize it if it is NE or EQ. */
7374 cond0 = XEXP (x, 0);
7375 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7376 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7377 return XEXP (cond0, 0);
7378 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7379 {
7380 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7381 return XEXP (cond0, 0);
7382 }
7383 else
7384 return cond0;
7385 }
7386
7387 /* If X is a normal SUBREG with both inner and outer modes integral,
7388 we can narrow both the true and false values of the inner expression,
7389 if there is a condition. */
7390 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
7391 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
7392 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
7393 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7394 &true0, &false0)))
7395 {
668bcf76
JL
7396 if ((GET_CODE (SUBREG_REG (x)) == REG
7397 || GET_CODE (SUBREG_REG (x)) == MEM
7398 || CONSTANT_P (SUBREG_REG (x)))
7399 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
54f3b5c2
R
7400 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
7401 {
7402 true0 = operand_subword (true0, SUBREG_WORD (x), 0, mode);
7403 false0 = operand_subword (false0, SUBREG_WORD (x), 0, mode);
7404 }
49219895 7405 *ptrue = force_to_mode (true0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
00244e6b 7406 *pfalse
49219895 7407 = force_to_mode (false0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
abe6e52f 7408
abe6e52f
RK
7409 return cond0;
7410 }
7411
7412 /* If X is a constant, this isn't special and will cause confusions
7413 if we treat it as such. Likewise if it is equivalent to a constant. */
7414 else if (CONSTANT_P (x)
7415 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7416 ;
7417
1f3f36d1
RH
7418 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7419 will be least confusing to the rest of the compiler. */
7420 else if (mode == BImode)
7421 {
7422 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7423 return x;
7424 }
7425
663522cb 7426 /* If X is known to be either 0 or -1, those are the true and
abe6e52f 7427 false values when testing X. */
49219895
JH
7428 else if (x == constm1_rtx || x == const0_rtx
7429 || (mode != VOIDmode
7430 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
abe6e52f
RK
7431 {
7432 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7433 return x;
7434 }
7435
7436 /* Likewise for 0 or a single bit. */
49219895
JH
7437 else if (mode != VOIDmode
7438 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7439 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
abe6e52f
RK
7440 {
7441 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
7442 return x;
7443 }
7444
7445 /* Otherwise fail; show no condition with true and false values the same. */
7446 *ptrue = *pfalse = x;
7447 return 0;
7448}
7449\f
1a26b032
RK
7450/* Return the value of expression X given the fact that condition COND
7451 is known to be true when applied to REG as its first operand and VAL
7452 as its second. X is known to not be shared and so can be modified in
7453 place.
7454
7455 We only handle the simplest cases, and specifically those cases that
7456 arise with IF_THEN_ELSE expressions. */
7457
7458static rtx
7459known_cond (x, cond, reg, val)
7460 rtx x;
7461 enum rtx_code cond;
7462 rtx reg, val;
7463{
7464 enum rtx_code code = GET_CODE (x);
f24ad0e4 7465 rtx temp;
6f7d635c 7466 const char *fmt;
1a26b032
RK
7467 int i, j;
7468
 /* Expressions with side effects must be left untouched; substituting
    into them could duplicate or drop the side effect.  */
7469 if (side_effects_p (x))
7470 return x;
7471
 /* Trivial case: under an EQ condition, REG is known to equal VAL.  */
7472 if (cond == EQ && rtx_equal_p (x, reg))
7473 return val;
7474
7475 /* If X is (abs REG) and we know something about REG's relationship
7476 with zero, we may be able to simplify this. */
7477
7478 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7479 switch (cond)
7480 {
7481 case GE: case GT: case EQ:
 /* REG is known non-negative, so (abs REG) is just REG.  */
7482 return XEXP (x, 0);
7483 case LT: case LE:
0c1c8ea6
RK
 /* REG is known non-positive, so (abs REG) is (neg REG).  */
7484 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
7485 XEXP (x, 0));
e9a25f70
JL
7486 default:
7487 break;
1a26b032
RK
7488 }
7489
7490 /* The only other cases we handle are MIN, MAX, and comparisons if the
7491 operands are the same as REG and VAL. */
7492
7493 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7494 {
 /* Canonicalize so REG is the first operand of X, swapping the sense
    of COND to compensate.  */
7495 if (rtx_equal_p (XEXP (x, 0), val))
7496 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7497
7498 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7499 {
7500 if (GET_RTX_CLASS (code) == '<')
1eb8759b
RH
7501 {
 /* If COND implies comparison CODE, X is known true; if COND
    implies the reversed comparison, X is known false.  */
7502 if (comparison_dominates_p (cond, code))
7503 return const_true_rtx;
1a26b032 7504

1eb8759b
RH
7505 code = reverse_condition (code);
7506 if (code != UNKNOWN
7507 && comparison_dominates_p (cond, code))
7508 return const0_rtx;
7509 else
7510 return x;
7511 }
1a26b032
RK
7512 else if (code == SMAX || code == SMIN
7513 || code == UMIN || code == UMAX)
7514 {
7515 int unsignedp = (code == UMIN || code == UMAX);
7516
 /* Treat MAX as MIN with COND reversed, so the switch below need
    only reason about the MIN cases.  */
7517 if (code == SMAX || code == UMAX)
7518 cond = reverse_condition (cond);
7519
 /* For a signed MIN the signed conditions pick an operand; for an
    unsigned MIN the unsigned conditions do.  The mismatched cases
    tell us nothing, so return X unchanged.  */
7520 switch (cond)
7521 {
7522 case GE: case GT:
7523 return unsignedp ? x : XEXP (x, 1);
7524 case LE: case LT:
7525 return unsignedp ? x : XEXP (x, 0);
7526 case GEU: case GTU:
7527 return unsignedp ? XEXP (x, 1) : x;
7528 case LEU: case LTU:
7529 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
7530 default:
7531 break;
1a26b032
RK
7532 }
7533 }
7534 }
7535 }
7536
 /* Otherwise recurse on X's operands, substituting simplifications
    in place (X is known not to be shared).  */
7537 fmt = GET_RTX_FORMAT (code);
7538 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7539 {
7540 if (fmt[i] == 'e')
7541 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7542 else if (fmt[i] == 'E')
7543 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7544 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7545 cond, reg, val));
7546 }
7547
7548 return x;
7549}
7550\f
e11fa86f
RK
7551/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7552 assignment as a field assignment. */
7553
7554static int
7555rtx_equal_for_field_assignment_p (x, y)
7556 rtx x;
7557 rtx y;
7558{
e11fa86f
RK
 /* Identical or structurally equal rtl is trivially equal.  */
7559 if (x == y || rtx_equal_p (x, y))
7560 return 1;
7561
7562 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7563 return 0;
7564
7565 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7566 Note that all SUBREGs of MEM are paradoxical; otherwise they
7567 would have been rewritten. */
7568 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7569 && GET_CODE (SUBREG_REG (y)) == MEM
7570 && rtx_equal_p (SUBREG_REG (y),
7571 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7572 return 1;
7573
 /* Same check with the roles of X and Y exchanged.  */
7574 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7575 && GET_CODE (SUBREG_REG (x)) == MEM
7576 && rtx_equal_p (SUBREG_REG (x),
7577 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7578 return 1;
7579
9ec36da5
JL
7580 /* We used to see if get_last_value of X and Y were the same but that's
7581 not correct. In one direction, we'll cause the assignment to have
7582 the wrong destination and in the other direction, we'll import a register
7583 into this insn that might already have been dead. So fail if none of the
7584 above cases are true. */
7585 return 0;
e11fa86f
RK
7586}
7587\f
230d793d
RS
7588/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7589 Return that assignment if so.
7590
7591 We only handle the most common cases. */
7592
7593static rtx
7594make_field_assignment (x)
7595 rtx x;
7596{
7597 rtx dest = SET_DEST (x);
7598 rtx src = SET_SRC (x);
dfbe1b2f 7599 rtx assign;
e11fa86f 7600 rtx rhs, lhs;
5f4f0e22 7601 HOST_WIDE_INT c1;
770ae6cc
RK
7602 HOST_WIDE_INT pos;
7603 unsigned HOST_WIDE_INT len;
dfbe1b2f
RK
7604 rtx other;
7605 enum machine_mode mode;
230d793d
RS
7606
7607 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7608 a clear of a one-bit field. We will have changed it to
7609 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7610 for a SUBREG. */
7611
7612 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7613 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7614 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 7615 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7616 {
8999a12e 7617 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7618 1, 1, 1, 0)
 ;
76184def 7619 if (assign != 0)
38a448ca 7620 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7621 return x;
230d793d
RS
7622 }
7623
 /* Same pattern, but with the ROTATE wrapped in a narrowing lowpart
    SUBREG.  */
7624 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7625 && subreg_lowpart_p (XEXP (src, 0))
663522cb 7626 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
230d793d
RS
7627 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7628 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7629 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7630 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7631 {
8999a12e 7632 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7633 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7634 1, 1, 1, 0);
76184def 7635 if (assign != 0)
38a448ca 7636 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7637 return x;
230d793d
RS
7638 }
7639
9dd11dcb 7640 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7641 one-bit field. */
7642 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7643 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7644 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7645 {
8999a12e 7646 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7647 1, 1, 1, 0);
76184def 7648 if (assign != 0)
38a448ca 7649 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7650 return x;
230d793d
RS
7651 }
7652
dfbe1b2f 7653 /* The other case we handle is assignments into a constant-position
9dd11dcb 7654 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7655 a mask that has all one bits except for a group of zero bits and
7656 OTHER is known to have zeros where C1 has ones, this is such an
7657 assignment. Compute the position and length from C1. Shift OTHER
7658 to the appropriate position, force it to the required mode, and
7659 make the extraction. Check for the AND in both operands. */

9dd11dcb 7661 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7662 return x;
7663
7664 rhs = expand_compound_operation (XEXP (src, 0));
7665 lhs = expand_compound_operation (XEXP (src, 1));
7666
 /* The (and DEST C1) may be on either side of the IOR/XOR.  */
7667 if (GET_CODE (rhs) == AND
7668 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7669 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7670 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7671 else if (GET_CODE (lhs) == AND
7672 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7673 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7674 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7675 else
7676 return x;
230d793d 7677
 /* Derive the field's position and length from the complement of the
    mask; give up if the mask isn't a single contiguous group of zeros,
    the field overruns DEST, or OTHER might have bits set inside it.  */
663522cb 7678 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7679 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
7680 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7681 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 7682 return x;
230d793d 7683
5f4f0e22 7684 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7685 if (assign == 0)
7686 return x;
230d793d 7687
dfbe1b2f
RK
7688 /* The mode to use for the source is the mode of the assignment, or of
7689 what is inside a possible STRICT_LOW_PART. */
663522cb 7690 mode = (GET_CODE (assign) == STRICT_LOW_PART
dfbe1b2f 7691 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7692
dfbe1b2f
RK
7693 /* Shift OTHER right POS places and make it the source, restricting it
7694 to the proper length and mode. */
230d793d 7695
5f4f0e22
CH
7696 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7697 GET_MODE (src), other, pos),
6139ff20
RK
7698 mode,
7699 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
0345195a 7700 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 7701 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7702 dest, 0);
230d793d 7703
dfbe1b2f 7704 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
7705}
7706\f
7707/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7708 if so. */
7709
7710static rtx
7711apply_distributive_law (x)
7712 rtx x;
7713{
7714 enum rtx_code code = GET_CODE (x);
7715 rtx lhs, rhs, other;
7716 rtx tem;
7717 enum rtx_code inner_code;
7718
d8a8a4da
RS
7719 /* Distributivity is not true for floating point.
7720 It can change the value. So don't do it.
7721 -- rms and moshier@world.std.com. */
3ad2180a 7722 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7723 return x;
7724
230d793d
RS
7725 /* The outer operation can only be one of the following: */
7726 if (code != IOR && code != AND && code != XOR
7727 && code != PLUS && code != MINUS)
7728 return x;
7729
7730 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7731
0f41302f
MS
7732 /* If either operand is a primitive we can't do anything, so get out
7733 fast. */
230d793d 7734 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7735 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7736 return x;
7737
 /* Both inner operations must have the same code for the law to apply.  */
7738 lhs = expand_compound_operation (lhs);
7739 rhs = expand_compound_operation (rhs);
7740 inner_code = GET_CODE (lhs);
7741 if (inner_code != GET_CODE (rhs))
7742 return x;
7743
7744 /* See if the inner and outer operations distribute. */
7745 switch (inner_code)
7746 {
7747 case LSHIFTRT:
7748 case ASHIFTRT:
7749 case AND:
7750 case IOR:
7751 /* These all distribute except over PLUS. */
7752 if (code == PLUS || code == MINUS)
7753 return x;
7754 break;

7756 case MULT:
 /* Multiplication distributes only over PLUS and MINUS.  */
7757 if (code != PLUS && code != MINUS)
7758 return x;
7759 break;

7761 case ASHIFT:
45620ed4 7762 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7763 break;

7765 case SUBREG:
dfbe1b2f
RK
7766 /* Non-paradoxical SUBREGs distribute over all operations, provided
7767 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
7768 of a low-order part, we don't convert an fp operation to int or
7769 vice versa, and we would not be converting a single-word
dfbe1b2f 7770 operation into a multi-word operation. The latter test is not
2b4bd1bc 7771 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7772 Some of the previous tests are redundant given the latter test, but
7773 are retained because they are required for correctness.
7774
7775 We produce the result slightly differently in this case. */
7776
7777 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7778 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7779 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7780 || (GET_MODE_CLASS (GET_MODE (lhs))
7781 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7782 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7783 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7784 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
230d793d
RS
7785 return x;

 /* Do the operation in the inner mode and take the lowpart of the
    result instead of distributing in the usual way.  */
7787 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7788 SUBREG_REG (lhs), SUBREG_REG (rhs));
7789 return gen_lowpart_for_combine (GET_MODE (x), tem);

7791 default:
7792 return x;
7793 }
7794
7795 /* Set LHS and RHS to the inner operands (A and B in the example
7796 above) and set OTHER to the common operand (C in the example).
7797 There is only one way to do this unless the inner operation is
7798 commutative. */
7799 if (GET_RTX_CLASS (inner_code) == 'c'
7800 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7801 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7802 else if (GET_RTX_CLASS (inner_code) == 'c'
7803 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7804 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7805 else if (GET_RTX_CLASS (inner_code) == 'c'
7806 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7807 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7808 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7809 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7810 else
7811 return x;
7812
7813 /* Form the new inner operation, seeing if it simplifies first. */
7814 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7815
7816 /* There is one exception to the general way of distributing:
7817 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7818 if (code == XOR && inner_code == IOR)
7819 {
7820 inner_code = AND;
0c1c8ea6 7821 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
7822 }
7823
7824 /* We may be able to continue distributing the result, so call
7825 ourselves recursively on the inner operation before forming the
7826 outer operation, which we return. */
7827 return gen_binary (inner_code, GET_MODE (x),
7828 apply_distributive_law (tem), other);
7829}
7830\f
7831/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7832 in MODE.
7833
7834 Return an equivalent form, if different from X. Otherwise, return X. If
7835 X is zero, we are to always construct the equivalent form. */
7836
7837static rtx
7838simplify_and_const_int (x, mode, varop, constop)
7839 rtx x;
7840 enum machine_mode mode;
7841 rtx varop;
5f4f0e22 7842 unsigned HOST_WIDE_INT constop;
230d793d 7843{
951553af 7844 unsigned HOST_WIDE_INT nonzero;
42301240 7845 int i;
230d793d 7846
6139ff20
RK
7847 /* Simplify VAROP knowing that we will be only looking at some of the
7848 bits in it. */
e3d616e3 7849 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7850
6139ff20
RK
7851 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7852 CONST_INT, we are done. */
7853 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7854 return varop;
230d793d 7855
fc06d7aa
RK
7856 /* See what bits may be nonzero in VAROP. Unlike the general case of
7857 a call to nonzero_bits, here we don't care about bits outside
7858 MODE. */

7860 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7e4ce834 7861 nonzero = trunc_int_for_mode (nonzero, mode);
9fa6d012 7862
230d793d 7863 /* Turn off all bits in the constant that are known to already be zero.
951553af 7864 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7865 which is tested below. */

951553af 7867 constop &= nonzero;
230d793d
RS
7868
7869 /* If we don't have any bits left, return zero. */
7870 if (constop == 0)
7871 return const0_rtx;
7872
42301240
RK
7873 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7874 a power of two, we can replace this with an ASHIFT. */
7875 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7876 && (i = exact_log2 (constop)) >= 0)
7877 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
663522cb 7878
6139ff20
RK
7879 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7880 or XOR, then try to apply the distributive law. This may eliminate
7881 operations if either branch can be simplified because of the AND.
7882 It may also make some cases more complex, but those cases probably
7883 won't match a pattern either with or without this. */

7885 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7886 return
7887 gen_lowpart_for_combine
7888 (mode,
7889 apply_distributive_law
7890 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7891 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7892 XEXP (varop, 0), constop),
7893 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7894 XEXP (varop, 1), constop))));
7895
230d793d
RS
7896 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7897 if we already had one (just check for the simplest cases). */
7898 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7899 && GET_MODE (XEXP (x, 0)) == mode
7900 && SUBREG_REG (XEXP (x, 0)) == varop)
7901 varop = XEXP (x, 0);
7902 else
7903 varop = gen_lowpart_for_combine (mode, varop);
7904
0f41302f 7905 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7906 if (GET_CODE (varop) == CLOBBER)
7907 return x ? x : varop;
7908
7909 /* If we are only masking insignificant bits, return VAROP. */
951553af 7910 if (constop == nonzero)
230d793d
RS
7911 x = varop;
7912
7913 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7914 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7915 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7916
7917 else
7918 {
 /* Reuse the existing AND rtx, updating only the operands that
    changed (SUBST records the substitutions for undo).  */
7919 if (GET_CODE (XEXP (x, 1)) != CONST_INT
e51712db 7920 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7921 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7922
7923 SUBST (XEXP (x, 0), varop);
7924 }
7925
7926 return x;
7927}
7928\f
b3728b0e
JW
7929/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7930 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7931 is less useful. We can't allow both, because that results in exponential
956d6950 7932 run time recursion. There is a nullstone testcase that triggered
b3728b0e
JW
7933 this. This macro avoids accidental uses of num_sign_bit_copies. */
7934#define num_sign_bit_copies()
7935
230d793d
RS
7936/* Given an expression, X, compute which bits in X can be non-zero.
7937 We don't care about bits outside of those defined in MODE.
7938
7939 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7940 a shift, AND, or zero_extract, we can do better. */
7941
5f4f0e22 7942static unsigned HOST_WIDE_INT
951553af 7943nonzero_bits (x, mode)
230d793d
RS
7944 rtx x;
7945 enum machine_mode mode;
7946{
951553af
RK
7947 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7948 unsigned HOST_WIDE_INT inner_nz;
230d793d 7949 enum rtx_code code;
770ae6cc 7950 unsigned int mode_width = GET_MODE_BITSIZE (mode);
230d793d
RS
7951 rtx tem;
7952
1c75dfa4
RK
7953 /* For floating-point values, assume all bits are needed. */
7954 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7955 return nonzero;
7956
230d793d
RS
7957 /* If X is wider than MODE, use its mode instead. */
7958 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7959 {
7960 mode = GET_MODE (x);
951553af 7961 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7962 mode_width = GET_MODE_BITSIZE (mode);
7963 }
7964
5f4f0e22 7965 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7966 /* Our only callers in this case look for single bit values. So
7967 just return the mode mask. Those tests will then be false. */
951553af 7968 return nonzero;
230d793d 7969
8baf60bb 7970#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7971 /* If MODE is wider than X, but both are a single word for both the host
663522cb 7972 and target machines, we can compute this from which bits of the
0840fd91
RK
7973 object might be nonzero in its own mode, taking into account the fact
7974 that on many CISC machines, accessing an object in a wider mode
7975 causes the high-order bits to become undefined. So they are
7976 not known to be zero. */
7977
7978 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7979 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7980 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7981 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7982 {
7983 nonzero &= nonzero_bits (x, GET_MODE (x));
663522cb 7984 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
0840fd91
RK
7985 return nonzero;
7986 }
7987#endif
7988
230d793d
RS
7989 code = GET_CODE (x);
7990 switch (code)
7991 {
7992 case REG:
320dd7a7
RK
7993#ifdef POINTERS_EXTEND_UNSIGNED
7994 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7995 all the bits above ptr_mode are known to be zero. */
7996 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7997 && REGNO_POINTER_FLAG (REGNO (x)))
7998 nonzero &= GET_MODE_MASK (ptr_mode);
7999#endif
8000
b0d71df9
RK
8001#ifdef STACK_BOUNDARY
8002 /* If this is the stack pointer, we may know something about its
8003 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
8004 stack to be momentarily aligned only to that amount, so we pick
8005 the least alignment. */
8006
ee49a9c7
JW
8007 /* We can't check for arg_pointer_rtx here, because it is not
8008 guaranteed to have as much alignment as the stack pointer.
8009 In particular, in the Irix6 n64 ABI, the stack has 128 bit
8010 alignment but the argument pointer has only 64 bit alignment. */
8011
0e9ff885
DM
8012 if ((x == frame_pointer_rtx
8013 || x == stack_pointer_rtx
8014 || x == hard_frame_pointer_rtx
8015 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
8016 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
8017#ifdef STACK_BIAS
8018 && !STACK_BIAS
663522cb 8019#endif
0e9ff885 8020 )
230d793d 8021 {
b0d71df9 8022 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
8023
8024#ifdef PUSH_ROUNDING
f73ad30e 8025 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
b0d71df9 8026 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
8027#endif
8028
320dd7a7
RK
8029 /* We must return here, otherwise we may get a worse result from
8030 one of the choices below. There is nothing useful below as
8031 far as the stack pointer is concerned. */
663522cb 8032 return nonzero &= ~(sp_alignment - 1);
230d793d 8033 }
b0d71df9 8034#endif
230d793d 8035
55310dad
RK
8036 /* If X is a register whose nonzero bits value is current, use it.
8037 Otherwise, if X is a register whose value we can find, use that
8038 value. Otherwise, use the previously-computed global nonzero bits
8039 for this register. */
8040
8041 if (reg_last_set_value[REGNO (x)] != 0
8042 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
8043 && (reg_last_set_label[REGNO (x)] == label_tick
8044 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8045 && REG_N_SETS (REGNO (x)) == 1
663522cb 8046 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
57cf50a4 8047 REGNO (x))))
55310dad
RK
8048 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8049 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
8050
8051 tem = get_last_value (x);
9afa3d54 8052
230d793d 8053 if (tem)
9afa3d54
RK
8054 {
8055#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8056 /* If X is narrower than MODE and TEM is a non-negative
8057 constant that would appear negative in the mode of X,
8058 sign-extend it for use in reg_nonzero_bits because some
8059 machines (maybe most) will actually do the sign-extension
663522cb 8060 and this is the conservative approach.
9afa3d54
RK
8061
8062 ??? For 2.5, try to tighten up the MD files in this regard
8063 instead of this kludge. */
8064
8065 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8066 && GET_CODE (tem) == CONST_INT
8067 && INTVAL (tem) > 0
8068 && 0 != (INTVAL (tem)
8069 & ((HOST_WIDE_INT) 1
9e69be8c 8070 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
8071 tem = GEN_INT (INTVAL (tem)
8072 | ((HOST_WIDE_INT) (-1)
8073 << GET_MODE_BITSIZE (GET_MODE (x))));
8074#endif
8075 return nonzero_bits (tem, mode);
8076 }
951553af
RK
8077 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8078 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 8079 else
951553af 8080 return nonzero;
230d793d
RS
8081
8082 case CONST_INT:
9afa3d54
RK
8083#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8084 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
8085 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8086 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8087 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
8088#endif
8089
230d793d
RS
8090 return INTVAL (x);
8091
230d793d 8092 case MEM:
8baf60bb 8093#ifdef LOAD_EXTEND_OP
230d793d
RS
8094 /* In many, if not most, RISC machines, reading a byte from memory
8095 zeros the rest of the register. Noticing that fact saves a lot
8096 of extra zero-extends. */
8baf60bb
RK
8097 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8098 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 8099#endif
8baf60bb 8100 break;
230d793d 8101
230d793d
RS
8102 case EQ: case NE:
8103 case GT: case GTU:
8104 case LT: case LTU:
8105 case GE: case GEU:
8106 case LE: case LEU:
3f508eca 8107
c6965c0f
RK
8108 /* If this produces an integer result, we know which bits are set.
8109 Code here used to clear bits outside the mode of X, but that is
8110 now done above. */
230d793d 8111
c6965c0f
RK
8112 if (GET_MODE_CLASS (mode) == MODE_INT
8113 && mode_width <= HOST_BITS_PER_WIDE_INT)
8114 nonzero = STORE_FLAG_VALUE;
230d793d 8115 break;
230d793d 8116
230d793d 8117 case NEG:
b3728b0e
JW
8118#if 0
8119 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8120 and num_sign_bit_copies. */
d0ab8cd3
RK
8121 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8122 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8123 nonzero = 1;
b3728b0e 8124#endif
230d793d
RS
8125
8126 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
663522cb 8127 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
230d793d 8128 break;
d0ab8cd3
RK
8129
8130 case ABS:
b3728b0e
JW
8131#if 0
8132 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8133 and num_sign_bit_copies. */
d0ab8cd3
RK
8134 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8135 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8136 nonzero = 1;
b3728b0e 8137#endif
d0ab8cd3 8138 break;
230d793d
RS
8139
8140 case TRUNCATE:
951553af 8141 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
8142 break;
8143
8144 case ZERO_EXTEND:
951553af 8145 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 8146 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 8147 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
8148 break;
8149
8150 case SIGN_EXTEND:
8151 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8152 Otherwise, show all the bits in the outer mode but not the inner
8153 may be non-zero. */
951553af 8154 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
8155 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8156 {
951553af 8157 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
e3da301d
MS
8158 if (inner_nz
8159 & (((HOST_WIDE_INT) 1
8160 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 8161 inner_nz |= (GET_MODE_MASK (mode)
663522cb 8162 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
230d793d
RS
8163 }
8164
951553af 8165 nonzero &= inner_nz;
230d793d
RS
8166 break;
8167
8168 case AND:
951553af
RK
8169 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8170 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8171 break;
8172
d0ab8cd3
RK
8173 case XOR: case IOR:
8174 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
8175 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8176 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8177 break;
8178
8179 case PLUS: case MINUS:
8180 case MULT:
8181 case DIV: case UDIV:
8182 case MOD: case UMOD:
8183 /* We can apply the rules of arithmetic to compute the number of
8184 high- and low-order zero bits of these operations. We start by
8185 computing the width (position of the highest-order non-zero bit)
8186 and the number of low-order zero bits for each value. */
8187 {
951553af
RK
8188 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8189 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8190 int width0 = floor_log2 (nz0) + 1;
8191 int width1 = floor_log2 (nz1) + 1;
8192 int low0 = floor_log2 (nz0 & -nz0);
8193 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
8194 HOST_WIDE_INT op0_maybe_minusp
8195 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8196 HOST_WIDE_INT op1_maybe_minusp
8197 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
770ae6cc 8198 unsigned int result_width = mode_width;
230d793d
RS
8199 int result_low = 0;
8200
8201 switch (code)
8202 {
8203 case PLUS:
0e9ff885
DM
8204#ifdef STACK_BIAS
8205 if (STACK_BIAS
663522cb
KH
8206 && (XEXP (x, 0) == stack_pointer_rtx
8207 || XEXP (x, 0) == frame_pointer_rtx)
8208 && GET_CODE (XEXP (x, 1)) == CONST_INT)
0e9ff885
DM
8209 {
8210 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8211
663522cb
KH
8212 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8213 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8214 width0 = floor_log2 (nz0) + 1;
8215 width1 = floor_log2 (nz1) + 1;
8216 low0 = floor_log2 (nz0 & -nz0);
8217 low1 = floor_log2 (nz1 & -nz1);
0e9ff885 8218 }
663522cb 8219#endif
230d793d
RS
8220 result_width = MAX (width0, width1) + 1;
8221 result_low = MIN (low0, low1);
8222 break;
8223 case MINUS:
8224 result_low = MIN (low0, low1);
8225 break;
8226 case MULT:
8227 result_width = width0 + width1;
8228 result_low = low0 + low1;
8229 break;
8230 case DIV:
8231 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8232 result_width = width0;
8233 break;
8234 case UDIV:
8235 result_width = width0;
8236 break;
8237 case MOD:
8238 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8239 result_width = MIN (width0, width1);
8240 result_low = MIN (low0, low1);
8241 break;
8242 case UMOD:
8243 result_width = MIN (width0, width1);
8244 result_low = MIN (low0, low1);
8245 break;
e9a25f70
JL
8246 default:
8247 abort ();
230d793d
RS
8248 }
8249
8250 if (result_width < mode_width)
951553af 8251 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
8252
8253 if (result_low > 0)
663522cb 8254 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
8255 }
8256 break;
8257
8258 case ZERO_EXTRACT:
8259 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 8260 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 8261 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
8262 break;
8263
8264 case SUBREG:
c3c2cb37
RK
8265 /* If this is a SUBREG formed for a promoted variable that has
8266 been zero-extended, we know that at least the high-order bits
8267 are zero, though others might be too. */
8268
8269 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
8270 nonzero = (GET_MODE_MASK (GET_MODE (x))
8271 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 8272
230d793d
RS
8273 /* If the inner mode is a single word for both the host and target
8274 machines, we can compute this from which bits of the inner
951553af 8275 object might be nonzero. */
230d793d 8276 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
8277 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8278 <= HOST_BITS_PER_WIDE_INT))
230d793d 8279 {
951553af 8280 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb 8281
b52ce03d
R
8282#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8283 /* If this is a typical RISC machine, we only have to worry
8284 about the way loads are extended. */
8285 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
729a2125
RK
8286 ? (((nonzero
8287 & (((unsigned HOST_WIDE_INT) 1
8288 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8289 != 0))
b52ce03d 8290 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
230d793d 8291#endif
b52ce03d
R
8292 {
8293 /* On many CISC machines, accessing an object in a wider mode
8294 causes the high-order bits to become undefined. So they are
8295 not known to be zero. */
8296 if (GET_MODE_SIZE (GET_MODE (x))
8297 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8298 nonzero |= (GET_MODE_MASK (GET_MODE (x))
663522cb 8299 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
b52ce03d 8300 }
230d793d
RS
8301 }
8302 break;
8303
8304 case ASHIFTRT:
8305 case LSHIFTRT:
8306 case ASHIFT:
230d793d 8307 case ROTATE:
951553af 8308 /* The nonzero bits are in two classes: any bits within MODE
230d793d 8309 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 8310 nonzero bits are those that are significant in the operand of
230d793d
RS
8311 the shift when shifted the appropriate number of bits. This
8312 shows that high-order bits are cleared by the right shift and
8313 low-order bits by left shifts. */
8314 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8315 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 8316 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8317 {
8318 enum machine_mode inner_mode = GET_MODE (x);
770ae6cc 8319 unsigned int width = GET_MODE_BITSIZE (inner_mode);
230d793d 8320 int count = INTVAL (XEXP (x, 1));
5f4f0e22 8321 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
8322 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8323 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 8324 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
8325
8326 if (mode_width > width)
663522cb 8327 outer = (op_nonzero & nonzero & ~mode_mask);
230d793d
RS
8328
8329 if (code == LSHIFTRT)
8330 inner >>= count;
8331 else if (code == ASHIFTRT)
8332 {
8333 inner >>= count;
8334
951553af 8335 /* If the sign bit may have been nonzero before the shift, we
230d793d 8336 need to mark all the places it could have been copied to
951553af 8337 by the shift as possibly nonzero. */
5f4f0e22
CH
8338 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8339 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 8340 }
45620ed4 8341 else if (code == ASHIFT)
230d793d
RS
8342 inner <<= count;
8343 else
8344 inner = ((inner << (count % width)
8345 | (inner >> (width - (count % width)))) & mode_mask);
8346
951553af 8347 nonzero &= (outer | inner);
230d793d
RS
8348 }
8349 break;
8350
8351 case FFS:
8352 /* This is at most the number of bits in the mode. */
951553af 8353 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 8354 break;
d0ab8cd3
RK
8355
8356 case IF_THEN_ELSE:
951553af
RK
8357 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8358 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 8359 break;
663522cb 8360
e9a25f70
JL
8361 default:
8362 break;
230d793d
RS
8363 }
8364
951553af 8365 return nonzero;
230d793d 8366}
b3728b0e
JW
8367
8368/* See the macro definition above. */
8369#undef num_sign_bit_copies
230d793d 8370\f
d0ab8cd3 8371/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
8372 be equal to the sign bit. X will be used in mode MODE; if MODE is
8373 VOIDmode, X will be used in its own mode. The returned value will always
8374 be between 1 and the number of bits in MODE. */
d0ab8cd3 8375
770ae6cc 8376static unsigned int
d0ab8cd3
RK
8377num_sign_bit_copies (x, mode)
8378 rtx x;
8379 enum machine_mode mode;
8380{
8381 enum rtx_code code = GET_CODE (x);
770ae6cc 8382 unsigned int bitwidth;
d0ab8cd3 8383 int num0, num1, result;
951553af 8384 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
8385 rtx tem;
8386
8387 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
8388 VOIDmode, we don't know anything. Likewise if one of the modes is
8389 floating-point. */
d0ab8cd3
RK
8390
8391 if (mode == VOIDmode)
8392 mode = GET_MODE (x);
8393
1c75dfa4 8394 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 8395 return 1;
d0ab8cd3
RK
8396
8397 bitwidth = GET_MODE_BITSIZE (mode);
8398
0f41302f 8399 /* For a smaller object, just ignore the high bits. */
312def2e 8400 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
770ae6cc
RK
8401 {
8402 num0 = num_sign_bit_copies (x, GET_MODE (x));
8403 return MAX (1,
8404 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8405 }
663522cb 8406
e9a25f70
JL
8407 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8408 {
0c314d1a
RK
8409#ifndef WORD_REGISTER_OPERATIONS
8410 /* If this machine does not do all register operations on the entire
8411 register and MODE is wider than the mode of X, we can say nothing
8412 at all about the high-order bits. */
e9a25f70
JL
8413 return 1;
8414#else
8415 /* Likewise on machines that do, if the mode of the object is smaller
8416 than a word and loads of that size don't sign extend, we can say
8417 nothing about the high order bits. */
8418 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8419#ifdef LOAD_EXTEND_OP
8420 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8421#endif
8422 )
8423 return 1;
0c314d1a 8424#endif
e9a25f70 8425 }
0c314d1a 8426
d0ab8cd3
RK
8427 switch (code)
8428 {
8429 case REG:
55310dad 8430
ff0dbdd1
RK
8431#ifdef POINTERS_EXTEND_UNSIGNED
8432 /* If pointers extend signed and this is a pointer in Pmode, say that
8433 all the bits above ptr_mode are known to be sign bit copies. */
8434 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8435 && REGNO_POINTER_FLAG (REGNO (x)))
8436 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8437#endif
8438
55310dad
RK
8439 if (reg_last_set_value[REGNO (x)] != 0
8440 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
8441 && (reg_last_set_label[REGNO (x)] == label_tick
8442 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8443 && REG_N_SETS (REGNO (x)) == 1
8444 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8445 REGNO (x))))
55310dad
RK
8446 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8447 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3 8448
663522cb 8449 tem = get_last_value (x);
d0ab8cd3
RK
8450 if (tem != 0)
8451 return num_sign_bit_copies (tem, mode);
55310dad
RK
8452
8453 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8454 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
8455 break;
8456
457816e2 8457 case MEM:
8baf60bb 8458#ifdef LOAD_EXTEND_OP
457816e2 8459 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb 8460 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
770ae6cc
RK
8461 return MAX (1, ((int) bitwidth
8462 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
457816e2 8463#endif
8baf60bb 8464 break;
457816e2 8465
d0ab8cd3
RK
8466 case CONST_INT:
8467 /* If the constant is negative, take its 1's complement and remask.
8468 Then see how many zero bits we have. */
951553af 8469 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 8470 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8471 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
663522cb 8472 nonzero = (~nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 8473
951553af 8474 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8475
8476 case SUBREG:
c3c2cb37
RK
8477 /* If this is a SUBREG for a promoted object that is sign-extended
8478 and we are looking at it in a wider mode, we know that at least the
8479 high-order bits are known to be sign bit copies. */
8480
8481 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
770ae6cc
RK
8482 {
8483 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8484 return MAX ((int) bitwidth
8485 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8486 num0);
8487 }
663522cb 8488
0f41302f 8489 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
8490 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8491 {
8492 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8493 return MAX (1, (num0
770ae6cc
RK
8494 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8495 - bitwidth)));
d0ab8cd3 8496 }
457816e2 8497
8baf60bb 8498#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 8499#ifdef LOAD_EXTEND_OP
8baf60bb
RK
8500 /* For paradoxical SUBREGs on machines where all register operations
8501 affect the entire register, just look inside. Note that we are
8502 passing MODE to the recursive call, so the number of sign bit copies
8503 will remain relative to that mode, not the inner mode. */
457816e2 8504
2aec5b7a
JW
8505 /* This works only if loads sign extend. Otherwise, if we get a
8506 reload for the inner part, it may be loaded from the stack, and
8507 then we lose all sign bit copies that existed before the store
8508 to the stack. */
8509
8510 if ((GET_MODE_SIZE (GET_MODE (x))
8511 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8512 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 8513 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 8514#endif
457816e2 8515#endif
d0ab8cd3
RK
8516 break;
8517
8518 case SIGN_EXTRACT:
8519 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
770ae6cc 8520 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
d0ab8cd3
RK
8521 break;
8522
663522cb 8523 case SIGN_EXTEND:
d0ab8cd3
RK
8524 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8525 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8526
8527 case TRUNCATE:
0f41302f 8528 /* For a smaller object, just ignore the high bits. */
d0ab8cd3 8529 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
770ae6cc
RK
8530 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8531 - bitwidth)));
d0ab8cd3
RK
8532
8533 case NOT:
8534 return num_sign_bit_copies (XEXP (x, 0), mode);
8535
8536 case ROTATE: case ROTATERT:
8537 /* If we are rotating left by a number of bits less than the number
8538 of sign bit copies, we can just subtract that amount from the
8539 number. */
8540 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8541 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
8542 {
8543 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8544 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
770ae6cc 8545 : (int) bitwidth - INTVAL (XEXP (x, 1))));
d0ab8cd3
RK
8546 }
8547 break;
8548
8549 case NEG:
8550 /* In general, this subtracts one sign bit copy. But if the value
8551 is known to be positive, the number of sign bit copies is the
951553af
RK
8552 same as that of the input. Finally, if the input has just one bit
8553 that might be nonzero, all the bits are copies of the sign bit. */
70186b34
BS
8554 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8555 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8556 return num0 > 1 ? num0 - 1 : 1;
8557
951553af
RK
8558 nonzero = nonzero_bits (XEXP (x, 0), mode);
8559 if (nonzero == 1)
d0ab8cd3
RK
8560 return bitwidth;
8561
d0ab8cd3 8562 if (num0 > 1
951553af 8563 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
8564 num0--;
8565
8566 return num0;
8567
8568 case IOR: case AND: case XOR:
8569 case SMIN: case SMAX: case UMIN: case UMAX:
8570 /* Logical operations will preserve the number of sign-bit copies.
8571 MIN and MAX operations always return one of the operands. */
8572 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8573 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8574 return MIN (num0, num1);
8575
8576 case PLUS: case MINUS:
8577 /* For addition and subtraction, we can have a 1-bit carry. However,
8578 if we are subtracting 1 from a positive number, there will not
8579 be such a carry. Furthermore, if the positive number is known to
8580 be 0 or 1, we know the result is either -1 or 0. */
8581
3e3ea975 8582 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 8583 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 8584 {
951553af
RK
8585 nonzero = nonzero_bits (XEXP (x, 0), mode);
8586 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8587 return (nonzero == 1 || nonzero == 0 ? bitwidth
8588 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8589 }
8590
8591 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8592 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8593 return MAX (1, MIN (num0, num1) - 1);
663522cb 8594
d0ab8cd3
RK
8595 case MULT:
8596 /* The number of bits of the product is the sum of the number of
8597 bits of both terms. However, unless one of the terms if known
8598 to be positive, we must allow for an additional bit since negating
8599 a negative number can remove one sign bit copy. */
8600
8601 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8602 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8603
8604 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8605 if (result > 0
70186b34
BS
8606 && (bitwidth > HOST_BITS_PER_WIDE_INT
8607 || (((nonzero_bits (XEXP (x, 0), mode)
8608 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8609 && ((nonzero_bits (XEXP (x, 1), mode)
8610 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
d0ab8cd3
RK
8611 result--;
8612
8613 return MAX (1, result);
8614
8615 case UDIV:
70186b34
BS
8616 /* The result must be <= the first operand. If the first operand
8617 has the high bit set, we know nothing about the number of sign
8618 bit copies. */
8619 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8620 return 1;
8621 else if ((nonzero_bits (XEXP (x, 0), mode)
8622 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8623 return 1;
8624 else
8625 return num_sign_bit_copies (XEXP (x, 0), mode);
663522cb 8626
d0ab8cd3
RK
8627 case UMOD:
8628 /* The result must be <= the scond operand. */
8629 return num_sign_bit_copies (XEXP (x, 1), mode);
8630
8631 case DIV:
8632 /* Similar to unsigned division, except that we have to worry about
8633 the case where the divisor is negative, in which case we have
8634 to add 1. */
8635 result = num_sign_bit_copies (XEXP (x, 0), mode);
8636 if (result > 1
70186b34
BS
8637 && (bitwidth > HOST_BITS_PER_WIDE_INT
8638 || (nonzero_bits (XEXP (x, 1), mode)
8639 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8640 result--;
d0ab8cd3
RK
8641
8642 return result;
8643
8644 case MOD:
8645 result = num_sign_bit_copies (XEXP (x, 1), mode);
8646 if (result > 1
70186b34
BS
8647 && (bitwidth > HOST_BITS_PER_WIDE_INT
8648 || (nonzero_bits (XEXP (x, 1), mode)
8649 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8650 result--;
d0ab8cd3
RK
8651
8652 return result;
8653
8654 case ASHIFTRT:
8655 /* Shifts by a constant add to the number of bits equal to the
8656 sign bit. */
8657 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8658 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8659 && INTVAL (XEXP (x, 1)) > 0)
8660 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8661
8662 return num0;
8663
8664 case ASHIFT:
d0ab8cd3
RK
8665 /* Left shifts destroy copies. */
8666 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8667 || INTVAL (XEXP (x, 1)) < 0
8668 || INTVAL (XEXP (x, 1)) >= bitwidth)
8669 return 1;
8670
8671 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8672 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8673
8674 case IF_THEN_ELSE:
8675 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8676 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8677 return MIN (num0, num1);
8678
d0ab8cd3
RK
8679 case EQ: case NE: case GE: case GT: case LE: case LT:
8680 case GEU: case GTU: case LEU: case LTU:
0802d516
RK
8681 if (STORE_FLAG_VALUE == -1)
8682 return bitwidth;
e9a25f70 8683 break;
663522cb 8684
e9a25f70
JL
8685 default:
8686 break;
d0ab8cd3
RK
8687 }
8688
8689 /* If we haven't been able to figure it out by one of the above rules,
8690 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
8691 count those bits and return one less than that amount. If we can't
8692 safely compute the mask for this mode, always return BITWIDTH. */
8693
8694 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8695 return 1;
d0ab8cd3 8696
951553af 8697 nonzero = nonzero_bits (x, mode);
df6f4086 8698 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8699 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8700}
8701\f
1a26b032
RK
8702/* Return the number of "extended" bits there are in X, when interpreted
8703 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8704 unsigned quantities, this is the number of high-order zero bits.
8705 For signed quantities, this is the number of copies of the sign bit
8706 minus 1. In both case, this function returns the number of "spare"
8707 bits. For example, if two quantities for which this function returns
8708 at least 1 are added, the addition is known not to overflow.
8709
8710 This function will always return 0 unless called during combine, which
8711 implies that it must be called from a define_split. */
8712
770ae6cc 8713unsigned int
1a26b032
RK
8714extended_count (x, mode, unsignedp)
8715 rtx x;
8716 enum machine_mode mode;
8717 int unsignedp;
8718{
951553af 8719 if (nonzero_sign_valid == 0)
1a26b032
RK
8720 return 0;
8721
8722 return (unsignedp
ac49a949 8723 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
770ae6cc
RK
8724 ? (GET_MODE_BITSIZE (mode) - 1
8725 - floor_log2 (nonzero_bits (x, mode)))
8726 : 0)
1a26b032
RK
8727 : num_sign_bit_copies (x, mode) - 1);
8728}
8729\f
230d793d
RS
8730/* This function is called from `simplify_shift_const' to merge two
8731 outer operations. Specifically, we have already found that we need
8732 to perform operation *POP0 with constant *PCONST0 at the outermost
8733 position. We would now like to also perform OP1 with constant CONST1
8734 (with *POP0 being done last).
8735
8736 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
663522cb 8737 the resulting operation. *PCOMP_P is set to 1 if we would need to
230d793d
RS
8738 complement the innermost operand, otherwise it is unchanged.
8739
8740 MODE is the mode in which the operation will be done. No bits outside
8741 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8742 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8743
8744 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8745 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8746 result is simply *PCONST0.
8747
8748 If the resulting operation cannot be expressed as one operation, we
8749 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8750
8751static int
8752merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8753 enum rtx_code *pop0;
5f4f0e22 8754 HOST_WIDE_INT *pconst0;
230d793d 8755 enum rtx_code op1;
5f4f0e22 8756 HOST_WIDE_INT const1;
230d793d
RS
8757 enum machine_mode mode;
8758 int *pcomp_p;
8759{
8760 enum rtx_code op0 = *pop0;
5f4f0e22 8761 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
8762
8763 const0 &= GET_MODE_MASK (mode);
8764 const1 &= GET_MODE_MASK (mode);
8765
8766 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8767 if (op0 == AND)
8768 const1 &= const0;
8769
8770 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8771 if OP0 is SET. */
8772
8773 if (op1 == NIL || op0 == SET)
8774 return 1;
8775
8776 else if (op0 == NIL)
8777 op0 = op1, const0 = const1;
8778
8779 else if (op0 == op1)
8780 {
8781 switch (op0)
8782 {
8783 case AND:
8784 const0 &= const1;
8785 break;
8786 case IOR:
8787 const0 |= const1;
8788 break;
8789 case XOR:
8790 const0 ^= const1;
8791 break;
8792 case PLUS:
8793 const0 += const1;
8794 break;
8795 case NEG:
8796 op0 = NIL;
8797 break;
e9a25f70
JL
8798 default:
8799 break;
230d793d
RS
8800 }
8801 }
8802
8803 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8804 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8805 return 0;
8806
8807 /* If the two constants aren't the same, we can't do anything. The
8808 remaining six cases can all be done. */
8809 else if (const0 != const1)
8810 return 0;
8811
8812 else
8813 switch (op0)
8814 {
8815 case IOR:
8816 if (op1 == AND)
8817 /* (a & b) | b == b */
8818 op0 = SET;
8819 else /* op1 == XOR */
8820 /* (a ^ b) | b == a | b */
b729186a 8821 {;}
230d793d
RS
8822 break;
8823
8824 case XOR:
8825 if (op1 == AND)
8826 /* (a & b) ^ b == (~a) & b */
8827 op0 = AND, *pcomp_p = 1;
8828 else /* op1 == IOR */
8829 /* (a | b) ^ b == a & ~b */
663522cb 8830 op0 = AND, *pconst0 = ~const0;
230d793d
RS
8831 break;
8832
8833 case AND:
8834 if (op1 == IOR)
8835 /* (a | b) & b == b */
8836 op0 = SET;
8837 else /* op1 == XOR */
8838 /* (a ^ b) & b) == (~a) & b */
8839 *pcomp_p = 1;
8840 break;
e9a25f70
JL
8841 default:
8842 break;
230d793d
RS
8843 }
8844
8845 /* Check for NO-OP cases. */
8846 const0 &= GET_MODE_MASK (mode);
8847 if (const0 == 0
8848 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8849 op0 = NIL;
8850 else if (const0 == 0 && op0 == AND)
8851 op0 = SET;
e51712db
KG
8852 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8853 && op0 == AND)
230d793d
RS
8854 op0 = NIL;
8855
7e4ce834
RH
8856 /* ??? Slightly redundant with the above mask, but not entirely.
8857 Moving this above means we'd have to sign-extend the mode mask
8858 for the final test. */
8859 const0 = trunc_int_for_mode (const0, mode);
9fa6d012 8860
230d793d
RS
8861 *pop0 = op0;
8862 *pconst0 = const0;
8863
8864 return 1;
8865}
8866\f
8867/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8868 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8869 that we started with.
8870
8871 The shift is normally computed in the widest mode we find in VAROP, as
8872 long as it isn't a different number of words than RESULT_MODE. Exceptions
8873 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8874
8875static rtx
770ae6cc 8876simplify_shift_const (x, code, result_mode, varop, input_count)
230d793d
RS
8877 rtx x;
8878 enum rtx_code code;
8879 enum machine_mode result_mode;
8880 rtx varop;
770ae6cc 8881 int input_count;
230d793d
RS
8882{
8883 enum rtx_code orig_code = code;
770ae6cc
RK
8884 int orig_count = input_count;
8885 unsigned int count;
8886 int signed_count;
230d793d
RS
8887 enum machine_mode mode = result_mode;
8888 enum machine_mode shift_mode, tmode;
770ae6cc 8889 unsigned int mode_words
230d793d
RS
8890 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8891 /* We form (outer_op (code varop count) (outer_const)). */
8892 enum rtx_code outer_op = NIL;
c4e861e8 8893 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8894 rtx const_rtx;
8895 int complement_p = 0;
8896 rtx new;
8897
8898 /* If we were given an invalid count, don't do anything except exactly
8899 what was requested. */
8900
770ae6cc 8901 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
230d793d
RS
8902 {
8903 if (x)
8904 return x;
8905
770ae6cc 8906 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
230d793d
RS
8907 }
8908
770ae6cc
RK
8909 count = input_count;
8910
853d8828
RH
8911 /* Make sure and truncate the "natural" shift on the way in. We don't
8912 want to do this inside the loop as it makes it more difficult to
8913 combine shifts. */
8914#ifdef SHIFT_COUNT_TRUNCATED
8915 if (SHIFT_COUNT_TRUNCATED)
8916 count %= GET_MODE_BITSIZE (mode);
8917#endif
8918
230d793d
RS
8919 /* Unless one of the branches of the `if' in this loop does a `continue',
8920 we will `break' the loop after the `if'. */
8921
8922 while (count != 0)
8923 {
8924 /* If we have an operand of (clobber (const_int 0)), just return that
8925 value. */
8926 if (GET_CODE (varop) == CLOBBER)
8927 return varop;
8928
8929 /* If we discovered we had to complement VAROP, leave. Making a NOT
8930 here would cause an infinite loop. */
8931 if (complement_p)
8932 break;
8933
abc95ed3 8934 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8935 if (code == ROTATERT)
8936 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8937
230d793d 8938 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8939 shift is a right shift or a ROTATE, we must always do it in the mode
8940 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8941 widest mode encountered. */
f6789c77
RK
8942 shift_mode
8943 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8944 ? result_mode : mode);
230d793d
RS
8945
8946 /* Handle cases where the count is greater than the size of the mode
853d8828
RH
8947 minus 1. For ASHIFT, use the size minus one as the count (this can
8948 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8949 take the count modulo the size. For other shifts, the result is
8950 zero.
230d793d
RS
8951
8952 Since these shifts are being produced by the compiler by combining
8953 multiple operations, each of which are defined, we know what the
8954 result is supposed to be. */
663522cb 8955
230d793d
RS
8956 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8957 {
8958 if (code == ASHIFTRT)
8959 count = GET_MODE_BITSIZE (shift_mode) - 1;
8960 else if (code == ROTATE || code == ROTATERT)
8961 count %= GET_MODE_BITSIZE (shift_mode);
8962 else
8963 {
8964 /* We can't simply return zero because there may be an
8965 outer op. */
8966 varop = const0_rtx;
8967 count = 0;
8968 break;
8969 }
8970 }
8971
312def2e
RK
8972 /* An arithmetic right shift of a quantity known to be -1 or 0
8973 is a no-op. */
8974 if (code == ASHIFTRT
8975 && (num_sign_bit_copies (varop, shift_mode)
8976 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8977 {
312def2e
RK
8978 count = 0;
8979 break;
8980 }
d0ab8cd3 8981
312def2e
RK
8982 /* If we are doing an arithmetic right shift and discarding all but
8983 the sign bit copies, this is equivalent to doing a shift by the
8984 bitsize minus one. Convert it into that shift because it will often
8985 allow other simplifications. */
500c518b 8986
312def2e
RK
8987 if (code == ASHIFTRT
8988 && (count + num_sign_bit_copies (varop, shift_mode)
8989 >= GET_MODE_BITSIZE (shift_mode)))
8990 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8991
230d793d
RS
8992 /* We simplify the tests below and elsewhere by converting
8993 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8994 `make_compound_operation' will convert it to a ASHIFTRT for
8995 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8996 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8997 && code == ASHIFTRT
951553af 8998 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8999 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9000 == 0))
230d793d
RS
9001 code = LSHIFTRT;
9002
9003 switch (GET_CODE (varop))
9004 {
9005 case SIGN_EXTEND:
9006 case ZERO_EXTEND:
9007 case SIGN_EXTRACT:
9008 case ZERO_EXTRACT:
9009 new = expand_compound_operation (varop);
9010 if (new != varop)
9011 {
9012 varop = new;
9013 continue;
9014 }
9015 break;
9016
9017 case MEM:
9018 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9019 minus the width of a smaller mode, we can do this with a
9020 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9021 if ((code == ASHIFTRT || code == LSHIFTRT)
9022 && ! mode_dependent_address_p (XEXP (varop, 0))
9023 && ! MEM_VOLATILE_P (varop)
9024 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9025 MODE_INT, 1)) != BLKmode)
9026 {
f76b9db2 9027 if (BYTES_BIG_ENDIAN)
38a448ca 9028 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
f76b9db2 9029 else
38a448ca
RH
9030 new = gen_rtx_MEM (tmode,
9031 plus_constant (XEXP (varop, 0),
9032 count / BITS_PER_UNIT));
bf49b139 9033
c6df88cb 9034 MEM_COPY_ATTRIBUTES (new, varop);
230d793d
RS
9035 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9036 : ZERO_EXTEND, mode, new);
9037 count = 0;
9038 continue;
9039 }
9040 break;
9041
9042 case USE:
9043 /* Similar to the case above, except that we can only do this if
9044 the resulting mode is the same as that of the underlying
9045 MEM and adjust the address depending on the *bits* endianness
9046 because of the way that bit-field extract insns are defined. */
9047 if ((code == ASHIFTRT || code == LSHIFTRT)
9048 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9049 MODE_INT, 1)) != BLKmode
9050 && tmode == GET_MODE (XEXP (varop, 0)))
9051 {
f76b9db2
ILT
9052 if (BITS_BIG_ENDIAN)
9053 new = XEXP (varop, 0);
9054 else
9055 {
9056 new = copy_rtx (XEXP (varop, 0));
663522cb 9057 SUBST (XEXP (new, 0),
f76b9db2
ILT
9058 plus_constant (XEXP (new, 0),
9059 count / BITS_PER_UNIT));
9060 }
230d793d
RS
9061
9062 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9063 : ZERO_EXTEND, mode, new);
9064 count = 0;
9065 continue;
9066 }
9067 break;
9068
9069 case SUBREG:
9070 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9071 the same number of words as what we've seen so far. Then store
9072 the widest mode in MODE. */
f9e67232
RS
9073 if (subreg_lowpart_p (varop)
9074 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9075 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
9076 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9077 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9078 == mode_words))
9079 {
9080 varop = SUBREG_REG (varop);
9081 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9082 mode = GET_MODE (varop);
9083 continue;
9084 }
9085 break;
9086
9087 case MULT:
9088 /* Some machines use MULT instead of ASHIFT because MULT
9089 is cheaper. But it is still better on those machines to
9090 merge two shifts into one. */
9091 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9092 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9093 {
770ae6cc
RK
9094 varop
9095 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9096 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9097 continue;
9098 }
9099 break;
9100
9101 case UDIV:
9102 /* Similar, for when divides are cheaper. */
9103 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9104 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9105 {
770ae6cc
RK
9106 varop
9107 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9108 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9109 continue;
9110 }
9111 break;
9112
9113 case ASHIFTRT:
663522cb 9114 /* If we are extracting just the sign bit of an arithmetic right
230d793d
RS
9115 shift, that shift is not needed. */
9116 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
9117 {
9118 varop = XEXP (varop, 0);
9119 continue;
9120 }
9121
0f41302f 9122 /* ... fall through ... */
230d793d
RS
9123
9124 case LSHIFTRT:
9125 case ASHIFT:
230d793d
RS
9126 case ROTATE:
9127 /* Here we have two nested shifts. The result is usually the
9128 AND of a new shift with a mask. We compute the result below. */
9129 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9130 && INTVAL (XEXP (varop, 1)) >= 0
9131 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
9132 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9133 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
9134 {
9135 enum rtx_code first_code = GET_CODE (varop);
770ae6cc 9136 unsigned int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 9137 unsigned HOST_WIDE_INT mask;
230d793d 9138 rtx mask_rtx;
230d793d 9139
230d793d
RS
9140 /* We have one common special case. We can't do any merging if
9141 the inner code is an ASHIFTRT of a smaller mode. However, if
9142 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9143 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9144 we can convert it to
9145 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
9146 This simplifies certain SIGN_EXTEND operations. */
9147 if (code == ASHIFT && first_code == ASHIFTRT
9148 && (GET_MODE_BITSIZE (result_mode)
9149 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9150 {
9151 /* C3 has the low-order C1 bits zero. */
663522cb 9152
5f4f0e22 9153 mask = (GET_MODE_MASK (mode)
663522cb 9154 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 9155
5f4f0e22 9156 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 9157 XEXP (varop, 0), mask);
5f4f0e22 9158 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
9159 varop, count);
9160 count = first_count;
9161 code = ASHIFTRT;
9162 continue;
9163 }
663522cb 9164
d0ab8cd3
RK
9165 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9166 than C1 high-order bits equal to the sign bit, we can convert
9167 this to either an ASHIFT or a ASHIFTRT depending on the
663522cb 9168 two counts.
230d793d
RS
9169
9170 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9171
9172 if (code == ASHIFTRT && first_code == ASHIFT
9173 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
9174 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9175 > first_count))
230d793d 9176 {
d0ab8cd3 9177 varop = XEXP (varop, 0);
770ae6cc
RK
9178
9179 signed_count = count - first_count;
9180 if (signed_count < 0)
663522cb 9181 count = -signed_count, code = ASHIFT;
770ae6cc
RK
9182 else
9183 count = signed_count;
9184
d0ab8cd3 9185 continue;
230d793d
RS
9186 }
9187
9188 /* There are some cases we can't do. If CODE is ASHIFTRT,
9189 we can only do this if FIRST_CODE is also ASHIFTRT.
9190
9191 We can't do the case when CODE is ROTATE and FIRST_CODE is
9192 ASHIFTRT.
9193
9194 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 9195 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
9196
9197 Finally, we can't do any of these if the mode is too wide
9198 unless the codes are the same.
9199
9200 Handle the case where the shift codes are the same
9201 first. */
9202
9203 if (code == first_code)
9204 {
9205 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
9206 && (code == ASHIFTRT || code == LSHIFTRT
9207 || code == ROTATE))
230d793d
RS
9208 break;
9209
9210 count += first_count;
9211 varop = XEXP (varop, 0);
9212 continue;
9213 }
9214
9215 if (code == ASHIFTRT
9216 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 9217 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 9218 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
9219 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9220 || first_code == ROTATE
230d793d
RS
9221 || code == ROTATE)))
9222 break;
9223
9224 /* To compute the mask to apply after the shift, shift the
663522cb 9225 nonzero bits of the inner shift the same way the
230d793d
RS
9226 outer shift will. */
9227
951553af 9228 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
9229
9230 mask_rtx
9231 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 9232 GEN_INT (count));
663522cb 9233
230d793d
RS
9234 /* Give up if we can't compute an outer operation to use. */
9235 if (mask_rtx == 0
9236 || GET_CODE (mask_rtx) != CONST_INT
9237 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9238 INTVAL (mask_rtx),
9239 result_mode, &complement_p))
9240 break;
9241
9242 /* If the shifts are in the same direction, we add the
9243 counts. Otherwise, we subtract them. */
770ae6cc 9244 signed_count = count;
230d793d
RS
9245 if ((code == ASHIFTRT || code == LSHIFTRT)
9246 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
770ae6cc 9247 signed_count += first_count;
230d793d 9248 else
770ae6cc 9249 signed_count -= first_count;
230d793d 9250
663522cb 9251 /* If COUNT is positive, the new shift is usually CODE,
230d793d
RS
9252 except for the two exceptions below, in which case it is
9253 FIRST_CODE. If the count is negative, FIRST_CODE should
9254 always be used */
770ae6cc 9255 if (signed_count > 0
230d793d
RS
9256 && ((first_code == ROTATE && code == ASHIFT)
9257 || (first_code == ASHIFTRT && code == LSHIFTRT)))
770ae6cc
RK
9258 code = first_code, count = signed_count;
9259 else if (signed_count < 0)
663522cb 9260 code = first_code, count = -signed_count;
770ae6cc
RK
9261 else
9262 count = signed_count;
230d793d
RS
9263
9264 varop = XEXP (varop, 0);
9265 continue;
9266 }
9267
9268 /* If we have (A << B << C) for any shift, we can convert this to
9269 (A << C << B). This wins if A is a constant. Only try this if
9270 B is not a constant. */
9271
9272 else if (GET_CODE (varop) == code
9273 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9274 && 0 != (new
9275 = simplify_binary_operation (code, mode,
9276 XEXP (varop, 0),
5f4f0e22 9277 GEN_INT (count))))
230d793d
RS
9278 {
9279 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
9280 count = 0;
9281 continue;
9282 }
9283 break;
9284
9285 case NOT:
9286 /* Make this fit the case below. */
9287 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 9288 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
9289 continue;
9290
9291 case IOR:
9292 case AND:
9293 case XOR:
9294 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9295 with C the size of VAROP - 1 and the shift is logical if
9296 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9297 we have an (le X 0) operation. If we have an arithmetic shift
9298 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9299 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9300
9301 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9302 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9303 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9304 && (code == LSHIFTRT || code == ASHIFTRT)
9305 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9306 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9307 {
9308 count = 0;
9309 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
9310 const0_rtx);
9311
9312 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9313 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9314
9315 continue;
9316 }
9317
9318 /* If we have (shift (logical)), move the logical to the outside
9319 to allow it to possibly combine with another logical and the
9320 shift to combine with another shift. This also canonicalizes to
9321 what a ZERO_EXTRACT looks like. Also, some machines have
9322 (and (shift)) insns. */
9323
9324 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9325 && (new = simplify_binary_operation (code, result_mode,
9326 XEXP (varop, 1),
5f4f0e22 9327 GEN_INT (count))) != 0
663522cb 9328 && GET_CODE (new) == CONST_INT
230d793d
RS
9329 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9330 INTVAL (new), result_mode, &complement_p))
9331 {
9332 varop = XEXP (varop, 0);
9333 continue;
9334 }
9335
9336 /* If we can't do that, try to simplify the shift in each arm of the
9337 logical expression, make a new logical expression, and apply
9338 the inverse distributive law. */
9339 {
00d4ca1c 9340 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 9341 XEXP (varop, 0), count);
00d4ca1c 9342 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
9343 XEXP (varop, 1), count);
9344
21a64bf1 9345 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
9346 varop = apply_distributive_law (varop);
9347
9348 count = 0;
9349 }
9350 break;
9351
9352 case EQ:
45620ed4 9353 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 9354 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
9355 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9356 that may be nonzero. */
9357 if (code == LSHIFTRT
230d793d
RS
9358 && XEXP (varop, 1) == const0_rtx
9359 && GET_MODE (XEXP (varop, 0)) == result_mode
9360 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 9361 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
 9362	      && ((STORE_FLAG_VALUE
663522cb 9363		   & ((HOST_WIDE_INT) 1
770ae6cc 9364		      << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 9365 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9366 && merge_outer_ops (&outer_op, &outer_const, XOR,
9367 (HOST_WIDE_INT) 1, result_mode,
9368 &complement_p))
230d793d
RS
9369 {
9370 varop = XEXP (varop, 0);
9371 count = 0;
9372 continue;
9373 }
9374 break;
9375
9376 case NEG:
d0ab8cd3
RK
9377 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9378 than the number of bits in the mode is equivalent to A. */
9379 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 9380 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 9381 {
d0ab8cd3 9382 varop = XEXP (varop, 0);
230d793d
RS
9383 count = 0;
9384 continue;
9385 }
9386
9387 /* NEG commutes with ASHIFT since it is multiplication. Move the
9388 NEG outside to allow shifts to combine. */
9389 if (code == ASHIFT
5f4f0e22
CH
9390 && merge_outer_ops (&outer_op, &outer_const, NEG,
9391 (HOST_WIDE_INT) 0, result_mode,
9392 &complement_p))
230d793d
RS
9393 {
9394 varop = XEXP (varop, 0);
9395 continue;
9396 }
9397 break;
9398
9399 case PLUS:
d0ab8cd3
RK
9400 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9401 is one less than the number of bits in the mode is
9402 equivalent to (xor A 1). */
230d793d
RS
9403 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9404 && XEXP (varop, 1) == constm1_rtx
951553af 9405 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9406 && merge_outer_ops (&outer_op, &outer_const, XOR,
9407 (HOST_WIDE_INT) 1, result_mode,
9408 &complement_p))
230d793d
RS
9409 {
9410 count = 0;
9411 varop = XEXP (varop, 0);
9412 continue;
9413 }
9414
3f508eca 9415 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 9416 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
9417 bits are known zero in FOO, we can replace the PLUS with FOO.
9418 Similarly in the other operand order. This code occurs when
9419 we are computing the size of a variable-size array. */
9420
9421 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9422 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
9423 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9424 && (nonzero_bits (XEXP (varop, 1), result_mode)
9425 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
9426 {
9427 varop = XEXP (varop, 0);
9428 continue;
9429 }
9430 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9431 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9432 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9433 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9434 >> count)
951553af
RK
9435 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9436 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
9437 result_mode)))
9438 {
9439 varop = XEXP (varop, 1);
9440 continue;
9441 }
9442
230d793d
RS
9443 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9444 if (code == ASHIFT
9445 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9446 && (new = simplify_binary_operation (ASHIFT, result_mode,
9447 XEXP (varop, 1),
5f4f0e22 9448 GEN_INT (count))) != 0
770ae6cc 9449 && GET_CODE (new) == CONST_INT
230d793d
RS
9450 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9451 INTVAL (new), result_mode, &complement_p))
9452 {
9453 varop = XEXP (varop, 0);
9454 continue;
9455 }
9456 break;
9457
9458 case MINUS:
9459 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9460 with C the size of VAROP - 1 and the shift is logical if
9461 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9462 we have a (gt X 0) operation. If the shift is arithmetic with
9463 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9464 we have a (neg (gt X 0)) operation. */
9465
0802d516
RK
9466 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9467 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 9468 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
9469 && (code == LSHIFTRT || code == ASHIFTRT)
9470 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9471 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9472 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9473 {
9474 count = 0;
9475 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
9476 const0_rtx);
9477
9478 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9479 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9480
9481 continue;
9482 }
9483 break;
6e0ef100
JC
9484
9485 case TRUNCATE:
9486 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9487 if the truncate does not affect the value. */
9488 if (code == LSHIFTRT
9489 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9490 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9491 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
9492 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9493 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
9494 {
9495 rtx varop_inner = XEXP (varop, 0);
9496
770ae6cc
RK
9497 varop_inner
9498 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop_inner),
9499 XEXP (varop_inner, 0),
9500 GEN_INT (count
9501 + INTVAL (XEXP (varop_inner, 1))));
6e0ef100
JC
9502 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9503 varop_inner);
9504 count = 0;
9505 continue;
9506 }
9507 break;
663522cb 9508
e9a25f70
JL
9509 default:
9510 break;
230d793d
RS
9511 }
9512
9513 break;
9514 }
9515
9516 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
9517 a right shift or ROTATE, we must always do it in the mode it was
9518 originally done in. Otherwise, we can do it in MODE, the widest mode
9519 encountered. The code we care about is that of the shift that will
9520 actually be done, not the shift that was originally requested. */
9521 shift_mode
9522 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9523 ? result_mode : mode);
230d793d
RS
9524
9525 /* We have now finished analyzing the shift. The result should be
9526 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9527 OUTER_OP is non-NIL, it is an operation that needs to be applied
9528 to the result of the shift. OUTER_CONST is the relevant constant,
9529 but we must turn off all bits turned off in the shift.
9530
9531 If we were passed a value for X, see if we can use any pieces of
9532 it. If not, make new rtx. */
9533
9534 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9535 && GET_CODE (XEXP (x, 1)) == CONST_INT
9536 && INTVAL (XEXP (x, 1)) == count)
9537 const_rtx = XEXP (x, 1);
9538 else
5f4f0e22 9539 const_rtx = GEN_INT (count);
230d793d
RS
9540
9541 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9542 && GET_MODE (XEXP (x, 0)) == shift_mode
9543 && SUBREG_REG (XEXP (x, 0)) == varop)
9544 varop = XEXP (x, 0);
9545 else if (GET_MODE (varop) != shift_mode)
9546 varop = gen_lowpart_for_combine (shift_mode, varop);
9547
0f41302f 9548 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
9549 if (GET_CODE (varop) == CLOBBER)
9550 return x ? x : varop;
9551
9552 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9553 if (new != 0)
9554 x = new;
9555 else
9556 {
9557 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9558 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9559
9560 SUBST (XEXP (x, 0), varop);
9561 SUBST (XEXP (x, 1), const_rtx);
9562 }
9563
224eeff2
RK
9564 /* If we have an outer operation and we just made a shift, it is
9565 possible that we could have simplified the shift were it not
9566 for the outer operation. So try to do the simplification
9567 recursively. */
9568
9569 if (outer_op != NIL && GET_CODE (x) == code
9570 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9571 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9572 INTVAL (XEXP (x, 1)));
9573
230d793d
RS
9574 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9575 turn off all the bits that the shift would have turned off. */
9576 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9577 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d 9578 GET_MODE_MASK (result_mode) >> orig_count);
663522cb 9579
230d793d
RS
9580 /* Do the remainder of the processing in RESULT_MODE. */
9581 x = gen_lowpart_for_combine (result_mode, x);
9582
9583 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9584 operation. */
9585 if (complement_p)
0c1c8ea6 9586 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
9587
9588 if (outer_op != NIL)
9589 {
5f4f0e22 9590 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9591 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9592
9593 if (outer_op == AND)
5f4f0e22 9594 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9595 else if (outer_op == SET)
9596 /* This means that we have determined that the result is
9597 equivalent to a constant. This should be rare. */
5f4f0e22 9598 x = GEN_INT (outer_const);
230d793d 9599 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 9600 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 9601 else
5f4f0e22 9602 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9603 }
9604
9605 return x;
663522cb 9606}
230d793d
RS
9607\f
9608/* Like recog, but we receive the address of a pointer to a new pattern.
9609 We try to match the rtx that the pointer points to.
9610 If that fails, we may try to modify or replace the pattern,
9611 storing the replacement into the same pointer object.
9612
9613 Modifications include deletion or addition of CLOBBERs.
9614
9615 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9616 the CLOBBERs are placed.
9617
9618 The value is the final insn code from the pattern ultimately matched,
9619 or -1. */
9620
9621static int
8e2f6e35 9622recog_for_combine (pnewpat, insn, pnotes)
230d793d
RS
9623 rtx *pnewpat;
9624 rtx insn;
9625 rtx *pnotes;
9626{
9627 register rtx pat = *pnewpat;
9628 int insn_code_number;
9629 int num_clobbers_to_add = 0;
9630 int i;
9631 rtx notes = 0;
c1194d74 9632 rtx old_notes;
230d793d 9633
974f4146
RK
9634 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9635 we use to indicate that something didn't match. If we find such a
9636 thing, force rejection. */
d96023cf 9637 if (GET_CODE (pat) == PARALLEL)
974f4146 9638 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
9639 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9640 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
9641 return -1;
9642
c1194d74
JW
9643 /* Remove the old notes prior to trying to recognize the new pattern. */
9644 old_notes = REG_NOTES (insn);
9645 REG_NOTES (insn) = 0;
9646
230d793d
RS
9647 /* Is the result of combination a valid instruction? */
9648 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9649
9650 /* If it isn't, there is the possibility that we previously had an insn
9651 that clobbered some register as a side effect, but the combined
9652 insn doesn't need to do that. So try once more without the clobbers
9653 unless this represents an ASM insn. */
9654
9655 if (insn_code_number < 0 && ! check_asm_operands (pat)
9656 && GET_CODE (pat) == PARALLEL)
9657 {
9658 int pos;
9659
9660 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9661 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9662 {
9663 if (i != pos)
9664 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9665 pos++;
9666 }
9667
9668 SUBST_INT (XVECLEN (pat, 0), pos);
9669
9670 if (pos == 1)
9671 pat = XVECEXP (pat, 0, 0);
9672
9673 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9674 }
9675
c1194d74
JW
9676 REG_NOTES (insn) = old_notes;
9677
230d793d
RS
9678 /* If we had any clobbers to add, make a new pattern than contains
9679 them. Then check to make sure that all of them are dead. */
9680 if (num_clobbers_to_add)
9681 {
38a448ca
RH
9682 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9683 gen_rtvec (GET_CODE (pat) == PARALLEL
c5c76735
JL
9684 ? (XVECLEN (pat, 0)
9685 + num_clobbers_to_add)
38a448ca 9686 : num_clobbers_to_add + 1));
230d793d
RS
9687
9688 if (GET_CODE (pat) == PARALLEL)
9689 for (i = 0; i < XVECLEN (pat, 0); i++)
9690 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9691 else
9692 XVECEXP (newpat, 0, 0) = pat;
9693
9694 add_clobbers (newpat, insn_code_number);
9695
9696 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9697 i < XVECLEN (newpat, 0); i++)
9698 {
9699 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9700 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9701 return -1;
38a448ca
RH
9702 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9703 XEXP (XVECEXP (newpat, 0, i), 0), notes);
230d793d
RS
9704 }
9705 pat = newpat;
9706 }
9707
9708 *pnewpat = pat;
9709 *pnotes = notes;
9710
9711 return insn_code_number;
9712}
9713\f
9714/* Like gen_lowpart but for use by combine. In combine it is not possible
9715 to create any new pseudoregs. However, it is safe to create
9716 invalid memory addresses, because combine will try to recognize
9717 them and all they will do is make the combine attempt fail.
9718
9719 If for some reason this cannot do its job, an rtx
9720 (clobber (const_int 0)) is returned.
9721 An insn containing that will not be recognized. */
9722
9723#undef gen_lowpart
9724
9725static rtx
9726gen_lowpart_for_combine (mode, x)
9727 enum machine_mode mode;
9728 register rtx x;
9729{
9730 rtx result;
9731
9732 if (GET_MODE (x) == mode)
9733 return x;
9734
eae957a8
RK
9735 /* We can only support MODE being wider than a word if X is a
9736 constant integer or has a mode the same size. */
9737
9738 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9739 && ! ((GET_MODE (x) == VOIDmode
9740 && (GET_CODE (x) == CONST_INT
9741 || GET_CODE (x) == CONST_DOUBLE))
9742 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9743 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9744
9745 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9746 won't know what to do. So we will strip off the SUBREG here and
9747 process normally. */
9748 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9749 {
9750 x = SUBREG_REG (x);
9751 if (GET_MODE (x) == mode)
9752 return x;
9753 }
9754
9755 result = gen_lowpart_common (mode, x);
02188693 9756#ifdef CLASS_CANNOT_CHANGE_MODE
64bf47a2
RK
9757 if (result != 0
9758 && GET_CODE (result) == SUBREG
9759 && GET_CODE (SUBREG_REG (result)) == REG
9760 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
02188693
RH
9761 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9762 GET_MODE (SUBREG_REG (result))))
9763 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9764#endif
64bf47a2 9765
230d793d
RS
9766 if (result)
9767 return result;
9768
9769 if (GET_CODE (x) == MEM)
9770 {
9771 register int offset = 0;
9772 rtx new;
9773
9774 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9775 address. */
9776 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9777 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9778
9779 /* If we want to refer to something bigger than the original memref,
9780 generate a perverse subreg instead. That will force a reload
9781 of the original memref X. */
9782 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9783 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9784
f76b9db2
ILT
9785 if (WORDS_BIG_ENDIAN)
9786 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9787 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
c5c76735 9788
f76b9db2
ILT
9789 if (BYTES_BIG_ENDIAN)
9790 {
9791 /* Adjust the address so that the address-after-the-data is
9792 unchanged. */
9793 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9794 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9795 }
38a448ca 9796 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
c6df88cb 9797 MEM_COPY_ATTRIBUTES (new, x);
230d793d
RS
9798 return new;
9799 }
9800
9801 /* If X is a comparison operator, rewrite it in a new mode. This
9802 probably won't match, but may allow further simplifications. */
9803 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9804 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9805
9806 /* If we couldn't simplify X any other way, just enclose it in a
9807 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9808 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9809 else
dfbe1b2f
RK
9810 {
9811 int word = 0;
9812
9813 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9814 word = ((GET_MODE_SIZE (GET_MODE (x))
9815 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9816 / UNITS_PER_WORD);
38a448ca 9817 return gen_rtx_SUBREG (mode, x, word);
dfbe1b2f 9818 }
230d793d
RS
9819}
9820\f
9821/* Make an rtx expression. This is a subset of gen_rtx and only supports
9822 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9823
9824 If the identical expression was previously in the insn (in the undobuf),
9825 it will be returned. Only if it is not found will a new expression
9826 be made. */
9827
9828/*VARARGS2*/
9829static rtx
83d2b3b9 9830gen_rtx_combine VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9831{
5148a72b 9832#ifndef ANSI_PROTOTYPES
230d793d
RS
9833 enum rtx_code code;
9834 enum machine_mode mode;
4f90e4a0
RK
9835#endif
9836 va_list p;
230d793d
RS
9837 int n_args;
9838 rtx args[3];
b729186a 9839 int j;
6f7d635c 9840 const char *fmt;
230d793d 9841 rtx rt;
241cea85 9842 struct undo *undo;
230d793d 9843
4f90e4a0
RK
9844 VA_START (p, mode);
9845
5148a72b 9846#ifndef ANSI_PROTOTYPES
230d793d
RS
9847 code = va_arg (p, enum rtx_code);
9848 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9849#endif
9850
230d793d
RS
9851 n_args = GET_RTX_LENGTH (code);
9852 fmt = GET_RTX_FORMAT (code);
9853
9854 if (n_args == 0 || n_args > 3)
9855 abort ();
9856
9857 /* Get each arg and verify that it is supposed to be an expression. */
9858 for (j = 0; j < n_args; j++)
9859 {
9860 if (*fmt++ != 'e')
9861 abort ();
9862
9863 args[j] = va_arg (p, rtx);
9864 }
9865
f0305a2b
KG
9866 va_end (p);
9867
230d793d
RS
9868 /* See if this is in undobuf. Be sure we don't use objects that came
9869 from another insn; this could produce circular rtl structures. */
9870
241cea85
RK
9871 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9872 if (!undo->is_int
9873 && GET_CODE (undo->old_contents.r) == code
9874 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9875 {
9876 for (j = 0; j < n_args; j++)
241cea85 9877 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9878 break;
9879
9880 if (j == n_args)
241cea85 9881 return undo->old_contents.r;
230d793d
RS
9882 }
9883
9884 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9885 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9886 rt = rtx_alloc (code);
9887 PUT_MODE (rt, mode);
9888 XEXP (rt, 0) = args[0];
9889 if (n_args > 1)
9890 {
9891 XEXP (rt, 1) = args[1];
9892 if (n_args > 2)
9893 XEXP (rt, 2) = args[2];
9894 }
9895 return rt;
9896}
9897
9898/* These routines make binary and unary operations by first seeing if they
9899 fold; if not, a new expression is allocated. */
9900
9901static rtx
9902gen_binary (code, mode, op0, op1)
9903 enum rtx_code code;
9904 enum machine_mode mode;
9905 rtx op0, op1;
9906{
9907 rtx result;
1a26b032
RK
9908 rtx tem;
9909
9910 if (GET_RTX_CLASS (code) == 'c'
9911 && (GET_CODE (op0) == CONST_INT
9912 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9913 tem = op0, op0 = op1, op1 = tem;
230d793d 9914
663522cb 9915 if (GET_RTX_CLASS (code) == '<')
230d793d
RS
9916 {
9917 enum machine_mode op_mode = GET_MODE (op0);
9210df58 9918
663522cb 9919 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9920 just (REL_OP X Y). */
9210df58
RK
9921 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9922 {
9923 op1 = XEXP (op0, 1);
9924 op0 = XEXP (op0, 0);
9925 op_mode = GET_MODE (op0);
9926 }
9927
230d793d
RS
9928 if (op_mode == VOIDmode)
9929 op_mode = GET_MODE (op1);
9930 result = simplify_relational_operation (code, op_mode, op0, op1);
9931 }
9932 else
9933 result = simplify_binary_operation (code, mode, op0, op1);
9934
9935 if (result)
9936 return result;
9937
9938 /* Put complex operands first and constants second. */
9939 if (GET_RTX_CLASS (code) == 'c'
9940 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9941 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9942 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9943 || (GET_CODE (op0) == SUBREG
9944 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9945 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9946 return gen_rtx_combine (code, mode, op1, op0);
9947
e5e809f4
JL
9948 /* If we are turning off bits already known off in OP0, we need not do
9949 an AND. */
9950 else if (code == AND && GET_CODE (op1) == CONST_INT
9951 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 9952 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
e5e809f4
JL
9953 return op0;
9954
230d793d
RS
9955 return gen_rtx_combine (code, mode, op0, op1);
9956}
9957
9958static rtx
0c1c8ea6 9959gen_unary (code, mode, op0_mode, op0)
230d793d 9960 enum rtx_code code;
0c1c8ea6 9961 enum machine_mode mode, op0_mode;
230d793d
RS
9962 rtx op0;
9963{
0c1c8ea6 9964 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9965
9966 if (result)
9967 return result;
9968
9969 return gen_rtx_combine (code, mode, op0);
9970}
9971\f
9972/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9973 comparison code that will be tested.
9974
9975 The result is a possibly different comparison code to use. *POP0 and
9976 *POP1 may be updated.
9977
9978 It is possible that we might detect that a comparison is either always
9979 true or always false. However, we do not perform general constant
5089e22e 9980 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9981 should have been detected earlier. Hence we ignore all such cases. */
9982
9983static enum rtx_code
9984simplify_comparison (code, pop0, pop1)
9985 enum rtx_code code;
9986 rtx *pop0;
9987 rtx *pop1;
9988{
9989 rtx op0 = *pop0;
9990 rtx op1 = *pop1;
9991 rtx tem, tem1;
9992 int i;
9993 enum machine_mode mode, tmode;
9994
9995 /* Try a few ways of applying the same transformation to both operands. */
9996 while (1)
9997 {
3a19aabc
RK
9998#ifndef WORD_REGISTER_OPERATIONS
9999 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10000 so check specially. */
10001 if (code != GTU && code != GEU && code != LTU && code != LEU
10002 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10003 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10004 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10005 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10006 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10007 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 10008 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
10009 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10010 && GET_CODE (XEXP (op1, 1)) == CONST_INT
10011 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10012 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10013 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10014 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10015 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10016 && (INTVAL (XEXP (op0, 1))
10017 == (GET_MODE_BITSIZE (GET_MODE (op0))
10018 - (GET_MODE_BITSIZE
10019 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10020 {
10021 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10022 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10023 }
10024#endif
10025
230d793d
RS
10026 /* If both operands are the same constant shift, see if we can ignore the
10027 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 10028 this shift are known to be zero for both inputs and if the type of
230d793d 10029 comparison is compatible with the shift. */
67232b23
RK
10030 if (GET_CODE (op0) == GET_CODE (op1)
10031 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10032 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 10033 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
10034 && (code != GT && code != LT && code != GE && code != LE))
10035 || (GET_CODE (op0) == ASHIFTRT
10036 && (code != GTU && code != LTU
10037 && code != GEU && code != GEU)))
10038 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10039 && INTVAL (XEXP (op0, 1)) >= 0
10040 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10041 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
10042 {
10043 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 10044 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10045 int shift_count = INTVAL (XEXP (op0, 1));
10046
10047 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10048 mask &= (mask >> shift_count) << shift_count;
45620ed4 10049 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
10050 mask = (mask & (mask << shift_count)) >> shift_count;
10051
663522cb
KH
10052 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10053 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
230d793d
RS
10054 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10055 else
10056 break;
10057 }
10058
10059 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10060 SUBREGs are of the same mode, and, in both cases, the AND would
10061 be redundant if the comparison was done in the narrower mode,
10062 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
10063 and the operand's possibly nonzero bits are 0xffffff01; in that case
10064 if we only care about QImode, we don't need the AND). This case
10065 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
10066 STORE_FLAG_VALUE == 1 (e.g., the 386).
10067
10068 Similarly, check for a case where the AND's are ZERO_EXTEND
10069 operations from some narrower mode even though a SUBREG is not
10070 present. */
230d793d 10071
663522cb
KH
10072 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10073 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10074 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 10075 {
7e4dc511
RK
10076 rtx inner_op0 = XEXP (op0, 0);
10077 rtx inner_op1 = XEXP (op1, 0);
10078 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10079 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10080 int changed = 0;
663522cb 10081
7e4dc511
RK
10082 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10083 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10084 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10085 && (GET_MODE (SUBREG_REG (inner_op0))
10086 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 10087 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 10088 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 10089 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 10090 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
10091 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10092 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
10093 {
10094 op0 = SUBREG_REG (inner_op0);
10095 op1 = SUBREG_REG (inner_op1);
10096
10097 /* The resulting comparison is always unsigned since we masked
0f41302f 10098 off the original sign bit. */
7e4dc511
RK
10099 code = unsigned_condition (code);
10100
10101 changed = 1;
10102 }
230d793d 10103
7e4dc511
RK
10104 else if (c0 == c1)
10105 for (tmode = GET_CLASS_NARROWEST_MODE
10106 (GET_MODE_CLASS (GET_MODE (op0)));
10107 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 10108 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
10109 {
10110 op0 = gen_lowpart_for_combine (tmode, inner_op0);
10111 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 10112 code = unsigned_condition (code);
7e4dc511
RK
10113 changed = 1;
10114 break;
10115 }
10116
10117 if (! changed)
10118 break;
230d793d 10119 }
3a19aabc 10120
ad25ba17
RK
10121 /* If both operands are NOT, we can strip off the outer operation
10122 and adjust the comparison code for swapped operands; similarly for
10123 NEG, except that this must be an equality comparison. */
10124 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10125 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10126 && (code == EQ || code == NE)))
10127 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 10128
230d793d
RS
10129 else
10130 break;
10131 }
663522cb 10132
230d793d 10133 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
10134 comparison code appropriately, but don't do this if the second operand
10135 is already a constant integer. */
10136 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
10137 {
10138 tem = op0, op0 = op1, op1 = tem;
10139 code = swap_condition (code);
10140 }
10141
10142 /* We now enter a loop during which we will try to simplify the comparison.
10143 For the most part, we only are concerned with comparisons with zero,
10144 but some things may really be comparisons with zero but not start
10145 out looking that way. */
10146
10147 while (GET_CODE (op1) == CONST_INT)
10148 {
10149 enum machine_mode mode = GET_MODE (op0);
770ae6cc 10150 unsigned int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 10151 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10152 int equality_comparison_p;
10153 int sign_bit_comparison_p;
10154 int unsigned_comparison_p;
5f4f0e22 10155 HOST_WIDE_INT const_op;
230d793d
RS
10156
10157 /* We only want to handle integral modes. This catches VOIDmode,
10158 CCmode, and the floating-point modes. An exception is that we
10159 can handle VOIDmode if OP0 is a COMPARE or a comparison
10160 operation. */
10161
10162 if (GET_MODE_CLASS (mode) != MODE_INT
10163 && ! (mode == VOIDmode
10164 && (GET_CODE (op0) == COMPARE
10165 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10166 break;
10167
10168 /* Get the constant we are comparing against and turn off all bits
10169 not on in our mode. */
3c094e22 10170 const_op = trunc_int_for_mode (INTVAL (op1), mode);
230d793d
RS
10171
10172 /* If we are comparing against a constant power of two and the value
951553af 10173 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
10174 `and'ed with that bit), we can replace this with a comparison
10175 with zero. */
10176 if (const_op
10177 && (code == EQ || code == NE || code == GE || code == GEU
10178 || code == LT || code == LTU)
5f4f0e22 10179 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10180 && exact_log2 (const_op) >= 0
e51712db 10181 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
10182 {
10183 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10184 op1 = const0_rtx, const_op = 0;
10185 }
10186
d0ab8cd3
RK
10187 /* Similarly, if we are comparing a value known to be either -1 or
10188 0 with -1, change it to the opposite comparison against zero. */
10189
10190 if (const_op == -1
10191 && (code == EQ || code == NE || code == GT || code == LE
10192 || code == GEU || code == LTU)
10193 && num_sign_bit_copies (op0, mode) == mode_width)
10194 {
10195 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10196 op1 = const0_rtx, const_op = 0;
10197 }
10198
230d793d 10199 /* Do some canonicalizations based on the comparison code. We prefer
663522cb 10200 comparisons against zero and then prefer equality comparisons.
4803a34a 10201 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
10202
10203 switch (code)
10204 {
10205 case LT:
4803a34a
RK
10206 /* < C is equivalent to <= (C - 1) */
10207 if (const_op > 0)
230d793d 10208 {
4803a34a 10209 const_op -= 1;
5f4f0e22 10210 op1 = GEN_INT (const_op);
230d793d
RS
10211 code = LE;
10212 /* ... fall through to LE case below. */
10213 }
10214 else
10215 break;
10216
10217 case LE:
4803a34a
RK
10218 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10219 if (const_op < 0)
10220 {
10221 const_op += 1;
5f4f0e22 10222 op1 = GEN_INT (const_op);
4803a34a
RK
10223 code = LT;
10224 }
230d793d
RS
10225
10226 /* If we are doing a <= 0 comparison on a value known to have
10227 a zero sign bit, we can replace this with == 0. */
10228 else if (const_op == 0
5f4f0e22 10229 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10230 && (nonzero_bits (op0, mode)
5f4f0e22 10231 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10232 code = EQ;
10233 break;
10234
10235 case GE:
0f41302f 10236 /* >= C is equivalent to > (C - 1). */
4803a34a 10237 if (const_op > 0)
230d793d 10238 {
4803a34a 10239 const_op -= 1;
5f4f0e22 10240 op1 = GEN_INT (const_op);
230d793d
RS
10241 code = GT;
10242 /* ... fall through to GT below. */
10243 }
10244 else
10245 break;
10246
10247 case GT:
663522cb 10248 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
4803a34a
RK
10249 if (const_op < 0)
10250 {
10251 const_op += 1;
5f4f0e22 10252 op1 = GEN_INT (const_op);
4803a34a
RK
10253 code = GE;
10254 }
230d793d
RS
10255
10256 /* If we are doing a > 0 comparison on a value known to have
10257 a zero sign bit, we can replace this with != 0. */
10258 else if (const_op == 0
5f4f0e22 10259 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10260 && (nonzero_bits (op0, mode)
5f4f0e22 10261 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10262 code = NE;
10263 break;
10264
230d793d 10265 case LTU:
4803a34a
RK
10266 /* < C is equivalent to <= (C - 1). */
10267 if (const_op > 0)
10268 {
10269 const_op -= 1;
5f4f0e22 10270 op1 = GEN_INT (const_op);
4803a34a 10271 code = LEU;
0f41302f 10272 /* ... fall through ... */
4803a34a 10273 }
d0ab8cd3
RK
10274
10275 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
10276 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10277 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10278 {
10279 const_op = 0, op1 = const0_rtx;
10280 code = GE;
10281 break;
10282 }
4803a34a
RK
10283 else
10284 break;
230d793d
RS
10285
10286 case LEU:
10287 /* unsigned <= 0 is equivalent to == 0 */
10288 if (const_op == 0)
10289 code = EQ;
d0ab8cd3 10290
0f41302f 10291 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
10292 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10293 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10294 {
10295 const_op = 0, op1 = const0_rtx;
10296 code = GE;
10297 }
230d793d
RS
10298 break;
10299
4803a34a
RK
10300 case GEU:
10301 /* >= C is equivalent to < (C - 1). */
10302 if (const_op > 1)
10303 {
10304 const_op -= 1;
5f4f0e22 10305 op1 = GEN_INT (const_op);
4803a34a 10306 code = GTU;
0f41302f 10307 /* ... fall through ... */
4803a34a 10308 }
d0ab8cd3
RK
10309
10310 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
10311 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10312 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10313 {
10314 const_op = 0, op1 = const0_rtx;
10315 code = LT;
8b2e69e1 10316 break;
d0ab8cd3 10317 }
4803a34a
RK
10318 else
10319 break;
10320
230d793d
RS
10321 case GTU:
10322 /* unsigned > 0 is equivalent to != 0 */
10323 if (const_op == 0)
10324 code = NE;
d0ab8cd3
RK
10325
10326 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
10327 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10328 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10329 {
10330 const_op = 0, op1 = const0_rtx;
10331 code = LT;
10332 }
230d793d 10333 break;
e9a25f70
JL
10334
10335 default:
10336 break;
230d793d
RS
10337 }
10338
10339 /* Compute some predicates to simplify code below. */
10340
10341 equality_comparison_p = (code == EQ || code == NE);
10342 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10343 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
d5010e66 10344 || code == GEU);
230d793d 10345
6139ff20
RK
10346 /* If this is a sign bit comparison and we can do arithmetic in
10347 MODE, say that we will only be needing the sign bit of OP0. */
10348 if (sign_bit_comparison_p
10349 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10350 op0 = force_to_mode (op0, mode,
10351 ((HOST_WIDE_INT) 1
10352 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 10353 NULL_RTX, 0);
6139ff20 10354
230d793d
RS
10355 /* Now try cases based on the opcode of OP0. If none of the cases
10356 does a "continue", we exit this loop immediately after the
10357 switch. */
10358
10359 switch (GET_CODE (op0))
10360 {
10361 case ZERO_EXTRACT:
10362 /* If we are extracting a single bit from a variable position in
10363 a constant that has only a single bit set and are comparing it
663522cb 10364 with zero, we can convert this into an equality comparison
d7cd794f 10365 between the position and the location of the single bit. */
230d793d 10366
230d793d
RS
10367 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10368 && XEXP (op0, 1) == const1_rtx
10369 && equality_comparison_p && const_op == 0
d7cd794f 10370 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 10371 {
f76b9db2 10372 if (BITS_BIG_ENDIAN)
0d8e55d8 10373 {
d7cd794f 10374#ifdef HAVE_extzv
a995e389 10375 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
10376 if (mode == VOIDmode)
10377 mode = word_mode;
10378 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 10379#else
663522cb 10380 i = BITS_PER_WORD - 1 - i;
230d793d 10381#endif
0d8e55d8 10382 }
230d793d
RS
10383
10384 op0 = XEXP (op0, 2);
5f4f0e22 10385 op1 = GEN_INT (i);
230d793d
RS
10386 const_op = i;
10387
10388 /* Result is nonzero iff shift count is equal to I. */
10389 code = reverse_condition (code);
10390 continue;
10391 }
230d793d 10392
0f41302f 10393 /* ... fall through ... */
230d793d
RS
10394
10395 case SIGN_EXTRACT:
10396 tem = expand_compound_operation (op0);
10397 if (tem != op0)
10398 {
10399 op0 = tem;
10400 continue;
10401 }
10402 break;
10403
10404 case NOT:
10405 /* If testing for equality, we can take the NOT of the constant. */
10406 if (equality_comparison_p
10407 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10408 {
10409 op0 = XEXP (op0, 0);
10410 op1 = tem;
10411 continue;
10412 }
10413
10414 /* If just looking at the sign bit, reverse the sense of the
10415 comparison. */
10416 if (sign_bit_comparison_p)
10417 {
10418 op0 = XEXP (op0, 0);
10419 code = (code == GE ? LT : GE);
10420 continue;
10421 }
10422 break;
10423
10424 case NEG:
10425 /* If testing for equality, we can take the NEG of the constant. */
10426 if (equality_comparison_p
10427 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10428 {
10429 op0 = XEXP (op0, 0);
10430 op1 = tem;
10431 continue;
10432 }
10433
10434 /* The remaining cases only apply to comparisons with zero. */
10435 if (const_op != 0)
10436 break;
10437
10438 /* When X is ABS or is known positive,
10439 (neg X) is < 0 if and only if X != 0. */
10440
10441 if (sign_bit_comparison_p
10442 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10443 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10444 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10445 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10446 {
10447 op0 = XEXP (op0, 0);
10448 code = (code == LT ? NE : EQ);
10449 continue;
10450 }
10451
3bed8141 10452 /* If we have NEG of something whose two high-order bits are the
0f41302f 10453 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 10454 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10455 {
10456 op0 = XEXP (op0, 0);
10457 code = swap_condition (code);
10458 continue;
10459 }
10460 break;
10461
10462 case ROTATE:
10463 /* If we are testing equality and our count is a constant, we
10464 can perform the inverse operation on our RHS. */
10465 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10466 && (tem = simplify_binary_operation (ROTATERT, mode,
10467 op1, XEXP (op0, 1))) != 0)
10468 {
10469 op0 = XEXP (op0, 0);
10470 op1 = tem;
10471 continue;
10472 }
10473
10474 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10475 a particular bit. Convert it to an AND of a constant of that
10476 bit. This will be converted into a ZERO_EXTRACT. */
10477 if (const_op == 0 && sign_bit_comparison_p
10478 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10479 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10480 {
5f4f0e22
CH
10481 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10482 ((HOST_WIDE_INT) 1
10483 << (mode_width - 1
10484 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10485 code = (code == LT ? NE : EQ);
10486 continue;
10487 }
10488
663522cb 10489 /* Fall through. */
230d793d
RS
10490
10491 case ABS:
10492 /* ABS is ignorable inside an equality comparison with zero. */
10493 if (const_op == 0 && equality_comparison_p)
10494 {
10495 op0 = XEXP (op0, 0);
10496 continue;
10497 }
10498 break;
230d793d
RS
10499
10500 case SIGN_EXTEND:
10501 /* Can simplify (compare (zero/sign_extend FOO) CONST)
663522cb 10502 to (compare FOO CONST) if CONST fits in FOO's mode and we
230d793d
RS
10503 are either testing inequality or have an unsigned comparison
10504 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10505 if (! unsigned_comparison_p
10506 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10507 <= HOST_BITS_PER_WIDE_INT)
10508 && ((unsigned HOST_WIDE_INT) const_op
e51712db 10509 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 10510 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
10511 {
10512 op0 = XEXP (op0, 0);
10513 continue;
10514 }
10515 break;
10516
10517 case SUBREG:
a687e897 10518 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 10519 both constants are smaller than 1/2 the maximum positive
a687e897
RK
10520 value in MODE, and the comparison is equality or unsigned.
10521 In that case, if A is either zero-extended to MODE or has
10522 sufficient sign bits so that the high-order bit in MODE
10523 is a copy of the sign in the inner mode, we can prove that it is
10524 safe to do the operation in the wider mode. This simplifies
10525 many range checks. */
10526
10527 if (mode_width <= HOST_BITS_PER_WIDE_INT
10528 && subreg_lowpart_p (op0)
10529 && GET_CODE (SUBREG_REG (op0)) == PLUS
10530 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10531 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
663522cb
KH
10532 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10533 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
adb7a1cb 10534 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
10535 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10536 GET_MODE (SUBREG_REG (op0)))
663522cb 10537 & ~GET_MODE_MASK (mode))
a687e897
RK
10538 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10539 GET_MODE (SUBREG_REG (op0)))
10540 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10541 - GET_MODE_BITSIZE (mode)))))
10542 {
10543 op0 = SUBREG_REG (op0);
10544 continue;
10545 }
10546
fe0cf571
RK
10547 /* If the inner mode is narrower and we are extracting the low part,
10548 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10549 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10550 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10551 /* Fall through */ ;
10552 else
230d793d
RS
10553 break;
10554
0f41302f 10555 /* ... fall through ... */
230d793d
RS
10556
10557 case ZERO_EXTEND:
10558 if ((unsigned_comparison_p || equality_comparison_p)
10559 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10560 <= HOST_BITS_PER_WIDE_INT)
10561 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10562 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10563 {
10564 op0 = XEXP (op0, 0);
10565 continue;
10566 }
10567 break;
10568
10569 case PLUS:
20fdd649 10570 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10571 this for equality comparisons due to pathological cases involving
230d793d 10572 overflows. */
20fdd649
RK
10573 if (equality_comparison_p
10574 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10575 op1, XEXP (op0, 1))))
230d793d
RS
10576 {
10577 op0 = XEXP (op0, 0);
10578 op1 = tem;
10579 continue;
10580 }
10581
10582 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10583 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10584 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10585 {
10586 op0 = XEXP (XEXP (op0, 0), 0);
10587 code = (code == LT ? EQ : NE);
10588 continue;
10589 }
10590 break;
10591
10592 case MINUS:
65945ec1
HPN
10593 /* We used to optimize signed comparisons against zero, but that
10594 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10595 arrive here as equality comparisons, or (GEU, LTU) are
10596 optimized away. No need to special-case them. */
0bd4b461 10597
20fdd649
RK
10598 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10599 (eq B (minus A C)), whichever simplifies. We can only do
10600 this for equality comparisons due to pathological cases involving
10601 overflows. */
10602 if (equality_comparison_p
10603 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10604 XEXP (op0, 1), op1)))
10605 {
10606 op0 = XEXP (op0, 0);
10607 op1 = tem;
10608 continue;
10609 }
10610
10611 if (equality_comparison_p
10612 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10613 XEXP (op0, 0), op1)))
10614 {
10615 op0 = XEXP (op0, 1);
10616 op1 = tem;
10617 continue;
10618 }
10619
230d793d
RS
10620 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10621 of bits in X minus 1, is one iff X > 0. */
10622 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10623 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10624 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10625 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10626 {
10627 op0 = XEXP (op0, 1);
10628 code = (code == GE ? LE : GT);
10629 continue;
10630 }
10631 break;
10632
10633 case XOR:
10634 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10635 if C is zero or B is a constant. */
10636 if (equality_comparison_p
10637 && 0 != (tem = simplify_binary_operation (XOR, mode,
10638 XEXP (op0, 1), op1)))
10639 {
10640 op0 = XEXP (op0, 0);
10641 op1 = tem;
10642 continue;
10643 }
10644 break;
10645
10646 case EQ: case NE:
10647 case LT: case LTU: case LE: case LEU:
10648 case GT: case GTU: case GE: case GEU:
10649 /* We can't do anything if OP0 is a condition code value, rather
10650 than an actual data value. */
10651 if (const_op != 0
10652#ifdef HAVE_cc0
10653 || XEXP (op0, 0) == cc0_rtx
10654#endif
10655 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10656 break;
10657
10658 /* Get the two operands being compared. */
10659 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10660 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10661 else
10662 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10663
10664 /* Check for the cases where we simply want the result of the
10665 earlier test or the opposite of that result. */
10666 if (code == NE
10667 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 10668 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10669 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10670 && (STORE_FLAG_VALUE
5f4f0e22
CH
10671 & (((HOST_WIDE_INT) 1
10672 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
10673 && (code == LT
10674 || (code == GE && reversible_comparison_p (op0)))))
10675 {
10676 code = (code == LT || code == NE
10677 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10678 op0 = tem, op1 = tem1;
10679 continue;
10680 }
10681 break;
10682
10683 case IOR:
10684 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10685 iff X <= 0. */
10686 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10687 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10688 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10689 {
10690 op0 = XEXP (op0, 1);
10691 code = (code == GE ? GT : LE);
10692 continue;
10693 }
10694 break;
10695
10696 case AND:
10697 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10698 will be converted to a ZERO_EXTRACT later. */
10699 if (const_op == 0 && equality_comparison_p
45620ed4 10700 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10701 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10702 {
10703 op0 = simplify_and_const_int
10704 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10705 XEXP (op0, 1),
10706 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10707 (HOST_WIDE_INT) 1);
230d793d
RS
10708 continue;
10709 }
10710
10711 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10712 zero and X is a comparison and C1 and C2 describe only bits set
10713 in STORE_FLAG_VALUE, we can compare with X. */
10714 if (const_op == 0 && equality_comparison_p
5f4f0e22 10715 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10716 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10717 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10718 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10719 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10720 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10721 {
10722 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10723 << INTVAL (XEXP (XEXP (op0, 0), 1)));
663522cb 10724 if ((~STORE_FLAG_VALUE & mask) == 0
230d793d
RS
10725 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10726 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10727 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10728 {
10729 op0 = XEXP (XEXP (op0, 0), 0);
10730 continue;
10731 }
10732 }
10733
10734 /* If we are doing an equality comparison of an AND of a bit equal
10735 to the sign bit, replace this with a LT or GE comparison of
10736 the underlying value. */
10737 if (equality_comparison_p
10738 && const_op == 0
10739 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10740 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10741 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10742 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10743 {
10744 op0 = XEXP (op0, 0);
10745 code = (code == EQ ? GE : LT);
10746 continue;
10747 }
10748
10749 /* If this AND operation is really a ZERO_EXTEND from a narrower
10750 mode, the constant fits within that mode, and this is either an
10751 equality or unsigned comparison, try to do this comparison in
10752 the narrower mode. */
10753 if ((equality_comparison_p || unsigned_comparison_p)
10754 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10755 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10756 & GET_MODE_MASK (mode))
10757 + 1)) >= 0
10758 && const_op >> i == 0
10759 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10760 {
10761 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10762 continue;
10763 }
e5e809f4
JL
10764
10765 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10766 in both M1 and M2 and the SUBREG is either paradoxical or
10767 represents the low part, permute the SUBREG and the AND and
10768 try again. */
10769 if (GET_CODE (XEXP (op0, 0)) == SUBREG
c5c76735 10770 && (0
9ec36da5 10771#ifdef WORD_REGISTER_OPERATIONS
c5c76735
JL
10772 || ((mode_width
10773 > (GET_MODE_BITSIZE
10774 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10775 && mode_width <= BITS_PER_WORD)
9ec36da5 10776#endif
c5c76735
JL
10777 || ((mode_width
10778 <= (GET_MODE_BITSIZE
10779 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10780 && subreg_lowpart_p (XEXP (op0, 0))))
adc05e6c
JL
10781#ifndef WORD_REGISTER_OPERATIONS
10782 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10783 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10784 As originally written the upper bits have a defined value
10785 due to the AND operation. However, if we commute the AND
10786 inside the SUBREG then they no longer have defined values
10787 and the meaning of the code has been changed. */
10788 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10789 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10790#endif
e5e809f4
JL
10791 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10792 && mode_width <= HOST_BITS_PER_WIDE_INT
10793 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10794 <= HOST_BITS_PER_WIDE_INT)
663522cb
KH
10795 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10796 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10797 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10798 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10799 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10800 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
663522cb 10801
e5e809f4
JL
10802 {
10803 op0
10804 = gen_lowpart_for_combine
10805 (mode,
10806 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10807 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10808 continue;
10809 }
10810
9f8e169e
RH
10811 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10812 (eq (and (lshiftrt X) 1) 0). */
10813 if (const_op == 0 && equality_comparison_p
10814 && XEXP (op0, 1) == const1_rtx
10815 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10816 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10817 {
10818 op0 = simplify_and_const_int
10819 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10820 XEXP (XEXP (XEXP (op0, 0), 0), 0),
10821 XEXP (XEXP (op0, 0), 1)),
10822 (HOST_WIDE_INT) 1);
10823 code = (code == NE ? EQ : NE);
10824 continue;
10825 }
230d793d
RS
10826 break;
10827
10828 case ASHIFT:
45620ed4 10829 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10830 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10831 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10832 shifted right N bits so long as the low-order N bits of C are
10833 zero. */
10834 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10835 && INTVAL (XEXP (op0, 1)) >= 0
10836 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10837 < HOST_BITS_PER_WIDE_INT)
10838 && ((const_op
34785d05 10839 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10840 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10841 && (nonzero_bits (XEXP (op0, 0), mode)
663522cb
KH
10842 & ~(mask >> (INTVAL (XEXP (op0, 1))
10843 + ! equality_comparison_p))) == 0)
230d793d 10844 {
7ce787fe
NC
10845 /* We must perform a logical shift, not an arithmetic one,
10846 as we want the top N bits of C to be zero. */
aaaec114 10847 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
663522cb 10848
7ce787fe 10849 temp >>= INTVAL (XEXP (op0, 1));
aaaec114 10850 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
230d793d
RS
10851 op0 = XEXP (op0, 0);
10852 continue;
10853 }
10854
dfbe1b2f 10855 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10856 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10857 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10858 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10859 {
5f4f0e22
CH
10860 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10861 ((HOST_WIDE_INT) 1
10862 << (mode_width - 1
10863 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10864 code = (code == LT ? NE : EQ);
10865 continue;
10866 }
dfbe1b2f
RK
10867
10868 /* If this an equality comparison with zero and we are shifting
10869 the low bit to the sign bit, we can convert this to an AND of the
10870 low-order bit. */
10871 if (const_op == 0 && equality_comparison_p
10872 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10873 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10874 {
5f4f0e22
CH
10875 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10876 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10877 continue;
10878 }
230d793d
RS
10879 break;
10880
10881 case ASHIFTRT:
d0ab8cd3
RK
10882 /* If this is an equality comparison with zero, we can do this
10883 as a logical shift, which might be much simpler. */
10884 if (equality_comparison_p && const_op == 0
10885 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10886 {
10887 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10888 XEXP (op0, 0),
10889 INTVAL (XEXP (op0, 1)));
10890 continue;
10891 }
10892
230d793d
RS
10893 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10894 do the comparison in a narrower mode. */
10895 if (! unsigned_comparison_p
10896 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10897 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10898 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10899 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10900 MODE_INT, 1)) != BLKmode
5f4f0e22 10901 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10902 || ((unsigned HOST_WIDE_INT) -const_op
5f4f0e22 10903 <= GET_MODE_MASK (tmode))))
230d793d
RS
10904 {
10905 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10906 continue;
10907 }
10908
14a774a9
RK
10909 /* Likewise if OP0 is a PLUS of a sign extension with a
10910 constant, which is usually represented with the PLUS
10911 between the shifts. */
10912 if (! unsigned_comparison_p
10913 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10914 && GET_CODE (XEXP (op0, 0)) == PLUS
10915 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10916 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10917 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10918 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10919 MODE_INT, 1)) != BLKmode
10920 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10921 || ((unsigned HOST_WIDE_INT) -const_op
14a774a9
RK
10922 <= GET_MODE_MASK (tmode))))
10923 {
10924 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10925 rtx add_const = XEXP (XEXP (op0, 0), 1);
10926 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10927 XEXP (op0, 1));
10928
10929 op0 = gen_binary (PLUS, tmode,
10930 gen_lowpart_for_combine (tmode, inner),
10931 new_const);
10932 continue;
10933 }
10934
0f41302f 10935 /* ... fall through ... */
230d793d
RS
10936 case LSHIFTRT:
10937 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10938 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10939 by comparing FOO with C shifted left N bits so long as no
10940 overflow occurs. */
10941 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10942 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10943 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10944 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10945 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10946 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10947 && (const_op == 0
10948 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10949 < mode_width)))
10950 {
10951 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10952 op1 = GEN_INT (const_op);
230d793d
RS
10953 op0 = XEXP (op0, 0);
10954 continue;
10955 }
10956
10957 /* If we are using this shift to extract just the sign bit, we
10958 can replace this with an LT or GE comparison. */
10959 if (const_op == 0
10960 && (equality_comparison_p || sign_bit_comparison_p)
10961 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10962 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10963 {
10964 op0 = XEXP (op0, 0);
10965 code = (code == NE || code == GT ? LT : GE);
10966 continue;
10967 }
10968 break;
663522cb 10969
e9a25f70
JL
10970 default:
10971 break;
230d793d
RS
10972 }
10973
10974 break;
10975 }
10976
10977 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10978 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10979 paradoxical. The latter case can only occur when it is known that the
10980 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10981 We can never remove a SUBREG for a non-equality comparison because the
10982 sign bit is in a different place in the underlying object. */
10983
10984 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10985 op1 = make_compound_operation (op1, SET);
10986
10987 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10988 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10989 && (code == NE || code == EQ)
10990 && ((GET_MODE_SIZE (GET_MODE (op0))
10991 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10992 {
10993 op0 = SUBREG_REG (op0);
10994 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10995 }
10996
10997 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10998 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10999 && (code == NE || code == EQ)
ac49a949
RS
11000 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11001 <= HOST_BITS_PER_WIDE_INT)
951553af 11002 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
663522cb 11003 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
230d793d
RS
11004 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
11005 op1),
951553af 11006 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
663522cb 11007 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
230d793d
RS
11008 op0 = SUBREG_REG (op0), op1 = tem;
11009
11010 /* We now do the opposite procedure: Some machines don't have compare
11011 insns in all modes. If OP0's mode is an integer mode smaller than a
11012 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
11013 mode for which we can do the compare. There are a number of cases in
11014 which we can use the wider mode. */
230d793d
RS
11015
11016 mode = GET_MODE (op0);
11017 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11018 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11019 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
11020 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
11021 (tmode != VOIDmode
11022 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 11023 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 11024 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 11025 {
951553af 11026 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
11027 narrower mode and this is an equality or unsigned comparison,
11028 we can use the wider mode. Similarly for sign-extended
7e4dc511 11029 values, in which case it is true for all comparisons. */
a687e897
RK
11030 if (((code == EQ || code == NE
11031 || code == GEU || code == GTU || code == LEU || code == LTU)
663522cb
KH
11032 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
11033 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
11034 || ((num_sign_bit_copies (op0, tmode)
11035 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 11036 && (num_sign_bit_copies (op1, tmode)
58744483 11037 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897 11038 {
14a774a9
RK
11039 /* If OP0 is an AND and we don't have an AND in MODE either,
11040 make a new AND in the proper mode. */
11041 if (GET_CODE (op0) == AND
11042 && (add_optab->handlers[(int) mode].insn_code
11043 == CODE_FOR_nothing))
11044 op0 = gen_binary (AND, tmode,
11045 gen_lowpart_for_combine (tmode,
11046 XEXP (op0, 0)),
11047 gen_lowpart_for_combine (tmode,
11048 XEXP (op0, 1)));
11049
a687e897
RK
11050 op0 = gen_lowpart_for_combine (tmode, op0);
11051 op1 = gen_lowpart_for_combine (tmode, op1);
11052 break;
11053 }
230d793d 11054
a687e897
RK
11055 /* If this is a test for negative, we can make an explicit
11056 test of the sign bit. */
11057
11058 if (op1 == const0_rtx && (code == LT || code == GE)
11059 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 11060 {
a687e897
RK
11061 op0 = gen_binary (AND, tmode,
11062 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
11063 GEN_INT ((HOST_WIDE_INT) 1
11064 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 11065 code = (code == LT) ? NE : EQ;
a687e897 11066 break;
230d793d 11067 }
230d793d
RS
11068 }
11069
b7a775b2
RK
11070#ifdef CANONICALIZE_COMPARISON
11071 /* If this machine only supports a subset of valid comparisons, see if we
11072 can convert an unsupported one into a supported one. */
11073 CANONICALIZE_COMPARISON (code, op0, op1);
11074#endif
11075
230d793d
RS
11076 *pop0 = op0;
11077 *pop1 = op1;
11078
11079 return code;
11080}
11081\f
11082/* Return 1 if we know that X, a comparison operation, is not operating
11083 on a floating-point value or is EQ or NE, meaning that we can safely
11084 reverse it. */
11085
11086static int
11087reversible_comparison_p (x)
11088 rtx x;
11089{
11090 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 11091 || flag_fast_math
1eb8759b
RH
11092 || GET_CODE (x) == NE || GET_CODE (x) == EQ
11093 || GET_CODE (x) == UNORDERED || GET_CODE (x) == ORDERED)
230d793d
RS
11094 return 1;
11095
11096 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
11097 {
11098 case MODE_INT:
3ad2180a
RK
11099 case MODE_PARTIAL_INT:
11100 case MODE_COMPLEX_INT:
230d793d
RS
11101 return 1;
11102
11103 case MODE_CC:
9210df58
RK
11104 /* If the mode of the condition codes tells us that this is safe,
11105 we need look no further. */
11106 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
11107 return 1;
11108
11109 /* Otherwise try and find where the condition codes were last set and
11110 use that. */
230d793d
RS
11111 x = get_last_value (XEXP (x, 0));
11112 return (x && GET_CODE (x) == COMPARE
3ad2180a 11113 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
663522cb 11114
e9a25f70
JL
11115 default:
11116 return 0;
230d793d 11117 }
230d793d
RS
11118}
11119\f
11120/* Utility function for following routine. Called when X is part of a value
11121 being stored into reg_last_set_value. Sets reg_last_set_table_tick
11122 for each register mentioned. Similar to mention_regs in cse.c */
11123
11124static void
11125update_table_tick (x)
11126 rtx x;
11127{
11128 register enum rtx_code code = GET_CODE (x);
6f7d635c 11129 register const char *fmt = GET_RTX_FORMAT (code);
230d793d
RS
11130 register int i;
11131
11132 if (code == REG)
11133 {
770ae6cc
RK
11134 unsigned int regno = REGNO (x);
11135 unsigned int endregno
11136 = regno + (regno < FIRST_PSEUDO_REGISTER
11137 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11138 unsigned int r;
230d793d 11139
770ae6cc
RK
11140 for (r = regno; r < endregno; r++)
11141 reg_last_set_table_tick[r] = label_tick;
230d793d
RS
11142
11143 return;
11144 }
663522cb 11145
230d793d
RS
11146 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11147 /* Note that we can't have an "E" in values stored; see
11148 get_last_value_validate. */
11149 if (fmt[i] == 'e')
11150 update_table_tick (XEXP (x, i));
11151}
11152
11153/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11154 are saying that the register is clobbered and we no longer know its
7988fd36
RK
11155 value. If INSN is zero, don't update reg_last_set; this is only permitted
11156 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
11157
11158static void
11159record_value_for_reg (reg, insn, value)
11160 rtx reg;
11161 rtx insn;
11162 rtx value;
11163{
770ae6cc
RK
11164 unsigned int regno = REGNO (reg);
11165 unsigned int endregno
11166 = regno + (regno < FIRST_PSEUDO_REGISTER
11167 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11168 unsigned int i;
230d793d
RS
11169
11170 /* If VALUE contains REG and we have a previous value for REG, substitute
11171 the previous value. */
11172 if (value && insn && reg_overlap_mentioned_p (reg, value))
11173 {
11174 rtx tem;
11175
11176 /* Set things up so get_last_value is allowed to see anything set up to
11177 our insn. */
11178 subst_low_cuid = INSN_CUID (insn);
663522cb 11179 tem = get_last_value (reg);
230d793d 11180
14a774a9
RK
11181 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11182 it isn't going to be useful and will take a lot of time to process,
11183 so just use the CLOBBER. */
11184
230d793d 11185 if (tem)
14a774a9
RK
11186 {
11187 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11188 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11189 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11190 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11191 tem = XEXP (tem, 0);
11192
11193 value = replace_rtx (copy_rtx (value), reg, tem);
11194 }
230d793d
RS
11195 }
11196
11197 /* For each register modified, show we don't know its value, that
ef026f91
RS
11198 we don't know about its bitwise content, that its value has been
11199 updated, and that we don't know the location of the death of the
11200 register. */
770ae6cc 11201 for (i = regno; i < endregno; i++)
230d793d
RS
11202 {
11203 if (insn)
11204 reg_last_set[i] = insn;
770ae6cc 11205
230d793d 11206 reg_last_set_value[i] = 0;
ef026f91
RS
11207 reg_last_set_mode[i] = 0;
11208 reg_last_set_nonzero_bits[i] = 0;
11209 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
11210 reg_last_death[i] = 0;
11211 }
11212
11213 /* Mark registers that are being referenced in this value. */
11214 if (value)
11215 update_table_tick (value);
11216
11217 /* Now update the status of each register being set.
11218 If someone is using this register in this block, set this register
11219 to invalid since we will get confused between the two lives in this
11220 basic block. This makes using this register always invalid. In cse, we
11221 scan the table to invalidate all entries using this register, but this
11222 is too much work for us. */
11223
11224 for (i = regno; i < endregno; i++)
11225 {
11226 reg_last_set_label[i] = label_tick;
11227 if (value && reg_last_set_table_tick[i] == label_tick)
11228 reg_last_set_invalid[i] = 1;
11229 else
11230 reg_last_set_invalid[i] = 0;
11231 }
11232
11233 /* The value being assigned might refer to X (like in "x++;"). In that
11234 case, we must replace it with (clobber (const_int 0)) to prevent
11235 infinite loops. */
9a893315 11236 if (value && ! get_last_value_validate (&value, insn,
230d793d
RS
11237 reg_last_set_label[regno], 0))
11238 {
11239 value = copy_rtx (value);
9a893315
JW
11240 if (! get_last_value_validate (&value, insn,
11241 reg_last_set_label[regno], 1))
230d793d
RS
11242 value = 0;
11243 }
11244
55310dad
RK
11245 /* For the main register being modified, update the value, the mode, the
11246 nonzero bits, and the number of sign bit copies. */
11247
230d793d
RS
11248 reg_last_set_value[regno] = value;
11249
55310dad
RK
11250 if (value)
11251 {
2afabb48 11252 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
11253 reg_last_set_mode[regno] = GET_MODE (reg);
11254 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11255 reg_last_set_sign_bit_copies[regno]
11256 = num_sign_bit_copies (value, GET_MODE (reg));
11257 }
230d793d
RS
11258}
11259
230d793d 11260/* Called via note_stores from record_dead_and_set_regs to handle one
84832317
MM
11261 SET or CLOBBER in an insn. DATA is the instruction in which the
11262 set is occurring. */
230d793d
RS
11263
11264static void
84832317 11265record_dead_and_set_regs_1 (dest, setter, data)
230d793d 11266 rtx dest, setter;
84832317 11267 void *data;
230d793d 11268{
84832317
MM
11269 rtx record_dead_insn = (rtx) data;
11270
ca89d290
RK
11271 if (GET_CODE (dest) == SUBREG)
11272 dest = SUBREG_REG (dest);
11273
230d793d
RS
11274 if (GET_CODE (dest) == REG)
11275 {
11276 /* If we are setting the whole register, we know its value. Otherwise
11277 show that we don't know the value. We can handle SUBREG in
11278 some cases. */
11279 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11280 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11281 else if (GET_CODE (setter) == SET
11282 && GET_CODE (SET_DEST (setter)) == SUBREG
11283 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 11284 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 11285 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
11286 record_value_for_reg (dest, record_dead_insn,
11287 gen_lowpart_for_combine (GET_MODE (dest),
11288 SET_SRC (setter)));
230d793d 11289 else
5f4f0e22 11290 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
11291 }
11292 else if (GET_CODE (dest) == MEM
11293 /* Ignore pushes, they clobber nothing. */
11294 && ! push_operand (dest, GET_MODE (dest)))
11295 mem_last_set = INSN_CUID (record_dead_insn);
11296}
11297
11298/* Update the records of when each REG was most recently set or killed
11299 for the things done by INSN. This is the last thing done in processing
11300 INSN in the combiner loop.
11301
ef026f91
RS
11302 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11303 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11304 and also the similar information mem_last_set (which insn most recently
11305 modified memory) and last_call_cuid (which insn was the most recent
11306 subroutine call). */
230d793d
RS
11307
11308static void
11309record_dead_and_set_regs (insn)
11310 rtx insn;
11311{
11312 register rtx link;
770ae6cc 11313 unsigned int i;
55310dad 11314
230d793d
RS
11315 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11316 {
dbc131f3
RK
11317 if (REG_NOTE_KIND (link) == REG_DEAD
11318 && GET_CODE (XEXP (link, 0)) == REG)
11319 {
770ae6cc
RK
11320 unsigned int regno = REGNO (XEXP (link, 0));
11321 unsigned int endregno
dbc131f3
RK
11322 = regno + (regno < FIRST_PSEUDO_REGISTER
11323 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11324 : 1);
dbc131f3
RK
11325
11326 for (i = regno; i < endregno; i++)
11327 reg_last_death[i] = insn;
11328 }
230d793d 11329 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11330 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11331 }
11332
11333 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
11334 {
11335 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11336 if (call_used_regs[i])
11337 {
11338 reg_last_set_value[i] = 0;
ef026f91
RS
11339 reg_last_set_mode[i] = 0;
11340 reg_last_set_nonzero_bits[i] = 0;
11341 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
11342 reg_last_death[i] = 0;
11343 }
11344
11345 last_call_cuid = mem_last_set = INSN_CUID (insn);
11346 }
230d793d 11347
84832317 11348 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
230d793d 11349}
732f2ac9 11350
732f2ac9
JJ
11351/* If a SUBREG has the promoted bit set, it is in fact a property of the
11352 register present in the SUBREG, so for each such SUBREG go back and
11353 adjust nonzero and sign bit information of the registers that are
11354 known to have some zero/sign bits set.
11355
11356 This is needed because when combine blows the SUBREGs away, the
11357 information on zero/sign bits is lost and further combines can be
11358 missed because of that. */
11359
11360static void
11361record_promoted_value (insn, subreg)
663522cb
KH
11362 rtx insn;
11363 rtx subreg;
732f2ac9 11364{
4a71b24f 11365 rtx links, set;
770ae6cc 11366 unsigned int regno = REGNO (SUBREG_REG (subreg));
732f2ac9
JJ
11367 enum machine_mode mode = GET_MODE (subreg);
11368
25af74a0 11369 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
732f2ac9
JJ
11370 return;
11371
663522cb 11372 for (links = LOG_LINKS (insn); links;)
732f2ac9
JJ
11373 {
11374 insn = XEXP (links, 0);
11375 set = single_set (insn);
11376
11377 if (! set || GET_CODE (SET_DEST (set)) != REG
11378 || REGNO (SET_DEST (set)) != regno
11379 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11380 {
11381 links = XEXP (links, 1);
11382 continue;
11383 }
11384
663522cb
KH
11385 if (reg_last_set[regno] == insn)
11386 {
732f2ac9 11387 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
663522cb
KH
11388 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11389 }
732f2ac9
JJ
11390
11391 if (GET_CODE (SET_SRC (set)) == REG)
11392 {
11393 regno = REGNO (SET_SRC (set));
11394 links = LOG_LINKS (insn);
11395 }
11396 else
11397 break;
11398 }
11399}
11400
11401/* Scan X for promoted SUBREGs. For each one found,
11402 note what it implies to the registers used in it. */
11403
11404static void
11405check_promoted_subreg (insn, x)
663522cb
KH
11406 rtx insn;
11407 rtx x;
732f2ac9
JJ
11408{
11409 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11410 && GET_CODE (SUBREG_REG (x)) == REG)
11411 record_promoted_value (insn, x);
11412 else
11413 {
11414 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11415 int i, j;
11416
11417 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
663522cb 11418 switch (format[i])
732f2ac9
JJ
11419 {
11420 case 'e':
11421 check_promoted_subreg (insn, XEXP (x, i));
11422 break;
11423 case 'V':
11424 case 'E':
11425 if (XVEC (x, i) != 0)
11426 for (j = 0; j < XVECLEN (x, i); j++)
11427 check_promoted_subreg (insn, XVECEXP (x, i, j));
11428 break;
11429 }
11430 }
11431}
230d793d
RS
11432\f
11433/* Utility routine for the following function. Verify that all the registers
11434 mentioned in *LOC are valid when *LOC was part of a value set when
11435 label_tick == TICK. Return 0 if some are not.
11436
11437 If REPLACE is non-zero, replace the invalid reference with
11438 (clobber (const_int 0)) and return 1. This replacement is useful because
11439 we often can get useful information about the form of a value (e.g., if
11440 it was produced by a shift that always produces -1 or 0) even though
11441 we don't know exactly what registers it was produced from. */
11442
11443static int
9a893315 11444get_last_value_validate (loc, insn, tick, replace)
230d793d 11445 rtx *loc;
9a893315 11446 rtx insn;
230d793d
RS
11447 int tick;
11448 int replace;
11449{
11450 rtx x = *loc;
6f7d635c 11451 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
230d793d
RS
11452 int len = GET_RTX_LENGTH (GET_CODE (x));
11453 int i;
11454
11455 if (GET_CODE (x) == REG)
11456 {
770ae6cc
RK
11457 unsigned int regno = REGNO (x);
11458 unsigned int endregno
11459 = regno + (regno < FIRST_PSEUDO_REGISTER
11460 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11461 unsigned int j;
230d793d
RS
11462
11463 for (j = regno; j < endregno; j++)
11464 if (reg_last_set_invalid[j]
57cf50a4
GRK
11465 /* If this is a pseudo-register that was only set once and not
11466 live at the beginning of the function, it is always valid. */
663522cb 11467 || (! (regno >= FIRST_PSEUDO_REGISTER
57cf50a4 11468 && REG_N_SETS (regno) == 1
770ae6cc
RK
11469 && (! REGNO_REG_SET_P
11470 (BASIC_BLOCK (0)->global_live_at_start, regno)))
230d793d
RS
11471 && reg_last_set_label[j] > tick))
11472 {
11473 if (replace)
38a448ca 11474 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
11475 return replace;
11476 }
11477
11478 return 1;
11479 }
9a893315
JW
11480 /* If this is a memory reference, make sure that there were
11481 no stores after it that might have clobbered the value. We don't
11482 have alias info, so we assume any store invalidates it. */
11483 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11484 && INSN_CUID (insn) <= mem_last_set)
11485 {
11486 if (replace)
38a448ca 11487 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
11488 return replace;
11489 }
230d793d
RS
11490
11491 for (i = 0; i < len; i++)
11492 if ((fmt[i] == 'e'
9a893315 11493 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
230d793d
RS
11494 /* Don't bother with these. They shouldn't occur anyway. */
11495 || fmt[i] == 'E')
11496 return 0;
11497
11498 /* If we haven't found a reason for it to be invalid, it is valid. */
11499 return 1;
11500}
11501
11502/* Get the last value assigned to X, if known. Some registers
11503 in the value may be replaced with (clobber (const_int 0)) if their value
11504 is known longer known reliably. */
11505
11506static rtx
11507get_last_value (x)
11508 rtx x;
11509{
770ae6cc 11510 unsigned int regno;
230d793d
RS
11511 rtx value;
11512
11513 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11514 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 11515 we cannot predict what values the "extra" bits might have. */
230d793d
RS
11516 if (GET_CODE (x) == SUBREG
11517 && subreg_lowpart_p (x)
11518 && (GET_MODE_SIZE (GET_MODE (x))
11519 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11520 && (value = get_last_value (SUBREG_REG (x))) != 0)
11521 return gen_lowpart_for_combine (GET_MODE (x), value);
11522
11523 if (GET_CODE (x) != REG)
11524 return 0;
11525
11526 regno = REGNO (x);
11527 value = reg_last_set_value[regno];
11528
57cf50a4
GRK
11529 /* If we don't have a value, or if it isn't for this basic block and
11530 it's either a hard register, set more than once, or it's a live
663522cb 11531 at the beginning of the function, return 0.
57cf50a4 11532
663522cb 11533 Because if it's not live at the beginnning of the function then the reg
57cf50a4
GRK
11534 is always set before being used (is never used without being set).
11535 And, if it's set only once, and it's always set before use, then all
11536 uses must have the same last value, even if it's not from this basic
11537 block. */
230d793d
RS
11538
11539 if (value == 0
57cf50a4
GRK
11540 || (reg_last_set_label[regno] != label_tick
11541 && (regno < FIRST_PSEUDO_REGISTER
11542 || REG_N_SETS (regno) != 1
770ae6cc
RK
11543 || (REGNO_REG_SET_P
11544 (BASIC_BLOCK (0)->global_live_at_start, regno)))))
230d793d
RS
11545 return 0;
11546
4255220d 11547 /* If the value was set in a later insn than the ones we are processing,
ca4cd906 11548 we can't use it even if the register was only set once. */
bcd49eb7 11549 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
ca4cd906 11550 return 0;
d0ab8cd3
RK
11551
11552 /* If the value has all its registers valid, return it. */
9a893315
JW
11553 if (get_last_value_validate (&value, reg_last_set[regno],
11554 reg_last_set_label[regno], 0))
230d793d
RS
11555 return value;
11556
11557 /* Otherwise, make a copy and replace any invalid register with
11558 (clobber (const_int 0)). If that fails for some reason, return 0. */
11559
11560 value = copy_rtx (value);
9a893315
JW
11561 if (get_last_value_validate (&value, reg_last_set[regno],
11562 reg_last_set_label[regno], 1))
230d793d
RS
11563 return value;
11564
11565 return 0;
11566}
11567\f
11568/* Return nonzero if expression X refers to a REG or to memory
11569 that is set in an instruction more recent than FROM_CUID. */
11570
11571static int
11572use_crosses_set_p (x, from_cuid)
11573 register rtx x;
11574 int from_cuid;
11575{
6f7d635c 11576 register const char *fmt;
230d793d
RS
11577 register int i;
11578 register enum rtx_code code = GET_CODE (x);
11579
11580 if (code == REG)
11581 {
770ae6cc
RK
11582 unsigned int regno = REGNO (x);
11583 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
663522cb
KH
11584 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11585
230d793d
RS
11586#ifdef PUSH_ROUNDING
11587 /* Don't allow uses of the stack pointer to be moved,
11588 because we don't know whether the move crosses a push insn. */
f73ad30e 11589 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
230d793d
RS
11590 return 1;
11591#endif
770ae6cc 11592 for (; regno < endreg; regno++)
e28f5732
RK
11593 if (reg_last_set[regno]
11594 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11595 return 1;
11596 return 0;
230d793d
RS
11597 }
11598
11599 if (code == MEM && mem_last_set > from_cuid)
11600 return 1;
11601
11602 fmt = GET_RTX_FORMAT (code);
11603
11604 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11605 {
11606 if (fmt[i] == 'E')
11607 {
11608 register int j;
11609 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11610 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11611 return 1;
11612 }
11613 else if (fmt[i] == 'e'
11614 && use_crosses_set_p (XEXP (x, i), from_cuid))
11615 return 1;
11616 }
11617 return 0;
11618}
11619\f
11620/* Define three variables used for communication between the following
11621 routines. */
11622
770ae6cc 11623static unsigned int reg_dead_regno, reg_dead_endregno;
230d793d
RS
11624static int reg_dead_flag;
11625
11626/* Function called via note_stores from reg_dead_at_p.
11627
663522cb 11628 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
11629 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
11630
11631static void
84832317 11632reg_dead_at_p_1 (dest, x, data)
230d793d
RS
11633 rtx dest;
11634 rtx x;
84832317 11635 void *data ATTRIBUTE_UNUSED;
230d793d 11636{
770ae6cc 11637 unsigned int regno, endregno;
230d793d
RS
11638
11639 if (GET_CODE (dest) != REG)
11640 return;
11641
11642 regno = REGNO (dest);
663522cb 11643 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
230d793d
RS
11644 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11645
11646 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11647 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11648}
11649
/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1 (the note_stores callback):
     the register-number range occupied by REG, and a cleared flag.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS; such hard regs
     must be treated as always live, so REG cannot be known dead.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  reg_dead_flag becomes 1 for a CLOBBER (dead)
     or -1 for a SET (live); a REG_DEAD note for the first regno also
     proves death.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  If the scan ran off the
     start of the function, INSN is 0 and we are in block 0.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == BLOCK_HEAD (block))
	  break;

      /* INSN is not a block head: conservatively report "not dead".  */
      if (block == n_basic_blocks)
	return 0;
    }

  /* REG is dead only if no part of it is live on entry to the block.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
      return 0;

  return 1;
}
6e25d159
RK
11715\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.

   Every hard register found (except the stack/frame/arg pointers) is
   recorded in the file-scope set NEWPAT_USED_REGS.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    /* Leaf codes that cannot contain a register use.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  (The clobbered location itself is not a use.)  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int endregno, r;

	  /* None of this applies to the stack, frame or arg pointers */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (r = regno; r < endregno; r++)
	    SET_HARD_REG_BIT (newpat_used_regs, r);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  The destination register itself is written, not
	   used, so we do not recurse into a plain REG destination.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
230d793d
RS
11819\f
11820/* Remove register number REGNO from the dead registers list of INSN.
11821
11822 Return the note used to record the death, if there was one. */
11823
11824rtx
11825remove_death (regno, insn)
770ae6cc 11826 unsigned int regno;
230d793d
RS
11827 rtx insn;
11828{
11829 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11830
11831 if (note)
1a26b032 11832 {
b1f21e0a 11833 REG_N_DEATHS (regno)--;
1a26b032
RK
11834 remove_note (insn, note);
11835 }
230d793d
RS
11836
11837 return note;
11838}
11839
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.
	 Walk backwards/forwards to the nearest insns with UIDs inside
	 the recorded range.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* Move the death note only if the death lies in
	 [FROM_CUID, TO_INSN).  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i;

	      /* Re-add death notes for the pieces of the wide register
		 that are outside X's own range.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      /* Skip the registers already covered by NOTE, if any.  */
	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  /* Reuse NOTE if it exactly matches X's mode; otherwise create
	     a fresh REG_DEAD note for X.  */
	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of everything register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* For all other codes, recursively scan the operands.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
12018\f
a7c99304
RK
12019/* Return 1 if X is the target of a bit-field assignment in BODY, the
12020 pattern of an insn. X must be a REG. */
230d793d
RS
12021
12022static int
a7c99304
RK
12023reg_bitfield_target_p (x, body)
12024 rtx x;
230d793d
RS
12025 rtx body;
12026{
12027 int i;
12028
12029 if (GET_CODE (body) == SET)
a7c99304
RK
12030 {
12031 rtx dest = SET_DEST (body);
12032 rtx target;
770ae6cc 12033 unsigned int regno, tregno, endregno, endtregno;
a7c99304
RK
12034
12035 if (GET_CODE (dest) == ZERO_EXTRACT)
12036 target = XEXP (dest, 0);
12037 else if (GET_CODE (dest) == STRICT_LOW_PART)
12038 target = SUBREG_REG (XEXP (dest, 0));
12039 else
12040 return 0;
12041
12042 if (GET_CODE (target) == SUBREG)
12043 target = SUBREG_REG (target);
12044
12045 if (GET_CODE (target) != REG)
12046 return 0;
12047
12048 tregno = REGNO (target), regno = REGNO (x);
12049 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12050 return target == x;
12051
12052 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12053 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12054
12055 return endregno > tregno && regno < endtregno;
12056 }
230d793d
RS
12057
12058 else if (GET_CODE (body) == PARALLEL)
12059 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 12060 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
12061 return 1;
12062
12063 return 0;
663522cb 12064}
230d793d
RS
12065\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      /* PLACE/PLACE2 are the insns (at most two) the note ends up on;
	 zero means the note is dropped.  */
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_EH_REGION:
	case REG_EH_RETHROW:
	case REG_NORETURN:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_NONNEG:
	case REG_WAS_0:
	  /* These notes say something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
	      || rtx_equal_p (XEXP (note, 0), elim_i1))
	    break;

	  /* No direct use found: scan backwards from I3 within this basic
	     block for an insn that sets or uses the register.  */
	  if (place == 0)
	    {
	      basic_block bb = BASIC_BLOCK (this_basic_block);

	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == bb->head)
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter)
				= NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == bb->head)
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     We can be here if it is set at all, not only if it is totally
	     replaced, which is what `dead_or_set_p' checks, so also check
	     for it being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recourse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  unsigned int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece,
						     NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      /* Attach the note to PLACE, or account for a dropped death note.  */
      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      /* A second placement gets a fresh copy of the note.  */
      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
12551\f
12552/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
12553 I3, I2, and I1 to new locations. This is also called in one case to
12554 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
12555
12556static void
12557distribute_links (links)
12558 rtx links;
12559{
12560 rtx link, next_link;
12561
12562 for (link = links; link; link = next_link)
12563 {
12564 rtx place = 0;
12565 rtx insn;
12566 rtx set, reg;
12567
12568 next_link = XEXP (link, 1);
12569
12570 /* If the insn that this link points to is a NOTE or isn't a single
12571 set, ignore it. In the latter case, it isn't clear what we
663522cb 12572 can do other than ignore the link, since we can't tell which
230d793d
RS
12573 register it was for. Such links wouldn't be used by combine
12574 anyway.
12575
12576 It is not possible for the destination of the target of the link to
12577 have been changed by combine. The only potential of this is if we
12578 replace I3, I2, and I1 by I3 and I2. But in that case the
12579 destination of I2 also remains unchanged. */
12580
12581 if (GET_CODE (XEXP (link, 0)) == NOTE
12582 || (set = single_set (XEXP (link, 0))) == 0)
12583 continue;
12584
12585 reg = SET_DEST (set);
12586 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12587 || GET_CODE (reg) == SIGN_EXTRACT
12588 || GET_CODE (reg) == STRICT_LOW_PART)
12589 reg = XEXP (reg, 0);
12590
12591 /* A LOG_LINK is defined as being placed on the first insn that uses
12592 a register and points to the insn that sets the register. Start
12593 searching at the next insn after the target of the link and stop
12594 when we reach a set of the register or the end of the basic block.
12595
12596 Note that this correctly handles the link that used to point from
5089e22e 12597 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
12598 since most links don't point very far away. */
12599
12600 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3 12601 (insn && (this_basic_block == n_basic_blocks - 1
3b413743 12602 || BLOCK_HEAD (this_basic_block + 1) != insn));
230d793d 12603 insn = NEXT_INSN (insn))
2c3c49de 12604 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
230d793d
RS
12605 {
12606 if (reg_referenced_p (reg, PATTERN (insn)))
12607 place = insn;
12608 break;
12609 }
6e2d1486 12610 else if (GET_CODE (insn) == CALL_INSN
663522cb 12611 && find_reg_fusage (insn, USE, reg))
6e2d1486
RK
12612 {
12613 place = insn;
12614 break;
12615 }
230d793d
RS
12616
12617 /* If we found a place to put the link, place it there unless there
12618 is already a link to the same insn as LINK at that point. */
12619
12620 if (place)
12621 {
12622 rtx link2;
12623
12624 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12625 if (XEXP (link2, 0) == XEXP (link, 0))
12626 break;
12627
12628 if (link2 == 0)
12629 {
12630 XEXP (link, 1) = LOG_LINKS (place);
12631 LOG_LINKS (place) = link;
abe6e52f
RK
12632
12633 /* Set added_links_insn to the earliest insn we added a
12634 link to. */
663522cb 12635 if (added_links_insn == 0
abe6e52f
RK
12636 || INSN_CUID (added_links_insn) > INSN_CUID (place))
12637 added_links_insn = place;
230d793d
RS
12638 }
12639 }
12640 }
12641}
12642\f
1427d6d2
RK
12643/* Compute INSN_CUID for INSN, which is an insn made by combine. */
12644
12645static int
12646insn_cuid (insn)
12647 rtx insn;
12648{
12649 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12650 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12651 insn = NEXT_INSN (insn);
12652
12653 if (INSN_UID (insn) > max_uid_cuid)
12654 abort ();
12655
12656 return INSN_CUID (insn);
12657}
12658\f
230d793d
RS
12659void
12660dump_combine_stats (file)
12661 FILE *file;
12662{
ab87f8c8 12663 fnotice
230d793d
RS
12664 (file,
12665 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12666 combine_attempts, combine_merges, combine_extras, combine_successes);
12667}
12668
12669void
12670dump_combine_total_stats (file)
12671 FILE *file;
12672{
ab87f8c8 12673 fnotice
230d793d
RS
12674 (file,
12675 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12676 total_attempts, total_merges, total_extras, total_successes);
12677}
This page took 2.817691 seconds and 5 git commands to generate.