/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS has no links for uses of CC0.  None are needed, because
   the insn that sets CC0 is always immediately before the insn that
   tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn
   explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
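
/* A purely illustrative sketch (register numbers invented): given two
   linked insns

       (set (reg 98) (const_int 5))
       (set (reg 99) (plus:SI (reg 97) (reg 98)))

   substitution forms the candidate

       (set (reg 99) (plus:SI (reg 97) (const_int 5)))

   and, if the machine description recognizes it, the combined insn
   replaces the second insn and the first is deleted.  */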

#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* If byte loads either zero- or sign-extend, define BYTE_LOADS_EXTEND
   for cases when we don't care which is true.  Define LOAD_EXTEND to
   be ZERO_EXTEND or SIGN_EXTEND, depending on which was defined.  */

#ifdef BYTE_LOADS_ZERO_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND ZERO_EXTEND
#endif

#ifdef BYTE_LOADS_SIGN_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND SIGN_EXTEND
#endif

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
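
/* For instance, the test `INSN_CUID (insn) < last_call_cuid' in
   can_combine_p below asks whether INSN was emitted before the most
   recent CALL_INSN scanned.  */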

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				other register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
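
/* An illustrative sketch (register numbers invented): if reg_last_set_value
   records that (reg 65) was last set by
       (set (reg 65) (and:SI (reg 64) (const_int 255)))
   then a later (and:SI (reg 65) (const_int 255)) is redundant, since the
   bits above the low byte of (reg 65) are already known to be zero.  */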

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static HOST_WIDE_INT *reg_nonzero_bits;
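
/* For example, on a machine defining BYTE_LOADS_ZERO_EXTEND, a pseudo
   that is always loaded from memory as a byte could get an entry of 0xff
   here, recording that all higher bits are known to be zero.  */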

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  Keeping
   it zero during the computation prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL) \
  do { rtx _new = (NEWVAL); \
       if (undobuf.num_undo < MAX_UNDO) \
	 { \
	   undobuf.undo[undobuf.num_undo].is_int = 0; \
	   undobuf.undo[undobuf.num_undo].where.rtx = &INTO; \
	   undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
	   INTO = _new; \
	   if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
	     undobuf.num_undo++; \
	 } \
     } while (0)
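
/* A minimal usage sketch (the names PAT and NEW_SRC are invented): to
   replace the source of a SET while keeping the change revocable, write
       SUBST (SET_SRC (pat), new_src);
   If the combination is later rejected, undo_all walks undobuf and stores
   each recorded old_contents back through its where pointer.  */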

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL) \
  do { if (undobuf.num_undo < MAX_UNDO) \
	 { \
	   undobuf.undo[undobuf.num_undo].is_int = 1; \
	   undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	   undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
	   INTO = NEWVAL; \
	   if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	     undobuf.num_undo++; \
	 } \
     } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_nonzero_bits_and_sign_copies ();
static void setup_incoming_promotions ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT nonzero_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_nonzero_bits = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_label, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_label, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));

  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (SET_SRC (set), nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */
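
/* For example, try_combine below validates its candidates with calls such
   as `can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)' before doing
   any substitution.  */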

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
			   : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;	/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }
1441
1442 /* If we already got a failure, don't try to do more. Otherwise,
1443 try to substitute in I1 if we have it. */
1444
1445 if (i1 && GET_CODE (newpat) != CLOBBER)
1446 {
1447 /* Before we can do this substitution, we must redo the test done
1448 above (see detailed comments there) that ensures that I1DEST
1449 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1450
1451 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1452 0, NULL_PTR))
1453 {
1454 undo_all ();
1455 return 0;
1456 }
1457
1458 n_occurrences = 0;
1459 subst_low_cuid = INSN_CUID (i1);
1460 newpat = subst (newpat, i1dest, i1src, 0, 0);
1461 previous_num_undos = undobuf.num_undo;
1462 }
1463
1464 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1465 to count all the ways that I2SRC and I1SRC can be used. */
1466 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1467 && i2_is_used + added_sets_2 > 1)
1468 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1469 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1470 > 1))
1471 /* Fail if we tried to make a new register (we used to abort, but there's
1472 really no reason to). */
1473 || max_reg_num () != maxreg
1474 /* Fail if we couldn't do something and have a CLOBBER. */
1475 || GET_CODE (newpat) == CLOBBER)
1476 {
1477 undo_all ();
1478 return 0;
1479 }
1480
1481 /* If the actions of the earlier insns must be kept
1482 in addition to substituting them into the latest one,
1483 we must make a new PARALLEL for the latest insn
1484 to hold additional the SETs. */
1485
1486 if (added_sets_1 || added_sets_2)
1487 {
1488 combine_extras++;
1489
1490 if (GET_CODE (newpat) == PARALLEL)
1491 {
1492 rtvec old = XVEC (newpat, 0);
1493 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1494 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1495 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1496 sizeof (old->elem[0]) * old->num_elem);
1497 }
1498 else
1499 {
1500 rtx old = newpat;
1501 total_sets = 1 + added_sets_1 + added_sets_2;
1502 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1503 XVECEXP (newpat, 0, 0) = old;
1504 }
1505
1506 if (added_sets_1)
1507 XVECEXP (newpat, 0, --total_sets)
1508 = (GET_CODE (PATTERN (i1)) == PARALLEL
1509 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1510
1511 if (added_sets_2)
1512 {
1513 /* If there is no I1, use I2's body as is. We used to also not do
1514 the subst call below if I2 was substituted into I3,
1515 but that could lose a simplification. */
1516 if (i1 == 0)
1517 XVECEXP (newpat, 0, --total_sets) = i2pat;
1518 else
1519 /* See comment where i2pat is assigned. */
1520 XVECEXP (newpat, 0, --total_sets)
1521 = subst (i2pat, i1dest, i1src, 0, 0);
1522 }
1523 }
1524
1525 /* We come here when we are replacing a destination in I2 with the
1526 destination of I3. */
1527 validate_replacement:
1528
1529 /* Is the result of combination a valid instruction? */
1530 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1531
1532 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1533 the second SET's destination is a register that is unused. In that case,
1534 we just need the first SET. This can occur when simplifying a divmod
1535 insn. We *must* test for this case here because the code below that
1536 splits two independent SETs doesn't handle this case correctly when it
1537 updates the register status. Also check the case where the first
1538 SET's destination is unused. That would not cause incorrect code, but
1539 does cause an unneeded insn to remain. */
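/* For instance, a divmod insn might have produced
     (parallel [(set (reg 100) (div:SI (reg 102) (reg 103)))
                (set (reg 101) (mod:SI (reg 102) (reg 103)))])
   where I3 carries a REG_UNUSED note for (reg 101); keeping only the
   first SET then suffices.  */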
1540
1541 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1542 && XVECLEN (newpat, 0) == 2
1543 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1544 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1545 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1546 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1547 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1548 && asm_noperands (newpat) < 0)
1549 {
1550 newpat = XVECEXP (newpat, 0, 0);
1551 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1552 }
1553
1554 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1555 && XVECLEN (newpat, 0) == 2
1556 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1557 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1558 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1559 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1560 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1561 && asm_noperands (newpat) < 0)
1562 {
1563 newpat = XVECEXP (newpat, 0, 1);
1564 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1565 }
1566
1567 /* See if this is an XOR. If so, perhaps the problem is that the
1568 constant is out of range. Replace it with a complemented XOR with
1569 a complemented constant; it might be in range. */
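/* E.g., in SImode, (xor X (const_int 0xffffff00)) is equivalent to
   (not (xor X (const_int 0xff))) since ~0xffffff00 == 0xff, and the
   complemented constant may fit an immediate field that the original
   constant did not.  */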
1570
1571 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1572 && GET_CODE (SET_SRC (newpat)) == XOR
1573 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1574 && ((temp = simplify_unary_operation (NOT,
1575 GET_MODE (SET_SRC (newpat)),
1576 XEXP (SET_SRC (newpat), 1),
1577 GET_MODE (SET_SRC (newpat))))
1578 != 0))
1579 {
1580 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1581 rtx pat
1582 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1583 gen_unary (NOT, i_mode,
1584 gen_binary (XOR, i_mode,
1585 XEXP (SET_SRC (newpat), 0),
1586 temp)));
1587
1588 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1589 if (insn_code_number >= 0)
1590 newpat = pat;
1591 }
1592
1593 /* If we were combining three insns and the result is a simple SET
1594 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1595 insns. There are two ways to do this. It can be split using a
1596 machine-specific method (like when you have an addition of a large
1597 constant) or by combine in the function find_split_point. */
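/* For instance, (set (reg 100) (plus (reg 101) (const_int 0x12345678)))
   may be unrecognizable on a machine whose add insns take only small
   immediates; splitting lets the constant be built by a separate insn.  */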
1598
1599 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1600 && asm_noperands (newpat) < 0)
1601 {
1602 rtx m_split, *split;
1603 rtx ni2dest = i2dest;
1604
1605 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1606 use I2DEST as a scratch register will help. In the latter case,
1607 convert I2DEST to the mode of the source of NEWPAT if we can. */
1608
1609 m_split = split_insns (newpat, i3);
1610
1611 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1612 inputs of NEWPAT. */
1613
1614 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1615 possible to try that as a scratch reg. This would require adding
1616 more code to make it work though. */
1617
1618 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1619 {
1620 /* If I2DEST is a hard register or the only use of a pseudo,
1621 we can change its mode. */
1622 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1623 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1624 && GET_CODE (i2dest) == REG
1625 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1626 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1627 && ! REG_USERVAR_P (i2dest))))
1628 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1629 REGNO (i2dest));
1630
1631 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1632 gen_rtvec (2, newpat,
1633 gen_rtx (CLOBBER,
1634 VOIDmode,
1635 ni2dest))),
1636 i3);
1637 }
1638
1639 if (m_split && GET_CODE (m_split) == SEQUENCE
1640 && XVECLEN (m_split, 0) == 2
1641 && (next_real_insn (i2) == i3
1642 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1643 INSN_CUID (i2))))
1644 {
1645 rtx i2set, i3set;
1646 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1647 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1648
1649 i3set = single_set (XVECEXP (m_split, 0, 1));
1650 i2set = single_set (XVECEXP (m_split, 0, 0));
1651
1652 /* In case we changed the mode of I2DEST, replace it in the
1653 pseudo-register table here. We can't do it above in case this
1654 code doesn't get executed and we do a split the other way. */
1655
1656 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1657 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1658
1659 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1660
1661 /* If I2 or I3 has multiple SETs, we won't know how to track
1662 register status, so don't use these insns. */
1663
1664 if (i2_code_number >= 0 && i2set && i3set)
1665 insn_code_number = recog_for_combine (&newi3pat, i3,
1666 &new_i3_notes);
1667
1668 if (insn_code_number >= 0)
1669 newpat = newi3pat;
1670
1671 /* It is possible that both insns now set the destination of I3.
1672 If so, we must record the extra set of it. */
1673
1674 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1675 && GET_CODE (SET_DEST (i2set)) == REG
1676 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1677 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1678 }
1679
1680 /* If we can split it and use I2DEST, go ahead and see if that
1681 helps things be recognized. Verify that none of the registers
1682 are set between I2 and I3. */
1683 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1684 #ifdef HAVE_cc0
1685 && GET_CODE (i2dest) == REG
1686 #endif
1687 /* We need I2DEST in the proper mode. If it is a hard register
1688 or the only use of a pseudo, we can change its mode. */
1689 && (GET_MODE (*split) == GET_MODE (i2dest)
1690 || GET_MODE (*split) == VOIDmode
1691 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1692 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1693 && ! REG_USERVAR_P (i2dest)))
1694 && (next_real_insn (i2) == i3
1695 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1696 /* We can't overwrite I2DEST if its value is still used by
1697 NEWPAT. */
1698 && ! reg_referenced_p (i2dest, newpat))
1699 {
1700 rtx newdest = i2dest;
1701
1702 /* Get NEWDEST as a register in the proper mode. We have already
1703 validated that we can do this. */
1704 if (GET_MODE (i2dest) != GET_MODE (*split)
1705 && GET_MODE (*split) != VOIDmode)
1706 {
1707 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1708
1709 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1710 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1711 }
1712
1713 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1714 an ASHIFT. This can occur if it was inside a PLUS and hence
1715 appeared to be a memory address. This is a kludge. */
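/* E.g., (mult X (const_int 8)) is rewritten as (ashift X (const_int 3)),
   since exact_log2 (8) == 3.  */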
1716 if (GET_CODE (*split) == MULT
1717 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1718 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1719 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1720 XEXP (*split, 0), GEN_INT (i)));
1721
1722 #ifdef INSN_SCHEDULING
1723 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1724 be written as a ZERO_EXTEND. */
1725 if (GET_CODE (*split) == SUBREG
1726 && GET_CODE (SUBREG_REG (*split)) == MEM)
1727 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1728 XEXP (*split, 0)));
1729 #endif
1730
1731 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1732 SUBST (*split, newdest);
1733 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1734 if (i2_code_number >= 0)
1735 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1736 }
1737 }
1738
1739 /* Check for a case where we loaded from memory in a narrow mode and
1740 then sign extended it, but we need both registers. In that case,
1741 we have a PARALLEL with both loads from the same memory location.
1742 We can split this into a load from memory followed by a register-register
1743 copy. This saves at least one insn, more if register allocation can
1744 eliminate the copy. */
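/* Concretely, the unrecognized pattern here looks like
     (parallel [(set (reg 100) (sign_extend:SI (mem:HI addr)))
                (set (reg 101) (mem:HI addr))])
   and is rewritten as the extending load alone, followed by a SET of
   (reg 101) from the low part of (reg 100).  */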
1745
1746 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1747 && GET_CODE (newpat) == PARALLEL
1748 && XVECLEN (newpat, 0) == 2
1749 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1750 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1751 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1752 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1753 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1754 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1755 INSN_CUID (i2))
1756 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1757 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1758 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1759 SET_SRC (XVECEXP (newpat, 0, 1)))
1760 && ! find_reg_note (i3, REG_UNUSED,
1761 SET_DEST (XVECEXP (newpat, 0, 0))))
1762 {
1763 rtx ni2dest;
1764
1765 newi2pat = XVECEXP (newpat, 0, 0);
1766 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1767 newpat = XVECEXP (newpat, 0, 1);
1768 SUBST (SET_SRC (newpat),
1769 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1770 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1771 if (i2_code_number >= 0)
1772 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1773
1774 if (insn_code_number >= 0)
1775 {
1776 rtx insn;
1777 rtx link;
1778
1779 /* If we will be able to accept this, we have made a change to the
1780 destination of I3. This can invalidate a LOG_LINKS entry pointing
1781 to I3. No other part of combine.c makes such a transformation.
1782
1783 The new I3 will have a destination that was previously the
1784 destination of I1 or I2 and which was used in I2 or I3. Call
1785 distribute_links to make a LOG_LINK from the next use of
1786 that destination. */
1787
1788 PATTERN (i3) = newpat;
1789 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1790
1791 /* I3 now uses what used to be its destination and which is
1792 now I2's destination. That means we need a LOG_LINK from
1793 I3 to I2. But we used to have one, so we still will.
1794
1795 However, some later insn might be using I2's dest and have
1796 a LOG_LINK pointing at I3. We must remove this link.
1797 The simplest way to remove the link is to point it at I1,
1798 which we know will be a NOTE. */
1799
1800 for (insn = NEXT_INSN (i3);
1801 insn && GET_CODE (insn) != CODE_LABEL
1802 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1803 insn = NEXT_INSN (insn))
1804 {
1805 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1806 && reg_referenced_p (ni2dest, PATTERN (insn)))
1807 {
1808 for (link = LOG_LINKS (insn); link;
1809 link = XEXP (link, 1))
1810 if (XEXP (link, 0) == i3)
1811 XEXP (link, 0) = i1;
1812
1813 break;
1814 }
1815 }
1816 }
1817 }
1818
1819 /* Similarly, check for a case where we have a PARALLEL of two independent
1820 SETs but we started with three insns. In this case, we can do the sets
1821 as two separate insns. This case occurs when some SET allows two
1822 other insns to combine, but the destination of that SET is still live. */
1823
1824 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1825 && GET_CODE (newpat) == PARALLEL
1826 && XVECLEN (newpat, 0) == 2
1827 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1828 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1829 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1830 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1831 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1832 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1833 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1834 INSN_CUID (i2))
1835 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1836 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1837 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1838 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1839 XVECEXP (newpat, 0, 0))
1840 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1841 XVECEXP (newpat, 0, 1)))
1842 {
1843 newi2pat = XVECEXP (newpat, 0, 1);
1844 newpat = XVECEXP (newpat, 0, 0);
1845
1846 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1847 if (i2_code_number >= 0)
1848 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1849 }
1850
1851 /* If it still isn't recognized, fail and change things back the way they
1852 were. */
1853 if ((insn_code_number < 0
1854 /* Is the result a reasonable ASM_OPERANDS? */
1855 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1856 {
1857 undo_all ();
1858 return 0;
1859 }
1860
1861 /* If we had to change another insn, make sure it is valid also. */
1862 if (undobuf.other_insn)
1863 {
1864 rtx other_notes = REG_NOTES (undobuf.other_insn);
1865 rtx other_pat = PATTERN (undobuf.other_insn);
1866 rtx new_other_notes;
1867 rtx note, next;
1868
1869 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1870 &new_other_notes);
1871
1872 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1873 {
1874 undo_all ();
1875 return 0;
1876 }
1877
1878 PATTERN (undobuf.other_insn) = other_pat;
1879
1880 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1881 are still valid. Then add any non-duplicate notes added by
1882 recog_for_combine. */
1883 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1884 {
1885 next = XEXP (note, 1);
1886
1887 if (REG_NOTE_KIND (note) == REG_UNUSED
1888 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1889 {
1890 if (GET_CODE (XEXP (note, 0)) == REG)
1891 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1892
1893 remove_note (undobuf.other_insn, note);
1894 }
1895 }
1896
1897 for (note = new_other_notes; note; note = XEXP (note, 1))
1898 if (GET_CODE (XEXP (note, 0)) == REG)
1899 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1900
1901 distribute_notes (new_other_notes, undobuf.other_insn,
1902 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1903 }
1904
1905 /* We now know that we can do this combination. Merge the insns and
1906 update the status of registers and LOG_LINKS. */
1907
1908 {
1909 rtx i3notes, i2notes, i1notes = 0;
1910 rtx i3links, i2links, i1links = 0;
1911 rtx midnotes = 0;
1912 int all_adjacent = (next_real_insn (i2) == i3
1913 && (i1 == 0 || next_real_insn (i1) == i2));
1914 register int regno;
1915 /* Compute which registers we expect to eliminate. */
1916 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1917 ? 0 : i2dest);
1918 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1919
1920 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1921 clear them. */
1922 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1923 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1924 if (i1)
1925 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1926
1927 /* Ensure that we do not have something that should not be shared but
1928 occurs multiple times in the new insns. Check this by first
1929 resetting all the `used' flags and then copying anything that is shared. */
1930
1931 reset_used_flags (i3notes);
1932 reset_used_flags (i2notes);
1933 reset_used_flags (i1notes);
1934 reset_used_flags (newpat);
1935 reset_used_flags (newi2pat);
1936 if (undobuf.other_insn)
1937 reset_used_flags (PATTERN (undobuf.other_insn));
1938
1939 i3notes = copy_rtx_if_shared (i3notes);
1940 i2notes = copy_rtx_if_shared (i2notes);
1941 i1notes = copy_rtx_if_shared (i1notes);
1942 newpat = copy_rtx_if_shared (newpat);
1943 newi2pat = copy_rtx_if_shared (newi2pat);
1944 if (undobuf.other_insn)
1945 reset_used_flags (PATTERN (undobuf.other_insn));
1946
1947 INSN_CODE (i3) = insn_code_number;
1948 PATTERN (i3) = newpat;
1949 if (undobuf.other_insn)
1950 INSN_CODE (undobuf.other_insn) = other_code_number;
1951
1952 /* We had one special case above where I2 had more than one set and
1953 we replaced a destination of one of those sets with the destination
1954 of I3. In that case, we have to update LOG_LINKS of insns later
1955 in this basic block. Note that this (expensive) case is rare. */
1956
1957 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1958 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1959 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1960 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1961 && ! find_reg_note (i2, REG_UNUSED,
1962 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1963 {
1964 register rtx insn;
1965
1966 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1967 {
1968 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1969 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1970 if (XEXP (link, 0) == i2)
1971 XEXP (link, 0) = i3;
1972
1973 if (GET_CODE (insn) == CODE_LABEL
1974 || GET_CODE (insn) == JUMP_INSN)
1975 break;
1976 }
1977 }
1978
1979 LOG_LINKS (i3) = 0;
1980 REG_NOTES (i3) = 0;
1981 LOG_LINKS (i2) = 0;
1982 REG_NOTES (i2) = 0;
1983
1984 if (newi2pat)
1985 {
1986 INSN_CODE (i2) = i2_code_number;
1987 PATTERN (i2) = newi2pat;
1988 }
1989 else
1990 {
1991 PUT_CODE (i2, NOTE);
1992 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1993 NOTE_SOURCE_FILE (i2) = 0;
1994 }
1995
1996 if (i1)
1997 {
1998 LOG_LINKS (i1) = 0;
1999 REG_NOTES (i1) = 0;
2000 PUT_CODE (i1, NOTE);
2001 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2002 NOTE_SOURCE_FILE (i1) = 0;
2003 }
2004
2005 /* Get death notes for everything that is now used in either I3 or
2006 I2 and used to die in a previous insn. */
2007
2008 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2009 if (newi2pat)
2010 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2011
2012 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2013 if (i3notes)
2014 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2015 elim_i2, elim_i1);
2016 if (i2notes)
2017 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2018 elim_i2, elim_i1);
2019 if (i1notes)
2020 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2021 elim_i2, elim_i1);
2022 if (midnotes)
2023 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2024 elim_i2, elim_i1);
2025
2026 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2027 know these are REG_UNUSED and want them to go to the desired insn,
2028 so we always pass it as i3. We have not counted the notes in
2029 reg_n_deaths yet, so we need to do so now. */
2030
2031 if (newi2pat && new_i2_notes)
2032 {
2033 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2034 if (GET_CODE (XEXP (temp, 0)) == REG)
2035 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2036
2037 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2038 }
2039
2040 if (new_i3_notes)
2041 {
2042 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2043 if (GET_CODE (XEXP (temp, 0)) == REG)
2044 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2045
2046 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2047 }
2048
2049 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2050 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2051 Show an additional death due to the REG_DEAD note we make here. If
2052 we discard it in distribute_notes, we will decrement it again. */
2053
2054 if (i3dest_killed)
2055 {
2056 if (GET_CODE (i3dest_killed) == REG)
2057 reg_n_deaths[REGNO (i3dest_killed)]++;
2058
2059 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2060 NULL_RTX),
2061 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2062 NULL_RTX, NULL_RTX);
2063 }
2064
2065 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2066 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2067 we passed I3 in that case, it might delete I2. */
2068
2069 if (i2dest_in_i2src)
2070 {
2071 if (GET_CODE (i2dest) == REG)
2072 reg_n_deaths[REGNO (i2dest)]++;
2073
2074 if (newi2pat && reg_set_p (i2dest, newi2pat))
2075 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2076 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2077 else
2078 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2079 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2080 NULL_RTX, NULL_RTX);
2081 }
2082
2083 if (i1dest_in_i1src)
2084 {
2085 if (GET_CODE (i1dest) == REG)
2086 reg_n_deaths[REGNO (i1dest)]++;
2087
2088 if (newi2pat && reg_set_p (i1dest, newi2pat))
2089 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2090 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2091 else
2092 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2093 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2094 NULL_RTX, NULL_RTX);
2095 }
2096
2097 distribute_links (i3links);
2098 distribute_links (i2links);
2099 distribute_links (i1links);
2100
2101 if (GET_CODE (i2dest) == REG)
2102 {
2103 rtx link;
2104 rtx i2_insn = 0, i2_val = 0, set;
2105
2106 /* The insn that used to set this register doesn't exist, and
2107 this life of the register may not exist either. See if one of
2108 I3's links points to an insn that sets I2DEST. If it does,
2109 that is now the last known value for I2DEST. If we don't update
2110 this and I2 set the register to a value that depended on its old
2111 contents, we will get confused. If this insn is used, things
2112 will be set correctly in combine_instructions. */
2113
2114 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2115 if ((set = single_set (XEXP (link, 0))) != 0
2116 && rtx_equal_p (i2dest, SET_DEST (set)))
2117 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2118
2119 record_value_for_reg (i2dest, i2_insn, i2_val);
2120
2121 /* If the reg formerly set in I2 died only once and that was in I3,
2122 zero its use count so it won't make `reload' do any work. */
2123 if (! added_sets_2 && newi2pat == 0)
2124 {
2125 regno = REGNO (i2dest);
2126 reg_n_sets[regno]--;
2127 if (reg_n_sets[regno] == 0
2128 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2129 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2130 reg_n_refs[regno] = 0;
2131 }
2132 }
2133
2134 if (i1 && GET_CODE (i1dest) == REG)
2135 {
2136 rtx link;
2137 rtx i1_insn = 0, i1_val = 0, set;
2138
2139 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2140 if ((set = single_set (XEXP (link, 0))) != 0
2141 && rtx_equal_p (i1dest, SET_DEST (set)))
2142 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2143
2144 record_value_for_reg (i1dest, i1_insn, i1_val);
2145
2146 regno = REGNO (i1dest);
2147 if (! added_sets_1)
2148 {
2149 reg_n_sets[regno]--;
2150 if (reg_n_sets[regno] == 0
2151 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2152 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2153 reg_n_refs[regno] = 0;
2154 }
2155 }
2156
2157 /* Update reg_nonzero_bits et al for any changes that may have been made
2158 to this insn. */
2159
2160 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2161 if (newi2pat)
2162 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2163
2164 /* If I3 is now an unconditional jump, ensure that it has a
2165 BARRIER following it since it may have initially been a
2166 conditional jump. It may also be the last nonnote insn. */
2167
2168 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2169 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2170 || GET_CODE (temp) != BARRIER))
2171 emit_barrier_after (i3);
2172 }
2173
2174 combine_successes++;
2175
2176 return newi2pat ? i2 : i3;
2177 }
2178 \f
2179 /* Undo all the modifications recorded in undobuf. */
2180
2181 static void
2182 undo_all ()
2183 {
2184 register int i;
2185 if (undobuf.num_undo > MAX_UNDO)
2186 undobuf.num_undo = MAX_UNDO;
2187 for (i = undobuf.num_undo - 1; i >= 0; i--)
2188 {
2189 if (undobuf.undo[i].is_int)
2190 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2191 else
2192 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2194 }
2195
2196 obfree (undobuf.storage);
2197 undobuf.num_undo = 0;
2198 }
2199 \f
2200 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2201 where we have an arithmetic expression and return that point. LOC will
2202 be inside INSN.
2203
2204 try_combine will call this function to see if an insn can be split into
2205 two insns. */
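/* For example, given (set (reg 100) (plus (reg 101) (const_int 0x12345678)))
   on a machine whose add insns take only small immediates, the constant
   operand is returned as the split point so that it can be loaded into a
   register by a separate insn.  */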
2206
2207 static rtx *
2208 find_split_point (loc, insn)
2209 rtx *loc;
2210 rtx insn;
2211 {
2212 rtx x = *loc;
2213 enum rtx_code code = GET_CODE (x);
2214 rtx *split;
2215 int len = 0, pos, unsignedp;
2216 rtx inner;
2217
2218 /* First special-case some codes. */
2219 switch (code)
2220 {
2221 case SUBREG:
2222 #ifdef INSN_SCHEDULING
2223 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2224 point. */
2225 if (GET_CODE (SUBREG_REG (x)) == MEM)
2226 return loc;
2227 #endif
2228 return find_split_point (&SUBREG_REG (x), insn);
2229
2230 case MEM:
2231 #ifdef HAVE_lo_sum
2232 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2233 using LO_SUM and HIGH. */
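/* E.g., (mem (symbol_ref "x")) becomes
     (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))),
   matching the two-insn absolute-addressing sequences of many RISC
   machines.  */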
2234 if (GET_CODE (XEXP (x, 0)) == CONST
2235 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2236 {
2237 SUBST (XEXP (x, 0),
2238 gen_rtx_combine (LO_SUM, Pmode,
2239 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2240 XEXP (x, 0)));
2241 return &XEXP (XEXP (x, 0), 0);
2242 }
2243 #endif
2244
2245 /* If we have a PLUS whose second operand is a constant and the
2246 address is not valid, perhaps we can split it up using
2247 the machine-specific way to split large constants. We use
2248 the first pseudo-reg (one of the virtual regs) as a placeholder;
2249 it will not remain in the result. */
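/* E.g., the MD file might split (plus (reg 101) (const_int 0x12345678))
   into a SET of the placeholder from (plus (reg 101) (const_int 0x12340000))
   followed by a SET adding (const_int 0x5678) to the placeholder;
   recombining the two sources gives
     (plus (plus (reg 101) (const_int 0x12340000)) (const_int 0x5678))
   with the inner PLUS as the split point.  */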
2250 if (GET_CODE (XEXP (x, 0)) == PLUS
2251 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2252 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2253 {
2254 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2255 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2256 subst_insn);
2257
2258 /* This should have produced two insns, each of which sets our
2259 placeholder. If the source of the second is a valid address,
2260 we can put both sources together and make a split point
2261 in the middle. */
2262
2263 if (seq && XVECLEN (seq, 0) == 2
2264 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2265 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2266 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2267 && ! reg_mentioned_p (reg,
2268 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2269 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2270 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2271 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2272 && memory_address_p (GET_MODE (x),
2273 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2274 {
2275 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2276 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2277
2278 /* Replace the placeholder in SRC2 with SRC1. If we can
2279 find where in SRC2 it was placed, that can become our
2280 split point and we can replace this address with SRC2.
2281 Just try two obvious places. */
2282
2283 src2 = replace_rtx (src2, reg, src1);
2284 split = 0;
2285 if (XEXP (src2, 0) == src1)
2286 split = &XEXP (src2, 0);
2287 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2288 && XEXP (XEXP (src2, 0), 0) == src1)
2289 split = &XEXP (XEXP (src2, 0), 0);
2290
2291 if (split)
2292 {
2293 SUBST (XEXP (x, 0), src2);
2294 return split;
2295 }
2296 }
2297
2298 /* If that didn't work, perhaps the first operand is complex and
2299 needs to be computed separately, so make a split point there.
2300 This will occur on machines that just support REG + CONST
2301 and have a constant moved through some previous computation. */
2302
2303 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2304 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2305 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2306 == 'o')))
2307 return &XEXP (XEXP (x, 0), 0);
2308 }
2309 break;
2310
2311 case SET:
2312 #ifdef HAVE_cc0
2313 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2314 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2315 we need to put the operand into a register. So split at that
2316 point. */
2317
2318 if (SET_DEST (x) == cc0_rtx
2319 && GET_CODE (SET_SRC (x)) != COMPARE
2320 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2321 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2322 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2323 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2324 return &SET_SRC (x);
2325 #endif
2326
2327 /* See if we can split SET_SRC as it stands. */
2328 split = find_split_point (&SET_SRC (x), insn);
2329 if (split && split != &SET_SRC (x))
2330 return split;
2331
2332 /* See if this is a bitfield assignment with everything constant. If
2333 so, this is an IOR of an AND, so split it into that. */
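/* E.g., with BITS_BIG_ENDIAN clear,
     (set (zero_extract:SI (reg 100) (const_int 8) (const_int 8))
          (const_int 0x5a))
   becomes (set (reg 100) (ior (and (reg 100) (const_int 0xffff00ff))
                               (const_int 0x5a00))).  */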
2334 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2335 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2336 <= HOST_BITS_PER_WIDE_INT)
2337 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2338 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2339 && GET_CODE (SET_SRC (x)) == CONST_INT
2340 && ((INTVAL (XEXP (SET_DEST (x), 1))
2341 + INTVAL (XEXP (SET_DEST (x), 2)))
2342 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2343 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2344 {
2345 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2346 int len = INTVAL (XEXP (SET_DEST (x), 1));
2347 int src = INTVAL (SET_SRC (x));
2348 rtx dest = XEXP (SET_DEST (x), 0);
2349 enum machine_mode mode = GET_MODE (dest);
2350 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2351
2352 #if BITS_BIG_ENDIAN
2353 pos = GET_MODE_BITSIZE (mode) - len - pos;
2354 #endif
2355
2356 if (src == mask)
2357 SUBST (SET_SRC (x),
2358 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2359 else
2360 SUBST (SET_SRC (x),
2361 gen_binary (IOR, mode,
2362 gen_binary (AND, mode, dest,
2363 GEN_INT (~ (mask << pos)
2364 & GET_MODE_MASK (mode))),
2365 GEN_INT (src << pos)));
2366
2367 SUBST (SET_DEST (x), dest);
2368
2369 split = find_split_point (&SET_SRC (x), insn);
2370 if (split && split != &SET_SRC (x))
2371 return split;
2372 }
2373
2374 /* Otherwise, see if this is an operation that we can split into two.
2375 If so, try to split that. */
2376 code = GET_CODE (SET_SRC (x));
2377
2378 switch (code)
2379 {
2380 case AND:
2381 /* If we are AND'ing with a large constant that is only a single
2382 bit and the result is only being used in a context where we
2383 need to know if it is zero or non-zero, replace it with a bit
2384 extraction. This will avoid the large constant, which might
2385 have taken more than one insn to make. If the constant were
2386 not a valid argument to the AND but took only one insn to make,
2387 this is no worse, but if it took more than one insn, it will
2388 be better. */
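/* E.g., (set (reg 100) (and (reg 101) (const_int 0x8000))) whose only
   use is (ne (reg 100) (const_int 0)) becomes a one-bit zero_extract
   of bit 15 of (reg 101), avoiding the large constant.  */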
2389
2390 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2391 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2392 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2393 && GET_CODE (SET_DEST (x)) == REG
2394 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2395 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2396 && XEXP (*split, 0) == SET_DEST (x)
2397 && XEXP (*split, 1) == const0_rtx)
2398 {
2399 SUBST (SET_SRC (x),
2400 make_extraction (GET_MODE (SET_DEST (x)),
2401 XEXP (SET_SRC (x), 0),
2402 pos, NULL_RTX, 1, 1, 0, 0));
2403 return find_split_point (loc, insn);
2404 }
2405 break;
2406
2407 case SIGN_EXTEND:
2408 inner = XEXP (SET_SRC (x), 0);
2409 pos = 0;
2410 len = GET_MODE_BITSIZE (GET_MODE (inner));
2411 unsignedp = 0;
2412 break;
2413
2414 case SIGN_EXTRACT:
2415 case ZERO_EXTRACT:
2416 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2417 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2418 {
2419 inner = XEXP (SET_SRC (x), 0);
2420 len = INTVAL (XEXP (SET_SRC (x), 1));
2421 pos = INTVAL (XEXP (SET_SRC (x), 2));
2422
2423 #if BITS_BIG_ENDIAN
2424 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2425 #endif
2426 unsignedp = (code == ZERO_EXTRACT);
2427 }
2428 break;
2429 }
2430
2431 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2432 {
2433 enum machine_mode mode = GET_MODE (SET_SRC (x));
2434
2435 /* For unsigned, we have a choice of a shift followed by an
2436 AND or two shifts. Use two shifts for field sizes where the
2437 constant might be too large. We assume here that we can
2438 always at least get 8-bit constants in an AND insn, which is
2439 true for every current RISC. */
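/* E.g., an 8-bit unsigned field at position 4 in SImode becomes
     (and (lshiftrt X (const_int 4)) (const_int 255)),
   while a 16-bit unsigned field at the same position becomes
     (lshiftrt (ashift X (const_int 12)) (const_int 16)).  */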
2440
2441 if (unsignedp && len <= 8)
2442 {
2443 SUBST (SET_SRC (x),
2444 gen_rtx_combine
2445 (AND, mode,
2446 gen_rtx_combine (LSHIFTRT, mode,
2447 gen_lowpart_for_combine (mode, inner),
2448 GEN_INT (pos)),
2449 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2450
2451 split = find_split_point (&SET_SRC (x), insn);
2452 if (split && split != &SET_SRC (x))
2453 return split;
2454 }
2455 else
2456 {
2457 SUBST (SET_SRC (x),
2458 gen_rtx_combine
2459 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2460 gen_rtx_combine (ASHIFT, mode,
2461 gen_lowpart_for_combine (mode, inner),
2462 GEN_INT (GET_MODE_BITSIZE (mode)
2463 - len - pos)),
2464 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2465
2466 split = find_split_point (&SET_SRC (x), insn);
2467 if (split && split != &SET_SRC (x))
2468 return split;
2469 }
2470 }
2471
2472 /* See if this is a simple operation with a constant as the second
2473 operand. It might be that this constant is out of range and hence
2474 could be used as a split point. */
2475 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2476 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2477 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2478 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2479 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2480 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2481 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2482 == 'o'))))
2483 return &XEXP (SET_SRC (x), 1);
2484
2485 /* Finally, see if this is a simple operation with its first operand
2486 not in a register. The operation might require this operand in a
2487 register, so return it as a split point. We can always do this
2488 because if the first operand were another operation, we would have
2489 already found it as a split point. */
2490 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2491 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2492 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2493 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2494 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2495 return &XEXP (SET_SRC (x), 0);
2496
2497 return 0;
2498
2499 case AND:
2500 case IOR:
2501 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2502 it is better to write this as (not (ior A B)) so we can split it.
2503 Similarly for IOR. */
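/* E.g., (and (not A) (not B)) becomes (not (ior A B)), whose inner
   IOR can then be split.  */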
2504 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2505 {
2506 SUBST (*loc,
2507 gen_rtx_combine (NOT, GET_MODE (x),
2508 gen_rtx_combine (code == IOR ? AND : IOR,
2509 GET_MODE (x),
2510 XEXP (XEXP (x, 0), 0),
2511 XEXP (XEXP (x, 1), 0))));
2512 return find_split_point (loc, insn);
2513 }
2514
2515 /* Many RISC machines have a large set of logical insns. If the
2516 second operand is a NOT, put it first so we will try to split the
2517 other operand first. */
2518 if (GET_CODE (XEXP (x, 1)) == NOT)
2519 {
2520 rtx tem = XEXP (x, 0);
2521 SUBST (XEXP (x, 0), XEXP (x, 1));
2522 SUBST (XEXP (x, 1), tem);
2523 }
2524 break;
2525 }
2526
2527 /* Otherwise, select our actions depending on our rtx class. */
2528 switch (GET_RTX_CLASS (code))
2529 {
2530 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2531 case '3':
2532 split = find_split_point (&XEXP (x, 2), insn);
2533 if (split)
2534 return split;
2535 /* ... fall through ... */
2536 case '2':
2537 case 'c':
2538 case '<':
2539 split = find_split_point (&XEXP (x, 1), insn);
2540 if (split)
2541 return split;
2542 /* ... fall through ... */
2543 case '1':
2544 /* Some machines have (and (shift ...) ...) insns. If X is not
2545 an AND, but XEXP (X, 0) is, use it as our split point. */
2546 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2547 return &XEXP (x, 0);
2548
2549 split = find_split_point (&XEXP (x, 0), insn);
2550 if (split)
2551 return split;
2552 return loc;
2553 }
2554
2555 /* Otherwise, we don't have a split point. */
2556 return 0;
2557 }
2558 \f
2559 /* Throughout X, replace FROM with TO, and return the result.
2560 The result is TO if X is FROM;
2561 otherwise the result is X, but its contents may have been modified.
2562 If they were modified, a record was made in undobuf so that
2563 undo_all will (among other things) return X to its original state.
2564
2565 If the number of changes necessary is too much to record to undo,
2566 the excess changes are not made, so the result is invalid.
2567 The changes already made can still be undone.
2568 undobuf.num_undo is incremented for such changes, so by testing that,
2569 the caller can tell whether the result is valid.
2570
2571 `n_occurrences' is incremented each time FROM is replaced.
2572
2573 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2574
2575 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2576 by copying if `n_occurrences' is non-zero. */
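/* E.g., in the simplest case try_combine calls
     newpat = subst (PATTERN (i3), i2dest, i2src, 0, 0);
   so that NEWPAT computes I3's result directly from I2's operands.  */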
2577
2578 static rtx
2579 subst (x, from, to, in_dest, unique_copy)
2580 register rtx x, from, to;
2581 int in_dest;
2582 int unique_copy;
2583 {
2584 register char *fmt;
2585 register int len, i;
2586 register enum rtx_code code = GET_CODE (x), orig_code = code;
2587 rtx temp;
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode op0_mode = VOIDmode;
2590 rtx other_insn;
2591 rtx *cc_use;
2592 int n_restarts = 0;
2593
2594 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2595 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2596 If it is 0, that cannot be done. We can now do this for any MEM
2597 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2598 If not for that, MEM's would very rarely be safe. */
2599
2600 /* Reject MODEs bigger than a word, because we might not be able
2601 to reference a two-register group starting with an arbitrary register
2602 (and currently gen_lowpart might crash for a SUBREG). */
2603
2604 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2605 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2606
2607 /* Two expressions are equal if they are identical copies of a shared
2608 RTX or if they are both registers with the same register number
2609 and mode. */
2610
2611 #define COMBINE_RTX_EQUAL_P(X,Y) \
2612 ((X) == (Y) \
2613 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2614 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2615
2616 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2617 {
2618 n_occurrences++;
2619 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2620 }
2621
2622 /* If X and FROM are the same register but different modes, they will
2623 not have been seen as equal above. However, flow.c will make a
2624 LOG_LINKS entry for that case. If we do nothing, we will try to
2625 rerecognize our original insn and, when it succeeds, we will
2626 delete the feeding insn, which is incorrect.
2627
2628 So force this insn not to match in this (rare) case. */
2629 if (! in_dest && code == REG && GET_CODE (from) == REG
2630 && REGNO (x) == REGNO (from))
2631 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2632
2633 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2634 of which may contain things that can be combined. */
2635 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2636 return x;
2637
2638 /* It is possible to have a subexpression appear twice in the insn.
2639 Suppose that FROM is a register that appears within TO.
2640 Then, after that subexpression has been scanned once by `subst',
2641 the second time it is scanned, TO may be found. If we were
2642 to scan TO here, we would find FROM within it and create a
2643 self-referential rtl structure, which is completely wrong. */
2644 if (COMBINE_RTX_EQUAL_P (x, to))
2645 return to;
2646
2647 len = GET_RTX_LENGTH (code);
2648 fmt = GET_RTX_FORMAT (code);
2649
2650 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2651 set up to skip this common case. All other cases where we want to
2652 suppress replacing something inside a SET_SRC are handled via the
2653 IN_DEST operand. */
2654 if (code == SET
2655 && (GET_CODE (SET_DEST (x)) == REG
2656 || GET_CODE (SET_DEST (x)) == CC0
2657 || GET_CODE (SET_DEST (x)) == PC))
2658 fmt = "ie";
2659
2660 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2661 if (fmt[0] == 'e')
2662 op0_mode = GET_MODE (XEXP (x, 0));
2663
2664 for (i = 0; i < len; i++)
2665 {
2666 if (fmt[i] == 'E')
2667 {
2668 register int j;
2669 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2670 {
2671 register rtx new;
2672 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2673 {
2674 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2675 n_occurrences++;
2676 }
2677 else
2678 {
2679 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2680
2681 /* If this substitution failed, this whole thing fails. */
2682 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2683 return new;
2684 }
2685
2686 SUBST (XVECEXP (x, i, j), new);
2687 }
2688 }
2689 else if (fmt[i] == 'e')
2690 {
2691 register rtx new;
2692
2693 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2694 {
2695 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2696 n_occurrences++;
2697 }
2698 else
2699 /* If we are in a SET_DEST, suppress most cases unless we
2700 have gone inside a MEM, in which case we want to
2701 simplify the address. We assume here that things that
2702 are actually part of the destination have their inner
2703 parts in the first expression. This is true for SUBREG,
2704 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2705 things aside from REG and MEM that should appear in a
2706 SET_DEST. */
2707 new = subst (XEXP (x, i), from, to,
2708 (((in_dest
2709 && (code == SUBREG || code == STRICT_LOW_PART
2710 || code == ZERO_EXTRACT))
2711 || code == SET)
2712 && i == 0), unique_copy);
2713
2714 /* If we found that we will have to reject this combination,
2715 indicate that by returning the CLOBBER ourselves, rather than
2716 an expression containing it. This will speed things up as
2717 well as prevent accidents where two CLOBBERs are considered
2718 to be equal, thus producing an incorrect simplification. */
2719
2720 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2721 return new;
2722
2723 SUBST (XEXP (x, i), new);
2724 }
2725 }
2726
2727 /* We come back to here if we have replaced the expression with one of
2728 a different code and it is likely that further simplification will be
2729 possible. */
2730
2731 restart:
2732
2733 /* If we have restarted more than 4 times, we are probably looping, so
2734 give up. */
2735 if (++n_restarts > 4)
2736 return x;
2737
2738 /* If we are restarting at all, it means that we no longer know the
2739 original mode of operand 0 (since we have probably changed the
2740 form of X). */
2741
2742 if (n_restarts > 1)
2743 op0_mode = VOIDmode;
2744
2745 code = GET_CODE (x);
2746
2747 /* If this is a commutative operation, put a constant last and a complex
2748 expression first. We don't need to do this for comparisons here. */
2749 if (GET_RTX_CLASS (code) == 'c'
2750 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2751 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2752 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2753 || (GET_CODE (XEXP (x, 0)) == SUBREG
2754 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2755 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2756 {
2757 temp = XEXP (x, 0);
2758 SUBST (XEXP (x, 0), XEXP (x, 1));
2759 SUBST (XEXP (x, 1), temp);
2760 }
2761
2762 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2763 sign extension of a PLUS with a constant, reverse the order of the sign
2764 extension and the addition. Note that this is not the same as the original
2765 code, but overflow is undefined for signed values. Also note that the
2766 PLUS will have been partially moved "inside" the sign-extension, so that
2767 the first operand of X will really look like:
2768 (ashiftrt (plus (ashift A C4) C5) C4).
2769 We convert this to
2770 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2771 and replace the first operand of X with that expression. Later parts
2772 of this function may simplify the expression further.
2773
2774 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2775 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2776 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2777
2778 We do this to simplify address expressions. */
2779
2780 if ((code == PLUS || code == MINUS || code == MULT)
2781 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2782 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2783 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2784 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2785 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2786 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2787 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2788 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2789 XEXP (XEXP (XEXP (x, 0), 0), 1),
2790 XEXP (XEXP (x, 0), 1))) != 0)
2791 {
2792 rtx new
2793 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2794 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2795 INTVAL (XEXP (XEXP (x, 0), 1)));
2796
2797 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2798 INTVAL (XEXP (XEXP (x, 0), 1)));
2799
2800 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2801 }
2802
2803 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2804 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2805 things. Don't deal with operations that change modes here. */
2806
2807 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2808 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2809 {
2810 /* Don't do this by using SUBST inside X since we might be messing
2811 up a shared expression. */
2812 rtx cond = XEXP (XEXP (x, 0), 0);
2813 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2814 XEXP (x, 1)),
2815 pc_rtx, pc_rtx, 0, 0);
2816 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2817 XEXP (x, 1)),
2818 pc_rtx, pc_rtx, 0, 0);
2819
2821 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2822 goto restart;
2823 }
2824
2825 else if (GET_RTX_CLASS (code) == '1'
2826 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2827 && GET_MODE (XEXP (x, 0)) == mode)
2828 {
2829 rtx cond = XEXP (XEXP (x, 0), 0);
2830 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2831 pc_rtx, pc_rtx, 0, 0);
2832 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2833 pc_rtx, pc_rtx, 0, 0);
2834
2835 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2836 goto restart;
2837 }
2838
2839 /* Try to fold this expression in case we have constants that weren't
2840 present before. */
2841 temp = 0;
2842 switch (GET_RTX_CLASS (code))
2843 {
2844 case '1':
2845 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2846 break;
2847 case '<':
2848 temp = simplify_relational_operation (code, op0_mode,
2849 XEXP (x, 0), XEXP (x, 1));
2850 #ifdef FLOAT_STORE_FLAG_VALUE
2851 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2852 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2853 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2854 #endif
2855 break;
2856 case 'c':
2857 case '2':
2858 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2859 break;
2860 case 'b':
2861 case '3':
2862 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2863 XEXP (x, 1), XEXP (x, 2));
2864 break;
2865 }
2866
2867 if (temp)
2868 x = temp, code = GET_CODE (temp);
2869
2870 /* First see if we can apply the inverse distributive law. */
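/* E.g., (ior (and A C) (and B C)) is rewritten as (and (ior A B) C).  */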
2871 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2872 {
2873 x = apply_distributive_law (x);
2874 code = GET_CODE (x);
2875 }
2876
2877 /* If CODE is an associative operation not otherwise handled, see if we
2878 can associate some operands. This can win if they are constants or
2879 if they are logically related (e.g., (a & b) & a). */
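/* E.g., (plus (plus X (const_int 3)) (const_int 5)) reassociates to
   (plus X (const_int 8)).  */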
2880 if ((code == PLUS || code == MINUS
2881 || code == MULT || code == AND || code == IOR || code == XOR
2882 || code == DIV || code == UDIV
2883 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2884 && GET_MODE_CLASS (mode) == MODE_INT)
2885 {
2886 if (GET_CODE (XEXP (x, 0)) == code)
2887 {
2888 rtx other = XEXP (XEXP (x, 0), 0);
2889 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2890 rtx inner_op1 = XEXP (x, 1);
2891 rtx inner;
2892
2893 /* Make sure we pass the constant operand, if any, as the second
2894 one if this is a commutative operation. */
2895 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2896 {
2897 rtx tem = inner_op0;
2898 inner_op0 = inner_op1;
2899 inner_op1 = tem;
2900 }
2901 inner = simplify_binary_operation (code == MINUS ? PLUS
2902 : code == DIV ? MULT
2903 : code == UDIV ? MULT
2904 : code,
2905 mode, inner_op0, inner_op1);
2906
2907 /* For commutative operations, try the other pair if that one
2908 didn't simplify. */
2909 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2910 {
2911 other = XEXP (XEXP (x, 0), 1);
2912 inner = simplify_binary_operation (code, mode,
2913 XEXP (XEXP (x, 0), 0),
2914 XEXP (x, 1));
2915 }
2916
2917 if (inner)
2918 {
2919 x = gen_binary (code, mode, other, inner);
2920 goto restart;
2922 }
2923 }
2924 }
2925
2926 /* A little bit of algebraic simplification here. */
2927 switch (code)
2928 {
2929 case MEM:
2930 /* Ensure that our address has any ASHIFTs converted to MULT in case
2931 address-recognizing predicates are called later. */
2932 temp = make_compound_operation (XEXP (x, 0), MEM);
2933 SUBST (XEXP (x, 0), temp);
2934 break;
2935
2936 case SUBREG:
2937 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2938 is paradoxical. If we can't do that safely, then it becomes
2939 something nonsensical so that this combination won't take place. */
2940
2941 if (GET_CODE (SUBREG_REG (x)) == MEM
2942 && (GET_MODE_SIZE (mode)
2943 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2944 {
2945 rtx inner = SUBREG_REG (x);
2946 int endian_offset = 0;
2947 /* Don't change the mode of the MEM
2948 if that would change the meaning of the address. */
2949 if (MEM_VOLATILE_P (SUBREG_REG (x))
2950 || mode_dependent_address_p (XEXP (inner, 0)))
2951 return gen_rtx (CLOBBER, mode, const0_rtx);
2952
2953 #if BYTES_BIG_ENDIAN
2954 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2955 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2956 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2957 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2958 #endif
2959 /* Note that if plus_constant doesn't make a valid address,
2960 this combination won't be accepted. */
2961 x = gen_rtx (MEM, mode,
2962 plus_constant (XEXP (inner, 0),
2963 (SUBREG_WORD (x) * UNITS_PER_WORD
2964 + endian_offset)));
2965 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2966 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2967 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2968 return x;
2969 }
2970
2971 /* If we are in a SET_DEST, these other cases can't apply. */
2972 if (in_dest)
2973 return x;
2974
2975 /* Changing mode twice with SUBREG => just change it once,
2976 or not at all if changing back to starting mode. */
2977 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2978 {
2979 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2980 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2981 return SUBREG_REG (SUBREG_REG (x));
2982
2983 SUBST_INT (SUBREG_WORD (x),
2984 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2985 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2986 }
2987
2988 /* SUBREG of a hard register => just change the register number
2989 and/or mode. If the hard register is not valid in that mode,
2990 suppress this combination. If the hard register is the stack,
2991 frame, or argument pointer, leave this as a SUBREG. */
2992
2993 if (GET_CODE (SUBREG_REG (x)) == REG
2994 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2995 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2996 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2997 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2998 #endif
2999 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3000 {
3001 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3002 mode))
3003 return gen_rtx (REG, mode,
3004 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3005 else
3006 return gen_rtx (CLOBBER, mode, const0_rtx);
3007 }
3008
3009 /* For a constant, try to pick up the part we want. Handle a full
3010 word and low-order part. Only do this if we are narrowing
3011 the constant; if it is being widened, we have no idea what
3012 the extra bits will have been set to. */
3013
3014 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3015 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3016 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3017 && GET_MODE_CLASS (mode) == MODE_INT)
3018 {
3019 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3020 0, op0_mode);
3021 if (temp)
3022 return temp;
3023 }
3024
3025 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3026 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
3027 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3028
3029 /* If we are narrowing the object, we need to see if we can simplify
3030 the expression for the object knowing that we only need the
3031 low-order bits. */
3032
3033 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
3034 && subreg_lowpart_p (x))
3035 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
3036 NULL_RTX);
3037 break;
3038
3039 case NOT:
3040 /* (not (plus X -1)) can become (neg X). */
3041 if (GET_CODE (XEXP (x, 0)) == PLUS
3042 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3043 {
3044 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3045 goto restart;
3046 }
3047
3048 /* Similarly, (not (neg X)) is (plus X -1). */
3049 if (GET_CODE (XEXP (x, 0)) == NEG)
3050 {
3051 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3052 goto restart;
3053 }
3054
3055 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3056 if (GET_CODE (XEXP (x, 0)) == XOR
3057 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3058 && (temp = simplify_unary_operation (NOT, mode,
3059 XEXP (XEXP (x, 0), 1),
3060 mode)) != 0)
3061 {
3062 SUBST (XEXP (XEXP (x, 0), 1), temp);
3063 return XEXP (x, 0);
3064 }
3065
3066 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3067 other than 1, but that is not valid. We could do a similar
3068 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3069 but this doesn't seem common enough to bother with. */
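/* The identity holds because ~1 is all ones except bit 0; rotating it
   left by X leaves all ones except bit X, which is ~(1 << X).  */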
3070 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3071 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3072 {
3073 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3074 XEXP (XEXP (x, 0), 1));
3075 goto restart;
3076 }
3077
3078 if (GET_CODE (XEXP (x, 0)) == SUBREG
3079 && subreg_lowpart_p (XEXP (x, 0))
3080 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3081 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3082 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3083 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3084 {
3085 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3086
3087 x = gen_rtx (ROTATE, inner_mode,
3088 gen_unary (NOT, inner_mode, const1_rtx),
3089 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3090 x = gen_lowpart_for_combine (mode, x);
3091 goto restart;
3092 }
3093
3094 #if STORE_FLAG_VALUE == -1
3095 /* (not (comparison foo bar)) can be done by reversing the comparison
3096 code if valid. */
3097 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3098 && reversible_comparison_p (XEXP (x, 0)))
3099 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3100 mode, XEXP (XEXP (x, 0), 0),
3101 XEXP (XEXP (x, 0), 1));
3102
3103 /* Likewise, (not (ashiftrt foo C)) where C is the number of bits
3104 in FOO minus 1 reverses (lt foo (const_int 0)) and so becomes
3105 (ge foo (const_int 0)). NOT is unary, so only its single
3106 operand is examined. */
3107
3108 if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3109 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3110 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3111 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3112 #endif
3113
3114 /* Apply De Morgan's laws to reduce number of patterns for machines
3115 with negating logical insns (and-not, nand, etc.). If result has
3116 only one NOT, put it first, since that is how the patterns are
3117 coded. */
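/* For example (illustrative): (not (and X (not Y))) becomes
(ior (not X) Y), with the single remaining NOT first, as
and-not/or-not patterns expect. */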
3118
3119 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3120 {
3121 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3122
3123 if (GET_CODE (in1) == NOT)
3124 in1 = XEXP (in1, 0);
3125 else
3126 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3127
3128 if (GET_CODE (in2) == NOT)
3129 in2 = XEXP (in2, 0);
3130 else if (GET_CODE (in2) == CONST_INT
3131 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3132 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3133 else
3134 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3135
3136 if (GET_CODE (in2) == NOT)
3137 {
3138 rtx tem = in2;
3139 in2 = in1; in1 = tem;
3140 }
3141
3142 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3143 mode, in1, in2);
3144 goto restart;
3145 }
3146 break;
3147
3148 case NEG:
3149 /* (neg (plus X 1)) can become (not X). */
3150 if (GET_CODE (XEXP (x, 0)) == PLUS
3151 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3152 {
3153 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3154 goto restart;
3155 }
3156
3157 /* Similarly, (neg (not X)) is (plus X 1). */
3158 if (GET_CODE (XEXP (x, 0)) == NOT)
3159 {
3160 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3161 goto restart;
3162 }
3163
3164 /* (neg (minus X Y)) can become (minus Y X). */
3165 if (GET_CODE (XEXP (x, 0)) == MINUS
3166 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3167 /* x-y != -(y-x) with IEEE floating point. */
3168 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3169 {
3170 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3171 XEXP (XEXP (x, 0), 0));
3172 goto restart;
3173 }
3174
3175 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
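/* Worked check (illustrative): A = 0 gives (neg 1) = -1 = 0 + -1,
and A = 1 gives (neg 0) = 0 = 1 + -1. */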
3176 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3177 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3178 {
3179 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3180 goto restart;
3181 }
3182
3183 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3184 if we can then eliminate the NEG (e.g.,
3185 if the operand is a constant). */
3186
3187 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3188 {
3189 temp = simplify_unary_operation (NEG, mode,
3190 XEXP (XEXP (x, 0), 0), mode);
3191 if (temp)
3192 {
3193 SUBST (XEXP (XEXP (x, 0), 0), temp);
3194 return XEXP (x, 0);
3195 }
3196 }
3197
3198 temp = expand_compound_operation (XEXP (x, 0));
3199
3200 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3201 replaced by (lshiftrt X C). This will convert
3202 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3203
3204 if (GET_CODE (temp) == ASHIFTRT
3205 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3206 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3207 {
3208 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3209 INTVAL (XEXP (temp, 1)));
3210 goto restart;
3211 }
3212
3213 /* If X has only a single bit that might be nonzero, say, bit I, convert
3214 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3215 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3216 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3217 or a SUBREG of one since we'd be making the expression more
3218 complex if it was just a register. */
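/* For instance (a sketch, assuming a 32-bit mode): if only bit 0
of TEMP can be nonzero (I = 0), (ashiftrt (ashift TEMP 31) 31)
maps 1 to -1 and 0 to 0, which is exactly (neg TEMP). */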
3219
3220 if (GET_CODE (temp) != REG
3221 && ! (GET_CODE (temp) == SUBREG
3222 && GET_CODE (SUBREG_REG (temp)) == REG)
3223 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3224 {
3225 rtx temp1 = simplify_shift_const
3226 (NULL_RTX, ASHIFTRT, mode,
3227 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3228 GET_MODE_BITSIZE (mode) - 1 - i),
3229 GET_MODE_BITSIZE (mode) - 1 - i);
3230
3231 /* If all we did was surround TEMP with the two shifts, we
3232 haven't improved anything, so don't use it. Otherwise,
3233 we are better off with TEMP1. */
3234 if (GET_CODE (temp1) != ASHIFTRT
3235 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3236 || XEXP (XEXP (temp1, 0), 0) != temp)
3237 {
3238 x = temp1;
3239 goto restart;
3240 }
3241 }
3242 break;
3243
3244 case FLOAT_TRUNCATE:
3245 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3246 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3247 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3248 return XEXP (XEXP (x, 0), 0);
3249 break;
3250
3251 #ifdef HAVE_cc0
3252 case COMPARE:
3253 /* Convert (compare FOO (const_int 0)) to FOO. We only get here
3254 when cc0 is in use; without cc0 we would keep the COMPARE so it
3255 can be distinguished from a register-register copy. */
3256 if (XEXP (x, 1) == const0_rtx)
3257 return XEXP (x, 0);
3258
3259 /* In IEEE floating point, x-0 is not the same as x. */
3260 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3261 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3262 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3263 return XEXP (x, 0);
3264 break;
3265 #endif
3266
3267 case CONST:
3268 /* (const (const X)) can become (const X). Do it this way rather than
3269 returning the inner CONST since CONST can be shared with a
3270 REG_EQUAL note. */
3271 if (GET_CODE (XEXP (x, 0)) == CONST)
3272 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3273 break;
3274
3275 #ifdef HAVE_lo_sum
3276 case LO_SUM:
3277 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3278 can add in an offset. find_split_point will split this address up
3279 again if it doesn't match. */
3280 if (GET_CODE (XEXP (x, 0)) == HIGH
3281 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3282 return XEXP (x, 1);
3283 break;
3284 #endif
3285
3286 case PLUS:
3287 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3288 outermost. That's because that's the way indexed addresses are
3289 supposed to appear. This code used to check many more cases, but
3290 they are now checked elsewhere. */
3291 if (GET_CODE (XEXP (x, 0)) == PLUS
3292 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3293 return gen_binary (PLUS, mode,
3294 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3295 XEXP (x, 1)),
3296 XEXP (XEXP (x, 0), 1));
3297
3298 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3299 when c is (const_int (pow2 / 2)) is a sign extension of a
3300 bit-field and can be replaced by either a sign_extend or a
3301 sign_extract. The `and' may be a zero_extend. */
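/* Concrete instance (a sketch): with i = 7,
(plus (xor (and FOO 0xff) 0x80) -0x80) sign-extends the low
byte of FOO: 0 through 0x7f map to themselves, while 0x80
through 0xff map to -0x80 through -1. */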
3302 if (GET_CODE (XEXP (x, 0)) == XOR
3303 && GET_CODE (XEXP (x, 1)) == CONST_INT
3304 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3305 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3306 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3308 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3309 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3310 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3311 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3312 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3313 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3314 == i + 1))))
3315 {
3316 x = simplify_shift_const
3317 (NULL_RTX, ASHIFTRT, mode,
3318 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3319 XEXP (XEXP (XEXP (x, 0), 0), 0),
3320 GET_MODE_BITSIZE (mode) - (i + 1)),
3321 GET_MODE_BITSIZE (mode) - (i + 1));
3322 goto restart;
3323 }
3324
3325 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3326 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3327 the bitsize of the mode - 1. This allows simplification of
3328 "a = (b & 8) == 0;" */
3329 if (XEXP (x, 1) == constm1_rtx
3330 && GET_CODE (XEXP (x, 0)) != REG
3331 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3332 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3333 && nonzero_bits (XEXP (x, 0), mode) == 1)
3334 {
3335 x = simplify_shift_const
3336 (NULL_RTX, ASHIFTRT, mode,
3337 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3338 gen_rtx_combine (XOR, mode,
3339 XEXP (x, 0), const1_rtx),
3340 GET_MODE_BITSIZE (mode) - 1),
3341 GET_MODE_BITSIZE (mode) - 1);
3342 goto restart;
3343 }
3344
3345 /* If we are adding two things that have no bits in common, convert
3346 the addition into an IOR. This will often be further simplified,
3347 for example in cases like ((a & 1) + (a & 2)), which can
3348 become a & 3. */
3349
3350 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3351 && (nonzero_bits (XEXP (x, 0), mode)
3352 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3353 {
3354 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3355 goto restart;
3356 }
3357 break;
3358
3359 case MINUS:
3360 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3361 (and <foo> (const_int pow2-1)) */
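/* For example (a sketch): (and FOO (const_int -8)) is FOO with
its low three bits cleared, so subtracting it from FOO leaves
just those bits, i.e. (and FOO (const_int 7)). */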
3362 if (GET_CODE (XEXP (x, 1)) == AND
3363 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3364 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3365 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3366 {
3367 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3368 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3369 goto restart;
3370 }
3371 break;
3372
3373 case MULT:
3374 /* If we have (mult (plus A B) C), apply the distributive law and then
3375 the inverse distributive law to see if things simplify. This
3376 occurs mostly in addresses, often when unrolling loops. */
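/* E.g. (illustrative): (mult (plus A (const_int 4)) (const_int 8))
becomes (plus (mult A (const_int 8)) (const_int 32)), the usual
shape of a scaled-index address. */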
3377
3378 if (GET_CODE (XEXP (x, 0)) == PLUS)
3379 {
3380 x = apply_distributive_law
3381 (gen_binary (PLUS, mode,
3382 gen_binary (MULT, mode,
3383 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3384 gen_binary (MULT, mode,
3385 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3386
3387 if (GET_CODE (x) != MULT)
3388 goto restart;
3389 }
3390
3391 /* If this is multiplication by a power of two and its first operand is
3392 a shift, treat the multiply as a shift to allow the shifts to
3393 possibly combine. */
3394 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3395 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3396 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3397 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3398 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3399 || GET_CODE (XEXP (x, 0)) == ROTATE
3400 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3401 {
3402 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3403 goto restart;
3404 }
3405
3406 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3407 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3408 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3409 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3410 XEXP (XEXP (x, 0), 1));
3411 break;
3412
3413 case UDIV:
3414 /* If this is a divide by a power of two, treat it as a shift if
3415 its first operand is a shift. */
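/* For instance (a sketch): (udiv (ashift X 3) (const_int 4))
is rewritten as (lshiftrt (ashift X 3) 2), which
simplify_shift_const may then fold further. */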
3416 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3417 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3418 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3419 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3420 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3421 || GET_CODE (XEXP (x, 0)) == ROTATE
3422 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3423 {
3424 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3425 goto restart;
3426 }
3427 break;
3428
3429 case EQ: case NE:
3430 case GT: case GTU: case GE: case GEU:
3431 case LT: case LTU: case LE: case LEU:
3432 /* If the first operand is a condition code, we can't do anything
3433 with it. */
3434 if (GET_CODE (XEXP (x, 0)) == COMPARE
3435 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3436 #ifdef HAVE_cc0
3437 && XEXP (x, 0) != cc0_rtx
3438 #endif
3439 ))
3440 {
3441 rtx op0 = XEXP (x, 0);
3442 rtx op1 = XEXP (x, 1);
3443 enum rtx_code new_code;
3444
3445 if (GET_CODE (op0) == COMPARE)
3446 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3447
3448 /* Simplify our comparison, if possible. */
3449 new_code = simplify_comparison (code, &op0, &op1);
3450
3451 #if STORE_FLAG_VALUE == 1
3452 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3453 if only the low-order bit is possibly nonzero in X (such as when
3454 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3455 EQ to (xor X 1). Remove any ZERO_EXTRACT we made when thinking
3456 this was a comparison. It may now be simpler to use, e.g., an
3457 AND. If a ZERO_EXTRACT is indeed appropriate, it will
3458 be placed back by the call to make_compound_operation in the
3459 SET case. */
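/* Illustration (not from the original comments): if X is known
to be 0 or 1, (ne X 0) has the same value as X itself, and
(eq X 0) has the same value as (xor X 1). */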
3460 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3461 && op1 == const0_rtx
3462 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3463 return gen_lowpart_for_combine (mode,
3464 expand_compound_operation (op0));
3465 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3466 && op1 == const0_rtx
3467 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3468 {
3469 op0 = expand_compound_operation (op0);
3470
3471 x = gen_rtx_combine (XOR, mode,
3472 gen_lowpart_for_combine (mode, op0),
3473 const1_rtx);
3474 goto restart;
3475 }
3476 #endif
3477
3478 #if STORE_FLAG_VALUE == -1
3479 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3480 to (neg x) if only the low-order bit of X can be nonzero.
3481 This converts (ne (zero_extract X 1 Y) 0) to
3482 (sign_extract X 1 Y). */
3483 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3484 && op1 == const0_rtx
3485 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3486 {
3487 op0 = expand_compound_operation (op0);
3488 x = gen_rtx_combine (NEG, mode,
3489 gen_lowpart_for_combine (mode, op0));
3490 goto restart;
3491 }
3492 #endif
3493
3494 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3495 one bit that might be nonzero, we can convert (ne x 0) to
3496 (ashift x c) where C puts the bit in the sign bit. Remove any
3497 AND with STORE_FLAG_VALUE when we are done, since we are only
3498 going to test the sign bit. */
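/* Sketch of the idea (assuming a 32-bit mode whose
STORE_FLAG_VALUE is 0x80000000): if only bit 3 of X can be
nonzero, (ashift X 28) moves that bit into the sign bit, so
testing the sign is equivalent to testing (ne X 0). */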
3499 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3500 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3501 && (STORE_FLAG_VALUE
3502 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3503 && op1 == const0_rtx
3504 && mode == GET_MODE (op0)
3505 && (i = exact_log2 (nonzero_bits (op0, GET_MODE (op0)))) >= 0)
3506 {
3507 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3508 expand_compound_operation (op0),
3509 GET_MODE_BITSIZE (mode) - 1 - i);
3510 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3511 return XEXP (x, 0);
3512 else
3513 return x;
3514 }
3515
3516 /* If the code changed, return a whole new comparison. */
3517 if (new_code != code)
3518 return gen_rtx_combine (new_code, mode, op0, op1);
3519
3520 /* Otherwise, keep this operation, but maybe change its operands.
3521 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3522 SUBST (XEXP (x, 0), op0);
3523 SUBST (XEXP (x, 1), op1);
3524 }
3525 break;
3526
3527 case IF_THEN_ELSE:
3528 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3529 used in it is being compared against certain values. Get the
3530 true and false comparisons and see if that says anything about the
3531 value of each arm. */
3532
3533 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3534 && reversible_comparison_p (XEXP (x, 0))
3535 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3536 {
3537 HOST_WIDE_INT nzb;
3538 rtx from = XEXP (XEXP (x, 0), 0);
3539 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3540 enum rtx_code false_code = reverse_condition (true_code);
3541 rtx true_val = XEXP (XEXP (x, 0), 1);
3542 rtx false_val = true_val;
3543 rtx true_arm = XEXP (x, 1);
3544 rtx false_arm = XEXP (x, 2);
3545 int swapped = 0;
3546
3547 /* If FALSE_CODE is EQ, swap the codes and arms. */
3548
3549 if (false_code == EQ)
3550 {
3551 swapped = 1, true_code = EQ, false_code = NE;
3552 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3553 }
3554
3555 /* If we are comparing against zero and the expression being tested
3556 has only a single bit that might be nonzero, that is its value
3557 when it is not equal to zero. Similarly if it is known to be
3558 -1 or 0. */
3559
3560 if (true_code == EQ && true_val == const0_rtx
3561 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3562 false_code = EQ, false_val = GEN_INT (nzb);
3563 else if (true_code == EQ && true_val == const0_rtx
3564 && (num_sign_bit_copies (from, GET_MODE (from))
3565 == GET_MODE_BITSIZE (GET_MODE (from))))
3566 false_code = EQ, false_val = constm1_rtx;
3567
3568 /* Now simplify an arm if we know the value of the register
3569 in the branch and it is used in the arm. Be careful due to
3570 the potential for locally-shared RTL. */
3571
3572 if (reg_mentioned_p (from, true_arm))
3573 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3574 from, true_val),
3575 pc_rtx, pc_rtx, 0, 0);
3576 if (reg_mentioned_p (from, false_arm))
3577 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3578 from, false_val),
3579 pc_rtx, pc_rtx, 0, 0);
3580
3581 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3582 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3583 }
3584
3585 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3586 reversed, do so to avoid needing two sets of patterns for
3587 subtract-and-branch insns. Similarly if we have a constant in that
3588 position or if the third operand is the same as the first operand
3589 of the comparison. */
3590
3591 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3592 && reversible_comparison_p (XEXP (x, 0))
3593 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3594 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3595 {
3596 SUBST (XEXP (x, 0),
3597 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3598 GET_MODE (XEXP (x, 0)),
3599 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3600
3601 temp = XEXP (x, 1);
3602 SUBST (XEXP (x, 1), XEXP (x, 2));
3603 SUBST (XEXP (x, 2), temp);
3604 }
3605
3606 /* If the two arms are identical, we don't need the comparison. */
3607
3608 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3609 && ! side_effects_p (XEXP (x, 0)))
3610 return XEXP (x, 1);
3611
3612 /* Look for cases where we have (abs x) or (neg (abs X)). */
3613
3614 if (GET_MODE_CLASS (mode) == MODE_INT
3615 && GET_CODE (XEXP (x, 2)) == NEG
3616 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3617 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3618 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3619 && ! side_effects_p (XEXP (x, 1)))
3620 switch (GET_CODE (XEXP (x, 0)))
3621 {
3622 case GT:
3623 case GE:
3624 x = gen_unary (ABS, mode, XEXP (x, 1));
3625 goto restart;
3626 case LT:
3627 case LE:
3628 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3629 goto restart;
3630 }
3631
3632 /* Look for MIN or MAX. */
3633
3634 if (GET_MODE_CLASS (mode) == MODE_INT
3635 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3636 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3637 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3638 && ! side_effects_p (XEXP (x, 0)))
3639 switch (GET_CODE (XEXP (x, 0)))
3640 {
3641 case GE:
3642 case GT:
3643 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3644 goto restart;
3645 case LE:
3646 case LT:
3647 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3648 goto restart;
3649 case GEU:
3650 case GTU:
3651 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3652 goto restart;
3653 case LEU:
3654 case LTU:
3655 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3656 goto restart;
3657 }
3658
3659 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3660 A is known to be either 0 or 1, and OP is an identity when its
3661 second operand is zero, this can be done as (OP X (mult A C)).
3662 Similarly if A is known to be 0 or -1 and also similarly if we have
3663 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3664 we don't destroy it). */
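/* For example (illustrative): with A known to be 0 or 1,
(if_then_else (ne A 0) (plus X 4) X) equals
(plus X (mult A 4)): the multiply supplies 4 when A is 1
and the identity value 0 when A is 0. */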
3665
3666 if (mode != VOIDmode
3667 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3668 && XEXP (XEXP (x, 0), 1) == const0_rtx
3669 && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3670 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3671 == GET_MODE_BITSIZE (mode))))
3672 {
3673 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3674 ? XEXP (x, 1) : XEXP (x, 2));
3675 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3676 rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3677 ? const1_rtx : constm1_rtx);
3678 rtx c = 0;
3679 enum machine_mode m = mode;
3680 enum rtx_code op, extend_op = 0;
3681
3682 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3683 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3684 || GET_CODE (nz) == ASHIFT
3685 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3686 && rtx_equal_p (XEXP (nz, 0), z))
3687 c = XEXP (nz, 1), op = GET_CODE (nz);
3688 else if (GET_CODE (nz) == SIGN_EXTEND
3689 && (GET_CODE (XEXP (nz, 0)) == PLUS
3690 || GET_CODE (XEXP (nz, 0)) == MINUS
3691 || GET_CODE (XEXP (nz, 0)) == IOR
3692 || GET_CODE (XEXP (nz, 0)) == XOR
3693 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3694 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3695 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3696 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3697 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3698 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3699 && (num_sign_bit_copies (z, GET_MODE (z))
3700 >= (GET_MODE_BITSIZE (mode)
3701 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3702 {
3703 c = XEXP (XEXP (nz, 0), 1);
3704 op = GET_CODE (XEXP (nz, 0));
3705 extend_op = SIGN_EXTEND;
3706 m = GET_MODE (XEXP (nz, 0));
3707 }
3708 else if (GET_CODE (nz) == ZERO_EXTEND
3709 && (GET_CODE (XEXP (nz, 0)) == PLUS
3710 || GET_CODE (XEXP (nz, 0)) == MINUS
3711 || GET_CODE (XEXP (nz, 0)) == IOR
3712 || GET_CODE (XEXP (nz, 0)) == XOR
3713 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3714 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3715 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3716 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3717 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3718 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3719 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3720 && ((nonzero_bits (z, GET_MODE (z))
3721 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3722 == 0))
3723 {
3724 c = XEXP (XEXP (nz, 0), 1);
3725 op = GET_CODE (XEXP (nz, 0));
3726 extend_op = ZERO_EXTEND;
3727 m = GET_MODE (XEXP (nz, 0));
3728 }
3729
3730 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3731 {
3732 temp
3733 = gen_binary (MULT, m,
3734 gen_lowpart_for_combine (m,
3735 XEXP (XEXP (x, 0), 0)),
3736 gen_binary (MULT, m, c, dir));
3737
3738 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3739
3740 if (extend_op != 0)
3741 temp = gen_unary (extend_op, mode, temp);
3742
3743 return temp;
3744 }
3745 }
3746 break;
3747
3748 case ZERO_EXTRACT:
3749 case SIGN_EXTRACT:
3750 case ZERO_EXTEND:
3751 case SIGN_EXTEND:
3752 /* If we are processing SET_DEST, we are done. */
3753 if (in_dest)
3754 return x;
3755
3756 x = expand_compound_operation (x);
3757 if (GET_CODE (x) != code)
3758 goto restart;
3759 break;
3760
3761 case SET:
3762 /* (set (pc) (return)) gets written as (return). */
3763 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3764 return SET_SRC (x);
3765
3766 /* Convert this into a field assignment operation, if possible. */
3767 x = make_field_assignment (x);
3768
3769 /* If we are setting CC0 or if the source is a COMPARE, look for the
3770 use of the comparison result and try to simplify it unless we already
3771 have used undobuf.other_insn. */
3772 if ((GET_CODE (SET_SRC (x)) == COMPARE
3773 #ifdef HAVE_cc0
3774 || SET_DEST (x) == cc0_rtx
3775 #endif
3776 )
3777 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3778 &other_insn)) != 0
3779 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3780 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3781 && XEXP (*cc_use, 0) == SET_DEST (x))
3782 {
3783 enum rtx_code old_code = GET_CODE (*cc_use);
3784 enum rtx_code new_code;
3785 rtx op0, op1;
3786 int other_changed = 0;
3787 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3788
3789 if (GET_CODE (SET_SRC (x)) == COMPARE)
3790 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3791 else
3792 op0 = SET_SRC (x), op1 = const0_rtx;
3793
3794 /* Simplify our comparison, if possible. */
3795 new_code = simplify_comparison (old_code, &op0, &op1);
3796
3797 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3798 /* If this machine has CC modes other than CCmode, check to see
3799 if we need to use a different CC mode here. */
3800 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3801
3802 /* If the mode changed, we have to change SET_DEST, the mode
3803 in the compare, and the mode in the place SET_DEST is used.
3804 If SET_DEST is a hard register, just build new versions with
3805 the proper mode. If it is a pseudo, we lose unless this is the
3806 only time we set the pseudo, in which case we can safely change
3807 its mode. */
3808 if (compare_mode != GET_MODE (SET_DEST (x)))
3809 {
3810 int regno = REGNO (SET_DEST (x));
3811 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3812
3813 if (regno < FIRST_PSEUDO_REGISTER
3814 || (reg_n_sets[regno] == 1
3815 && ! REG_USERVAR_P (SET_DEST (x))))
3816 {
3817 if (regno >= FIRST_PSEUDO_REGISTER)
3818 SUBST (regno_reg_rtx[regno], new_dest);
3819
3820 SUBST (SET_DEST (x), new_dest);
3821 SUBST (XEXP (*cc_use, 0), new_dest);
3822 other_changed = 1;
3823 }
3824 }
3825 #endif
3826
3827 /* If the code changed, we have to build a new comparison
3828 in undobuf.other_insn. */
3829 if (new_code != old_code)
3830 {
3831 unsigned HOST_WIDE_INT mask;
3832
3833 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3834 SET_DEST (x), const0_rtx));
3835
3836 /* If the only change we made was to change an EQ into an
3837 NE or vice versa, OP0 has only one bit that might be nonzero,
3838 and OP1 is zero, check if changing the user of the condition
3839 code will produce a valid insn. If it won't, we can keep
3840 the original code in that insn by surrounding our operation
3841 with an XOR. */
3842
3843 if (((old_code == NE && new_code == EQ)
3844 || (old_code == EQ && new_code == NE))
3845 && ! other_changed && op1 == const0_rtx
3846 && (GET_MODE_BITSIZE (GET_MODE (op0))
3847 <= HOST_BITS_PER_WIDE_INT)
3848 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
3849 >= 0))
3850 {
3851 rtx pat = PATTERN (other_insn), note = 0;
3852
3853 if ((recog_for_combine (&pat, other_insn, &note) < 0
3854 && ! check_asm_operands (pat)))
3855 {
3856 PUT_CODE (*cc_use, old_code);
3857 other_insn = 0;
3858
3859 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3860 GEN_INT (mask));
3861 }
3862 }
3863
3864 other_changed = 1;
3865 }
3866
3867 if (other_changed)
3868 undobuf.other_insn = other_insn;
3869
3870 #ifdef HAVE_cc0
3871 /* If we are now comparing against zero, change our source if
3872 needed. If we do not use cc0, we always have a COMPARE. */
3873 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3874 SUBST (SET_SRC (x), op0);
3875 else
3876 #endif
3877
3878 /* Otherwise, if we didn't previously have a COMPARE in the
3879 correct mode, we need one. */
3880 if (GET_CODE (SET_SRC (x)) != COMPARE
3881 || GET_MODE (SET_SRC (x)) != compare_mode)
3882 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3883 op0, op1));
3884 else
3885 {
3886 /* Otherwise, update the COMPARE if needed. */
3887 SUBST (XEXP (SET_SRC (x), 0), op0);
3888 SUBST (XEXP (SET_SRC (x), 1), op1);
3889 }
3890 }
3891 else
3892 {
3893 /* Get SET_SRC in a form where we have placed back any
3894 compound expressions. Then do the checks below. */
3895 temp = make_compound_operation (SET_SRC (x), SET);
3896 SUBST (SET_SRC (x), temp);
3897 }
3898
3899 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3900 operation, and X being a REG or (subreg (reg)), we may be able to
3901 convert this to (set (subreg:m2 x) (op)).
3902
3903 We can always do this if M1 is narrower than M2 because that
3904 means that we only care about the low bits of the result.
3905
3906 However, on most machines (those with neither BYTE_LOADS_ZERO_EXTEND
3907 nor BYTE_LOADS_SIGN_EXTEND defined), we cannot perform a
3908 narrower operation than requested since the high-order bits will
3909 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3910 however, this transformation is safe as long as M1 and M2 have
3911 the same number of words. */
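/* For example (a sketch): (set X:QI (subreg:QI (plus:SI A B) 0))
can become (set (subreg:SI X 0) (plus:SI A B)), since only
the low byte of the sum is ever needed. */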
3912
3913 if (GET_CODE (SET_SRC (x)) == SUBREG
3914 && subreg_lowpart_p (SET_SRC (x))
3915 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3916 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3917 / UNITS_PER_WORD)
3918 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3919 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3920 #ifndef BYTE_LOADS_EXTEND
3921 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3922 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3923 #endif
3924 && (GET_CODE (SET_DEST (x)) == REG
3925 || (GET_CODE (SET_DEST (x)) == SUBREG
3926 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3927 {
3928 SUBST (SET_DEST (x),
3929 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3930 SET_DEST (x)));
3931 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3932 }
3933
3934 #ifdef BYTE_LOADS_EXTEND
3935 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3936 M wider than N, this would require a paradoxical subreg.
3937 Replace the subreg with a LOAD_EXTEND extension to avoid the
3938 reload that would otherwise be required. */
3939
3940 if (GET_CODE (SET_SRC (x)) == SUBREG
3941 && subreg_lowpart_p (SET_SRC (x))
3942 && SUBREG_WORD (SET_SRC (x)) == 0
3943 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3944 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3945 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3946 SUBST (SET_SRC (x), gen_rtx_combine (LOAD_EXTEND,
3947 GET_MODE (SET_SRC (x)),
3948 XEXP (SET_SRC (x), 0)));
3949 #endif
3950
3951 #ifndef HAVE_conditional_move
3952
3953 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3954 and we are comparing an item known to be 0 or -1 against 0, use a
3955 logical operation instead. Check for one of the arms being an IOR
3956 of the other arm with some value. We compute three terms to be
3957 IOR'ed together. In practice, at most two will be nonzero. Then
3958 we do the IOR's. */
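/* Illustration (added as a sketch): with A known to be 0 or -1,
(if_then_else (ne A 0) B C) equals
(ior (and A B) (and (not A) C)); the three terms computed
below are this expansion, with one arm possibly folded into
TERM1. */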
3959
3960 if (GET_CODE (SET_DEST (x)) != PC
3961 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3962 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3963 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3964 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3965 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3966 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3967 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3968 && ! side_effects_p (SET_SRC (x)))
3969 {
3970 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3971 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3972 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3973 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3974 rtx term1 = const0_rtx, term2, term3;
3975
3976 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3977 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3978 else if (GET_CODE (true) == IOR
3979 && rtx_equal_p (XEXP (true, 1), false))
3980 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3981 else if (GET_CODE (false) == IOR
3982 && rtx_equal_p (XEXP (false, 0), true))
3983 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3984 else if (GET_CODE (false) == IOR
3985 && rtx_equal_p (XEXP (false, 1), true))
3986 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3987
3988 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3989 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3990 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3991 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3992 XEXP (XEXP (SET_SRC (x), 0), 0)),
3993 false);
3994
3995 SUBST (SET_SRC (x),
3996 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3997 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3998 term1, term2),
3999 term3));
4000 }
4001 #endif
4002 break;
4003
4004 case AND:
4005 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4006 {
4007 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4008 INTVAL (XEXP (x, 1)));
4009
4010 /* If we have (ior (and X C1) C2) and the next restart would be
4011 the last, simplify this by making C1 as small as possible
4012 and then exit. */
4013 if (n_restarts >= 3 && GET_CODE (x) == IOR
4014 && GET_CODE (XEXP (x, 0)) == AND
4015 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4016 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4017 {
4018 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4019 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4020 & ~ INTVAL (XEXP (x, 1))));
4021 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4022 }
4023
4024 if (GET_CODE (x) != AND)
4025 goto restart;
4026 }
4027
4028 /* Convert (A | B) & A to A. */
4029 if (GET_CODE (XEXP (x, 0)) == IOR
4030 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4031 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4032 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4033 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4034 return XEXP (x, 1);
4035
4036 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4037 insn (and may simplify more). */
4038 else if (GET_CODE (XEXP (x, 0)) == XOR
4039 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4040 && ! side_effects_p (XEXP (x, 1)))
4041 {
4042 x = gen_binary (AND, mode,
4043 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4044 XEXP (x, 1));
4045 goto restart;
4046 }
4047 else if (GET_CODE (XEXP (x, 0)) == XOR
4048 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4049 && ! side_effects_p (XEXP (x, 1)))
4050 {
4051 x = gen_binary (AND, mode,
4052 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4053 XEXP (x, 1));
4054 goto restart;
4055 }
4056
4057 /* Similarly for (~ (A ^ B)) & A. */
4058 else if (GET_CODE (XEXP (x, 0)) == NOT
4059 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4060 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4061 && ! side_effects_p (XEXP (x, 1)))
4062 {
4063 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4064 XEXP (x, 1));
4065 goto restart;
4066 }
4067 else if (GET_CODE (XEXP (x, 0)) == NOT
4068 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4069 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4070 && ! side_effects_p (XEXP (x, 1)))
4071 {
4072 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4073 XEXP (x, 1));
4074 goto restart;
4075 }
4076
4077 /* If we have (and A B) with A not an object but that is known to
4078 be -1 or 0, this is equivalent to the expression
4079 (if_then_else (ne A (const_int 0)) B (const_int 0))
4080 We make this conversion because it may allow further
4081 simplifications and then allow use of conditional move insns.
4082 If the machine doesn't have condition moves, code in case SET
4083 will convert the IF_THEN_ELSE back to the logical operation.
4084 We build the IF_THEN_ELSE here in case further simplification
4085 is possible (e.g., we can convert it to ABS). */
4086
4087 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4088 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4089 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4090 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4091 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4092 {
4093 rtx op0 = XEXP (x, 0);
4094 rtx op1 = const0_rtx;
4095 enum rtx_code comp_code
4096 = simplify_comparison (NE, &op0, &op1);
4097
4098 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4099 gen_binary (comp_code, VOIDmode, op0, op1),
4100 XEXP (x, 1), const0_rtx);
4101 goto restart;
4102 }
4103
4104 /* In the following group of tests (and those in case IOR below),
4105 we start with some combination of logical operations and apply
4106 the distributive law followed by the inverse distributive law.
4107 Most of the time, this results in no change. However, if some of
4108 the operands are the same or inverses of each other, simplifications
4109 will result.
4110
4111 For example, (and (ior A B) (not B)) can occur as the result of
4112 expanding a bit field assignment. When we apply the distributive
4113 law to this, we get (ior (and A (not B)) (and B (not B))),
4114 which then simplifies to (and A (not B)). */
4115
4116 /* If we have (and (ior A B) C), apply the distributive law and then
4117 the inverse distributive law to see if things simplify. */
4118
4119 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4120 {
4121 x = apply_distributive_law
4122 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4123 gen_binary (AND, mode,
4124 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4125 gen_binary (AND, mode,
4126 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4127 if (GET_CODE (x) != AND)
4128 goto restart;
4129 }
4130
4131 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4132 {
4133 x = apply_distributive_law
4134 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4135 gen_binary (AND, mode,
4136 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4137 gen_binary (AND, mode,
4138 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4139 if (GET_CODE (x) != AND)
4140 goto restart;
4141 }
4142
4143 /* Similarly, taking advantage of the fact that
4144 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4145
4146 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4147 {
4148 x = apply_distributive_law
4149 (gen_binary (XOR, mode,
4150 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4151 XEXP (XEXP (x, 1), 0)),
4152 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4153 XEXP (XEXP (x, 1), 1))));
4154 if (GET_CODE (x) != AND)
4155 goto restart;
4156 }
4157
4158 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4159 {
4160 x = apply_distributive_law
4161 (gen_binary (XOR, mode,
4162 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4163 XEXP (XEXP (x, 0), 0)),
4164 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4165 XEXP (XEXP (x, 0), 1))));
4166 if (GET_CODE (x) != AND)
4167 goto restart;
4168 }
4169 break;
4170
4171 case IOR:
4172 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4173 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4174 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4175 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4176 return XEXP (x, 1);
4177
4178 /* Convert (A & B) | A to A. */
4179 if (GET_CODE (XEXP (x, 0)) == AND
4180 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4181 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4182 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4183 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4184 return XEXP (x, 1);
4185
4186 /* If we have (ior (and A B) C), apply the distributive law and then
4187 the inverse distributive law to see if things simplify. */
4188
4189 if (GET_CODE (XEXP (x, 0)) == AND)
4190 {
4191 x = apply_distributive_law
4192 (gen_binary (AND, mode,
4193 gen_binary (IOR, mode,
4194 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4195 gen_binary (IOR, mode,
4196 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4197
4198 if (GET_CODE (x) != IOR)
4199 goto restart;
4200 }
4201
4202 if (GET_CODE (XEXP (x, 1)) == AND)
4203 {
4204 x = apply_distributive_law
4205 (gen_binary (AND, mode,
4206 gen_binary (IOR, mode,
4207 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4208 gen_binary (IOR, mode,
4209 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4210
4211 if (GET_CODE (x) != IOR)
4212 goto restart;
4213 }
4214
4215 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4216 mode size to (rotate A CX). */
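/* E.g. (illustrative, in a 32-bit mode):
(ior (ashift A 8) (lshiftrt A 24)) puts the low 24 bits of A
on top and its high 8 bits on the bottom, i.e. (rotate A 8). */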
4217
4218 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4219 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4220 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4221 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4222 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4223 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4224 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4225 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4226 == GET_MODE_BITSIZE (mode)))
4227 {
4228 rtx shift_count;
4229
4230 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4231 shift_count = XEXP (XEXP (x, 0), 1);
4232 else
4233 shift_count = XEXP (XEXP (x, 1), 1);
4234 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4235 goto restart;
4236 }
4237 break;
4238
4239 case XOR:
4240 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4241 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4242 (NOT y). */
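/* Since (not Z) is (xor Z -1), the two NOTs cancel:
(xor (xor X -1) (xor Y -1)) = (xor X Y). A single NOT hoists
out of the XOR the same way. (Identity noted as an
illustration.) */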
4243 {
4244 int num_negated = 0;
4245 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4246
4247 if (GET_CODE (in1) == NOT)
4248 num_negated++, in1 = XEXP (in1, 0);
4249 if (GET_CODE (in2) == NOT)
4250 num_negated++, in2 = XEXP (in2, 0);
4251
4252 if (num_negated == 2)
4253 {
4254 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4255 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4256 }
4257 else if (num_negated == 1)
4258 {
4259 x = gen_unary (NOT, mode,
4260 gen_binary (XOR, mode, in1, in2));
4261 goto restart;
4262 }
4263 }
4264
4265 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4266 correspond to a machine insn or result in further simplifications
4267 if B is a constant. */
4268
4269 if (GET_CODE (XEXP (x, 0)) == AND
4270 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4271 && ! side_effects_p (XEXP (x, 1)))
4272 {
4273 x = gen_binary (AND, mode,
4274 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4275 XEXP (x, 1));
4276 goto restart;
4277 }
4278 else if (GET_CODE (XEXP (x, 0)) == AND
4279 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4280 && ! side_effects_p (XEXP (x, 1)))
4281 {
4282 x = gen_binary (AND, mode,
4283 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4284 XEXP (x, 1));
4285 goto restart;
4286 }
4287
4288
4289 #if STORE_FLAG_VALUE == 1
4290 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4291 comparison. */
4292 if (XEXP (x, 1) == const1_rtx
4293 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4294 && reversible_comparison_p (XEXP (x, 0)))
4295 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4296 mode, XEXP (XEXP (x, 0), 0),
4297 XEXP (XEXP (x, 0), 1));
4298
4299 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4300 is (lt foo (const_int 0)), so we can perform the above
4301 simplification. */
4302
4303 if (XEXP (x, 1) == const1_rtx
4304 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4305 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4306 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4307 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4308 #endif
4309
4310 /* (xor (comparison foo bar) (const_int sign-bit))
4311 when STORE_FLAG_VALUE is the sign bit. */
4312 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4313 && (STORE_FLAG_VALUE
4314 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4315 && XEXP (x, 1) == const_true_rtx
4316 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4317 && reversible_comparison_p (XEXP (x, 0)))
4318 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4319 mode, XEXP (XEXP (x, 0), 0),
4320 XEXP (XEXP (x, 0), 1));
4321 break;
4322
4323 case ABS:
4324 /* (abs (neg <foo>)) -> (abs <foo>) */
4325 if (GET_CODE (XEXP (x, 0)) == NEG)
4326 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4327
4328 /* If the operand is known to be nonnegative, ignore the ABS. */
4329 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4330 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4331 <= HOST_BITS_PER_WIDE_INT)
4332 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4333 & ((HOST_WIDE_INT) 1
4334 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4335 == 0)))
4336 return XEXP (x, 0);
4337
4338
4339 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4340 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4341 {
4342 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4343 goto restart;
4344 }
4345 break;
4346
4347 case FFS:
4348 /* (ffs (*_extend <X>)) = (ffs <X>) */
4349 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4350 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4351 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4352 break;
4353
4354 case FLOAT:
4355 /* (float (sign_extend <X>)) = (float <X>). */
4356 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4357 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4358 break;
4359
4360 case LSHIFT:
4361 case ASHIFT:
4362 case LSHIFTRT:
4363 case ASHIFTRT:
4364 case ROTATE:
4365 case ROTATERT:
4366 /* If this is a shift by a constant amount, simplify it. */
4367 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4368 {
4369 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4370 INTVAL (XEXP (x, 1)));
4371 if (GET_CODE (x) != code)
4372 goto restart;
4373 }
4374
4375 #ifdef SHIFT_COUNT_TRUNCATED
4376 else if (GET_CODE (XEXP (x, 1)) != REG)
4377 SUBST (XEXP (x, 1),
4378 force_to_mode (XEXP (x, 1), GET_MODE (x),
4379 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4380 NULL_RTX));
4381 #endif
4382
4383 break;
4384 }
4385
4386 return x;
4387 }
4388 \f
4389 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4390 operations" because they can be replaced with two more basic operations.
4391 ZERO_EXTEND is also considered "compound" because it can be replaced with
4392 an AND operation, which is simpler, though only one operation.
4393
4394 The function expand_compound_operation is called with an rtx expression
4395 and will convert it to the appropriate shifts and AND operations,
4396 simplifying at each stage.
4397
4398 The function make_compound_operation is called to convert an expression
4399 consisting of shifts and ANDs into the equivalent compound expression.
4400 It is the inverse of this function, loosely speaking. */
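/* For example (a sketch, assuming QImode within a 32-bit word):
(sign_extend:SI (reg:QI R)) expands to the shift pair
(ashiftrt:SI (ashift:SI R 24) 24), while the corresponding
zero_extend reduces to an AND with 255. */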
4401
4402 static rtx
4403 expand_compound_operation (x)
4404 rtx x;
4405 {
4406 int pos = 0, len;
4407 int unsignedp = 0;
4408 int modewidth;
4409 rtx tem;
4410
4411 switch (GET_CODE (x))
4412 {
4413 case ZERO_EXTEND:
4414 unsignedp = 1;
4415 case SIGN_EXTEND:
4416 /* We can't necessarily use a const_int for a multiword mode;
4417 it depends on implicitly extending the value.
4418 Since we don't know the right way to extend it,
4419 we can't tell whether the implicit way is right.
4420
4421 Even for a mode that is no wider than a const_int,
4422 we can't win, because we need to sign extend one of its bits through
4423 the rest of it, and we don't know which bit. */
4424 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4425 return x;
4426
4427 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4428 return x;
4429
4430 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4431 /* If the inner object has VOIDmode (the only way this can happen
4432 is if it is an ASM_OPERANDS), we can't do anything since we don't
4433 know how much masking to do. */
4434 if (len == 0)
4435 return x;
4436
4437 break;
4438
4439 case ZERO_EXTRACT:
4440 unsignedp = 1;
4441 case SIGN_EXTRACT:
4442 /* If the operand is a CLOBBER, just return it. */
4443 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4444 return XEXP (x, 0);
4445
4446 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4447 || GET_CODE (XEXP (x, 2)) != CONST_INT
4448 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4449 return x;
4450
4451 len = INTVAL (XEXP (x, 1));
4452 pos = INTVAL (XEXP (x, 2));
4453
4454 /* If this goes outside the object being extracted, replace the object
4455 with a (use (mem ...)) construct that only combine understands
4456 and is used only for this purpose. */
4457 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4458 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4459
4460 #if BITS_BIG_ENDIAN
4461 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4462 #endif
4463 break;
4464
4465 default:
4466 return x;
4467 }
4468
4469 /* If we reach here, we want to return a pair of shifts. The inner
4470 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4471 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4472 logical depending on the value of UNSIGNEDP.
4473
4474 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4475 converted into an AND of a shift.
4476
4477 We must check for the case where the left shift would have a negative
4478 count. This can happen in a case like (x >> 31) & 255 on machines
4479 that can't shift by a constant. On those machines, we would first
4480 combine the shift with the AND to produce a variable-position
4481 extraction. Then the constant of 31 would be substituted in to produce
4482 such a position. */
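/* E.g. (illustrative): extracting 8 bits at position 28 of a
32-bit value would need an inner left shift of 32 - 28 - 8 = -4
bits; the check below rejects that, and the unsigned case is
then handled by the shift-and-AND form instead. */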
4483
4484 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4485 if (modewidth >= pos + len)
4486 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4487 GET_MODE (x),
4488 simplify_shift_const (NULL_RTX, ASHIFT,
4489 GET_MODE (x),
4490 XEXP (x, 0),
4491 modewidth - pos - len),
4492 modewidth - len);
4493
4494 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4495 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4496 simplify_shift_const (NULL_RTX, LSHIFTRT,
4497 GET_MODE (x),
4498 XEXP (x, 0), pos),
4499 ((HOST_WIDE_INT) 1 << len) - 1);
4500 else
4501 /* Any other cases we can't handle. */
4502 return x;
4503
4504
4505 /* If we couldn't do this for some reason, return the original
4506 expression. */
4507 if (GET_CODE (tem) == CLOBBER)
4508 return x;
4509
4510 return tem;
4511 }
4512 \f
4513 /* X is a SET which contains an assignment of one object into
4514 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4515 or certain SUBREGS). If possible, convert it into a series of
4516 logical operations.
4517
4518 We half-heartedly support variable positions, but do not at all
4519 support variable lengths. */
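/* Shape of the result (a sketch): storing SRC into a LEN-bit
field of INNER at position POS produces
(set INNER (ior (and INNER (not (ashift MASK POS)))
(ashift (and SRC MASK) POS)))
where MASK is (1 << LEN) - 1. */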
4520
4521 static rtx
4522 expand_field_assignment (x)
4523 rtx x;
4524 {
4525 rtx inner;
4526 rtx pos; /* Always counts from low bit. */
4527 int len;
4528 rtx mask;
4529 enum machine_mode compute_mode;
4530
4531 /* Loop until we find something we can't simplify. */
4532 while (1)
4533 {
4534 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4535 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4536 {
4537 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4538 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4539 pos = const0_rtx;
4540 }
4541 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4542 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4543 {
4544 inner = XEXP (SET_DEST (x), 0);
4545 len = INTVAL (XEXP (SET_DEST (x), 1));
4546 pos = XEXP (SET_DEST (x), 2);
4547
4548 /* If the position is constant and the field extends beyond the
4549 width of INNER, surround INNER with a USE to indicate this. */
4550 if (GET_CODE (pos) == CONST_INT
4551 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4552 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4553
4554 #if BITS_BIG_ENDIAN
4555 if (GET_CODE (pos) == CONST_INT)
4556 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4557 - INTVAL (pos));
4558 else if (GET_CODE (pos) == MINUS
4559 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4560 && (INTVAL (XEXP (pos, 1))
4561 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4562 /* If position is ADJUST - X, new position is X. */
4563 pos = XEXP (pos, 0);
4564 else
4565 pos = gen_binary (MINUS, GET_MODE (pos),
4566 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4567 - len),
4568 pos);
4569 #endif
4570 }
4571
4572 /* A SUBREG between two modes that occupy the same numbers of words
4573 can be done by moving the SUBREG to the source. */
4574 else if (GET_CODE (SET_DEST (x)) == SUBREG
4575 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4576 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4577 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4578 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4579 {
4580 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4581 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4582 SET_SRC (x)));
4583 continue;
4584 }
4585 else
4586 break;
4587
4588 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4589 inner = SUBREG_REG (inner);
4590
4591 compute_mode = GET_MODE (inner);
4592
4593 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4594 if (len < HOST_BITS_PER_WIDE_INT)
4595 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4596 else
4597 break;
4598
4599 /* Now compute the equivalent expression. Make a copy of INNER
4600 for the SET_DEST in case it is a MEM into which we will substitute;
4601 we don't want shared RTL in that case. */
4602 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4603 gen_binary (IOR, compute_mode,
4604 gen_binary (AND, compute_mode,
4605 gen_unary (NOT, compute_mode,
4606 gen_binary (ASHIFT,
4607 compute_mode,
4608 mask, pos)),
4609 inner),
4610 gen_binary (ASHIFT, compute_mode,
4611 gen_binary (AND, compute_mode,
4612 gen_lowpart_for_combine
4613 (compute_mode,
4614 SET_SRC (x)),
4615 mask),
4616 pos)));
4617 }
4618
4619 return x;
4620 }
4621 \f
4622 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4623 it is an RTX that represents a variable starting position; otherwise,
4624 POS is the (constant) starting bit position (counted from the LSB).
4625
4626 INNER may be a USE. This will occur when we started with a bitfield
4627 that went outside the boundary of the object in memory, which is
4628 allowed on most machines. To isolate this case, we produce a USE
4629 whose mode is wide enough and surround the MEM with it. The only
4630 code that understands the USE is this routine. If it is not removed,
4631 it will cause the resulting insn not to match.
4632
4633 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4634 signed reference.
4635
4636 IN_DEST is non-zero if this is a reference in the destination of a
4637 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4638 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4639 be used.
4640
4641 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4642 ZERO_EXTRACT should be built even for bits starting at bit 0.
4643
4644 MODE is the desired mode of the result (if IN_DEST == 0). */
4645
4646 static rtx
4647 make_extraction (mode, inner, pos, pos_rtx, len,
4648 unsignedp, in_dest, in_compare)
4649 enum machine_mode mode;
4650 rtx inner;
4651 int pos;
4652 rtx pos_rtx;
4653 int len;
4654 int unsignedp;
4655 int in_dest, in_compare;
4656 {
4657 /* This mode describes the size of the storage area
4658 to fetch the overall value from. Within that, we
4659 ignore the POS lowest bits, etc. */
4660 enum machine_mode is_mode = GET_MODE (inner);
4661 enum machine_mode inner_mode;
4662 enum machine_mode wanted_mem_mode = byte_mode;
4663 enum machine_mode pos_mode = word_mode;
4664 enum machine_mode extraction_mode = word_mode;
4665 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4666 int spans_byte = 0;
4667 rtx new = 0;
4668 rtx orig_pos_rtx = pos_rtx;
4669
4670 /* Get some information about INNER and get the innermost object. */
4671 if (GET_CODE (inner) == USE)
4672 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4673 /* We don't need to adjust the position because we set up the USE
4674 to pretend that it was a full-word object. */
4675 spans_byte = 1, inner = XEXP (inner, 0);
4676 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4677 {
4678 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4679 consider just the QI as the memory to extract from.
4680 The subreg adds or removes high bits; its mode is
4681 irrelevant to the meaning of this extraction,
4682 since POS and LEN count from the lsb. */
4683 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4684 is_mode = GET_MODE (SUBREG_REG (inner));
4685 inner = SUBREG_REG (inner);
4686 }
4687
4688 inner_mode = GET_MODE (inner);
4689
4690 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4691 pos = INTVAL (pos_rtx), pos_rtx = 0;
4692
4693 /* See if this can be done without an extraction. We never can if the
4694 width of the field is not the same as that of some integer mode. For
4695 registers, we can only avoid the extraction if the position is at the
4696 low-order bit and this is either not in the destination or we have the
4697 appropriate STRICT_LOW_PART operation available.
4698
4699 For MEM, we can avoid an extract if the field starts on an appropriate
4700 boundary and we can change the mode of the memory reference. However,
4701 we cannot directly access the MEM if we have a USE and the underlying
4702 MEM is not TMODE. This combination means that MEM was being used in a
4703 context where bits outside its mode were being referenced; that is only
4704 valid in bit-field insns. */
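
/* For instance (an illustrative case; X is a hypothetical SImode REG):
extracting the low byte of X with IN_DEST == 0 passes the test below,
so we return (zero_extend:SI (subreg:QI X 0)) rather than building a
ZERO_EXTRACT. */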
4705
4706 if (tmode != BLKmode
4707 && ! (spans_byte && inner_mode != tmode)
4708 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
4709 && (! in_dest
4710 || (GET_CODE (inner) == REG
4711 && (movstrict_optab->handlers[(int) tmode].insn_code
4712 != CODE_FOR_nothing))))
4713 || (GET_CODE (inner) == MEM && pos_rtx == 0
4714 && (pos
4715 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4716 : BITS_PER_UNIT)) == 0
4717 /* We can't do this if we are widening INNER_MODE (it
4718 may not be aligned, for one thing). */
4719 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4720 && (inner_mode == tmode
4721 || (! mode_dependent_address_p (XEXP (inner, 0))
4722 && ! MEM_VOLATILE_P (inner))))))
4723 {
4724 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4725 field. If the original and current mode are the same, we need not
4726 adjust the offset. Otherwise, we must adjust it when bytes are big-endian.
4727 
4728 If INNER is not a MEM, get a piece consisting of just the field
4729 of interest (in this case POS must be 0). */
4730
4731 if (GET_CODE (inner) == MEM)
4732 {
4733 int offset;
4734 /* POS counts from lsb, but make OFFSET count in memory order. */
4735 if (BYTES_BIG_ENDIAN)
4736 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4737 else
4738 offset = pos / BITS_PER_UNIT;
4739
4740 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4741 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4742 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4743 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4744 }
4745 else if (GET_CODE (inner) == REG)
4746 /* We can't call gen_lowpart_for_combine here since we always want
4747 a SUBREG and it would sometimes return a new hard register. */
4748 new = gen_rtx (SUBREG, tmode, inner,
4749 (WORDS_BIG_ENDIAN
4750 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4751 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4752 / UNITS_PER_WORD)
4753 : 0));
4754 else
4755 new = force_to_mode (inner, tmode, len, NULL_RTX);
4756
4757 /* If this extraction is going into the destination of a SET,
4758 make a STRICT_LOW_PART unless we made a MEM. */
4759
4760 if (in_dest)
4761 return (GET_CODE (new) == MEM ? new
4762 : (GET_CODE (new) != SUBREG
4763 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4764 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4765
4766 /* Otherwise, sign- or zero-extend unless we already are in the
4767 proper mode. */
4768
4769 return (mode == tmode ? new
4770 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4771 mode, new));
4772 }
4773
4774 /* Unless this is a COMPARE or we have a funny memory reference,
4775 don't do anything with zero-extending field extracts starting at
4776 the low-order bit since they are simple AND operations. */
4777 if (pos_rtx == 0 && pos == 0 && ! in_dest
4778 && ! in_compare && ! spans_byte && unsignedp)
4779 return 0;
4780
4781 /* Get the mode to use should INNER be a MEM, the mode for the position,
4782 and the mode for the result. */
4783 #ifdef HAVE_insv
4784 if (in_dest)
4785 {
4786 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4787 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4788 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4789 }
4790 #endif
4791
4792 #ifdef HAVE_extzv
4793 if (! in_dest && unsignedp)
4794 {
4795 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4796 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4797 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4798 }
4799 #endif
4800
4801 #ifdef HAVE_extv
4802 if (! in_dest && ! unsignedp)
4803 {
4804 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4805 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4806 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4807 }
4808 #endif
4809
4810 /* Never narrow an object, since that might not be safe. */
4811
4812 if (mode != VOIDmode
4813 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4814 extraction_mode = mode;
4815
4816 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4817 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4818 pos_mode = GET_MODE (pos_rtx);
4819
4820 /* If this is not from memory or we have to change the mode of memory and
4821 cannot, the desired mode is EXTRACTION_MODE. */
4822 if (GET_CODE (inner) != MEM
4823 || (inner_mode != wanted_mem_mode
4824 && (mode_dependent_address_p (XEXP (inner, 0))
4825 || MEM_VOLATILE_P (inner))))
4826 wanted_mem_mode = extraction_mode;
4827
4828 #if BITS_BIG_ENDIAN
4829 /* If position is constant, compute new position. Otherwise, build
4830 subtraction. */
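/* For example (illustrative): with a 32-bit IS_MODE and an 8-bit
field, an lsb-relative position of 0 names the low-order byte, which
on a bits-big-endian machine is bit position 32 - 8 - 0 == 24. */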
4831 if (pos_rtx == 0)
4832 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4833 - len - pos);
4834 else
4835 pos_rtx
4836 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4837 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4838 GET_MODE_BITSIZE (wanted_mem_mode))
4839 - len),
4840 pos_rtx);
4841 #endif
4842
4843 /* If INNER has a wider mode, make it smaller. If this is a constant
4844 extract, try to adjust the byte to point to the byte containing
4845 the value. */
4846 if (wanted_mem_mode != VOIDmode
4847 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4848 && ((GET_CODE (inner) == MEM
4849 && (inner_mode == wanted_mem_mode
4850 || (! mode_dependent_address_p (XEXP (inner, 0))
4851 && ! MEM_VOLATILE_P (inner))))))
4852 {
4853 int offset = 0;
4854
4855 /* The computations below will be correct if the machine is big
4856 endian in both bits and bytes or little endian in bits and bytes.
4857 If it is mixed, we must adjust. */
4858
4859 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4860 if (! spans_byte && is_mode != wanted_mem_mode)
4861 offset = (GET_MODE_SIZE (is_mode)
4862 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4863 #endif
4864
4865 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4866 adjust OFFSET to compensate. */
4867 #if BYTES_BIG_ENDIAN
4868 if (! spans_byte
4869 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4870 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4871 #endif
4872
4873 /* If this is a constant position, we can move to the desired byte. */
4874 if (pos_rtx == 0)
4875 {
4876 offset += pos / BITS_PER_UNIT;
4877 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4878 }
4879
4880 if (offset != 0 || inner_mode != wanted_mem_mode)
4881 {
4882 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4883 plus_constant (XEXP (inner, 0), offset));
4884 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4885 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4886 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4887 inner = newmem;
4888 }
4889 }
4890
4891 /* If INNER is not memory, we can always get it into the proper mode. */
4892 else if (GET_CODE (inner) != MEM)
4893 inner = force_to_mode (inner, extraction_mode,
4894 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4895 : len + pos),
4896 NULL_RTX);
4897
4898 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4899 have to zero extend. Otherwise, we can just use a SUBREG. */
4900 if (pos_rtx != 0
4901 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4902 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4903 else if (pos_rtx != 0
4904 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4905 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4906
4907 /* Make POS_RTX unless we already have it and it is correct. If we don't
4908 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
4909 be a CONST_INT. */
4910 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
4911 pos_rtx = orig_pos_rtx;
4912
4913 else if (pos_rtx == 0)
4914 pos_rtx = GEN_INT (pos);
4915
4916 /* Make the required operation. See if we can use existing rtx. */
4917 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4918 extraction_mode, inner, GEN_INT (len), pos_rtx);
4919 if (! in_dest)
4920 new = gen_lowpart_for_combine (mode, new);
4921
4922 return new;
4923 }
4924 \f
4925 /* Look at the expression rooted at X. Look for expressions
4926 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4927 Form these expressions.
4928
4929 Return the new rtx, usually just X.
4930
4931 Also, for machines like the Vax that don't have logical shift insns,
4932 try to convert logical to arithmetic shift operations in cases where
4933 they are equivalent. This undoes the canonicalizations to logical
4934 shifts done elsewhere.
4935
4936 We try, as much as possible, to re-use rtl expressions to save memory.
4937
4938 IN_CODE says what kind of expression we are processing. Normally, it is
4939 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4940 being kludges), it is MEM. When processing the arguments of a comparison
4941 or a COMPARE against zero, it is COMPARE. */
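
/* An illustrative example (X is a hypothetical SImode REG):
(lshiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)) references
just the low byte of X, so this routine rewrites it, via
make_extraction, as the compound (zero_extend:SI (subreg:QI X 0)). */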
4942
4943 static rtx
4944 make_compound_operation (x, in_code)
4945 rtx x;
4946 enum rtx_code in_code;
4947 {
4948 enum rtx_code code = GET_CODE (x);
4949 enum machine_mode mode = GET_MODE (x);
4950 int mode_width = GET_MODE_BITSIZE (mode);
4951 enum rtx_code next_code;
4952 int i, count;
4953 rtx new = 0;
4954 rtx tem;
4955 char *fmt;
4956
4957 /* Select the code to be used in recursive calls. Once we are inside an
4958 address, we stay there. If we have a comparison, set to COMPARE,
4959 but once inside, go back to our default of SET. */
4960
4961 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4962 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4963 && XEXP (x, 1) == const0_rtx) ? COMPARE
4964 : in_code == COMPARE ? SET : in_code);
4965
4966 /* Process depending on the code of this operation. If NEW is set
4967 non-zero, it will be returned. */
4968
4969 switch (code)
4970 {
4971 case ASHIFT:
4972 case LSHIFT:
4973 /* Convert shifts by constants into multiplications if inside
4974 an address. */
4975 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4976 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4977 && INTVAL (XEXP (x, 1)) >= 0)
4978 {
4979 new = make_compound_operation (XEXP (x, 0), next_code);
4980 new = gen_rtx_combine (MULT, mode, new,
4981 GEN_INT ((HOST_WIDE_INT) 1
4982 << INTVAL (XEXP (x, 1))));
4983 }
4984 break;
4985
4986 case AND:
4987 /* If the second operand is not a constant, we can't do anything
4988 with it. */
4989 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4990 break;
4991
4992 /* If the constant is a power of two minus one and the first operand
4993 is a logical right shift, make an extraction. */
4994 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4995 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4996 {
4997 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
4998 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
4999 0, in_code == COMPARE);
5000 }
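
/* For example (illustrative): (and:SI (lshiftrt:SI X (const_int 3))
(const_int 15)) selects a 4-bit field starting at bit 3 of X, so the
code above turns it into
(zero_extract:SI X (const_int 4) (const_int 3)). */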
5001
5002 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5003 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5004 && subreg_lowpart_p (XEXP (x, 0))
5005 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5006 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5007 {
5008 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5009 next_code);
5010 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5011 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5012 0, in_code == COMPARE);
5013 }
5014
5015 /* If we have (and (rotate X C) M) and C is at least as large as the
5016 number of bits in M, this is an extraction. */
5017
5018 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5019 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5020 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5021 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5022 {
5023 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5024 new = make_extraction (mode, new,
5025 (GET_MODE_BITSIZE (mode)
5026 - INTVAL (XEXP (XEXP (x, 0), 1))),
5027 NULL_RTX, i, 1, 0, in_code == COMPARE);
5028 }
5029
5030 /* On machines without logical shifts, if the operand of the AND is
5031 a logical shift and our mask turns off all the propagated sign
5032 bits, we can replace the logical shift with an arithmetic shift. */
5033 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5034 && (lshr_optab->handlers[(int) mode].insn_code
5035 == CODE_FOR_nothing)
5036 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5037 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5038 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5039 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5040 && mode_width <= HOST_BITS_PER_WIDE_INT)
5041 {
5042 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5043
5044 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5045 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5046 SUBST (XEXP (x, 0),
5047 gen_rtx_combine (ASHIFTRT, mode,
5048 make_compound_operation (XEXP (XEXP (x, 0), 0),
5049 next_code),
5050 XEXP (XEXP (x, 0), 1)));
5051 }
5052
5053 /* If the constant is one less than a power of two, this might be
5054 representable by an extraction even if no shift is present.
5055 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5056 we are in a COMPARE. */
5057 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5058 new = make_extraction (mode,
5059 make_compound_operation (XEXP (x, 0),
5060 next_code),
5061 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5062
5063 /* If we are in a comparison and this is an AND with a power of two,
5064 convert this into the appropriate bit extract. */
5065 else if (in_code == COMPARE
5066 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5067 new = make_extraction (mode,
5068 make_compound_operation (XEXP (x, 0),
5069 next_code),
5070 i, NULL_RTX, 1, 1, 0, 1);
5071
5072 break;
5073
5074 case LSHIFTRT:
5075 /* If the sign bit is known to be zero, replace this with an
5076 arithmetic shift. */
5077 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5078 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5079 && mode_width <= HOST_BITS_PER_WIDE_INT
5080 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
5081 {
5082 new = gen_rtx_combine (ASHIFTRT, mode,
5083 make_compound_operation (XEXP (x, 0),
5084 next_code),
5085 XEXP (x, 1));
5086 break;
5087 }
5088
5089 /* ... fall through ... */
5090
5091 case ASHIFTRT:
5092 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5093 this is a SIGN_EXTRACT. */
5094 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5095 && GET_CODE (XEXP (x, 0)) == ASHIFT
5096 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5097 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
5098 {
5099 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5100 new = make_extraction (mode, new,
5101 (INTVAL (XEXP (x, 1))
5102 - INTVAL (XEXP (XEXP (x, 0), 1))),
5103 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5104 code == LSHIFTRT, 0, in_code == COMPARE);
5105 }
5106
5107 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
5108 cases, we are better off returning a SIGN_EXTEND of the operation. */
5109
5110 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5111 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5112 || GET_CODE (XEXP (x, 0)) == XOR
5113 || GET_CODE (XEXP (x, 0)) == PLUS)
5114 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5115 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5116 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5117 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5118 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5119 && (INTVAL (XEXP (XEXP (x, 0), 1))
5120 & (((HOST_WIDE_INT) 1
5121 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
5122 {
5123 HOST_WIDE_INT newop1
5124 = (INTVAL (XEXP (XEXP (x, 0), 1))
5125 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
5126
5127 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5128 next_code);
5129 new = make_extraction (mode,
5130 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
5131 GEN_INT (newop1)),
5132 (INTVAL (XEXP (x, 1))
5133 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5134 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5135 code == LSHIFTRT, 0, in_code == COMPARE);
5136 }
5137
5138 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5139 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5140 && GET_CODE (XEXP (x, 0)) == NEG
5141 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5142 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5143 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5144 {
5145 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5146 next_code);
5147 new = make_extraction (mode,
5148 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5149 new, 0),
5150 (INTVAL (XEXP (x, 1))
5151 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5152 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5153 code == LSHIFTRT, 0, in_code == COMPARE);
5154 }
5155 break;
5156
5157 case SUBREG:
5158 /* Call ourselves recursively on the inner expression. If we are
5159 narrowing the object and it has a different RTL code from
5160 what it originally did, do this SUBREG as a force_to_mode. */
5161
5162 tem = make_compound_operation (SUBREG_REG (x), next_code);
5163 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5164 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5165 && subreg_lowpart_p (x))
5166 return force_to_mode (tem, mode, GET_MODE_BITSIZE (mode), NULL_RTX);
5167 }
5168
5169 if (new)
5170 {
5171 x = gen_lowpart_for_combine (mode, new);
5172 code = GET_CODE (x);
5173 }
5174
5175 /* Now recursively process each operand of this operation. */
5176 fmt = GET_RTX_FORMAT (code);
5177 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5178 if (fmt[i] == 'e')
5179 {
5180 new = make_compound_operation (XEXP (x, i), next_code);
5181 SUBST (XEXP (x, i), new);
5182 }
5183
5184 return x;
5185 }
5186 \f
5187 /* Given M, see if it is a value that would select a field of bits
5188 within an item, but not the entire word. Return -1 if not.
5189 Otherwise, return the starting position of the field, where 0 is the
5190 low-order bit.
5191
5192 *PLEN is set to the length of the field. */
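
/* For example (illustrative): M == 0x0ff0 has its lowest set bit at
position 4 and (M >> 4) + 1 == 0x100 is a power of two, so we return 4
and set *PLEN to 8; M == 0x0ff1 selects no contiguous field and
returns -1. */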
5193
5194 static int
5195 get_pos_from_mask (m, plen)
5196 unsigned HOST_WIDE_INT m;
5197 int *plen;
5198 {
5199 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5200 int pos = exact_log2 (m & - m);
5201
5202 if (pos < 0)
5203 return -1;
5204
5205 /* Now shift off the low-order zero bits and see if we have a power of
5206 two minus 1. */
5207 *plen = exact_log2 ((m >> pos) + 1);
5208
5209 if (*plen <= 0)
5210 return -1;
5211
5212 return pos;
5213 }
5214 \f
5215 /* Rewrite X so that it is an expression in MODE. We only care about the
5216 low-order BITS bits so we can ignore AND operations that just clear
5217 higher-order bits.
5218
5219 Also, if REG is non-zero and X is a register equal in value to REG,
5220 replace X with REG. */
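
/* For example (illustrative): if we only care about the low 8 bits
(BITS == 8), (and:SI X (const_int 255)) can be forced to QImode as
just the low part of X, since that AND only clears bits we ignore. */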
5221
5222 static rtx
5223 force_to_mode (x, mode, bits, reg)
5224 rtx x;
5225 enum machine_mode mode;
5226 int bits;
5227 rtx reg;
5228 {
5229 enum rtx_code code = GET_CODE (x);
5230 enum machine_mode op_mode = mode;
5231
5232 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5233 just get X in the proper mode. */
5234
5235 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5236 || bits > GET_MODE_BITSIZE (mode))
5237 return gen_lowpart_for_combine (mode, x);
5238
5239 switch (code)
5240 {
5241 case SIGN_EXTEND:
5242 case ZERO_EXTEND:
5243 case ZERO_EXTRACT:
5244 case SIGN_EXTRACT:
5245 x = expand_compound_operation (x);
5246 if (GET_CODE (x) != code)
5247 return force_to_mode (x, mode, bits, reg);
5248 break;
5249
5250 case REG:
5251 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5252 || rtx_equal_p (reg, get_last_value (x))))
5253 x = reg;
5254 break;
5255
5256 case CONST_INT:
5257 if (bits < HOST_BITS_PER_WIDE_INT)
5258 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5259 return x;
5260
5261 case SUBREG:
5262 /* Ignore low-order SUBREGs. */
5263 if (subreg_lowpart_p (x))
5264 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5265 break;
5266
5267 case AND:
5268 /* If this is an AND with a constant, handle it specially here.
5269 Otherwise, we fall through to the general binary case below. */
5270
5271 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5272 {
5273 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5274 int len = exact_log2 (mask + 1);
5275 rtx op = XEXP (x, 0);
5276
5277 /* If this is masking some low-order bits, we may be able to
5278 impose a stricter constraint on what bits of the operand are
5279 required. */
5280
5281 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5282 reg);
5283
5284 if (bits < HOST_BITS_PER_WIDE_INT)
5285 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5286
5287 /* If we have no AND in MODE, use the original mode for the
5288 operation. */
5289
5290 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5291 op_mode = GET_MODE (x);
5292
5293 x = simplify_and_const_int (x, op_mode, op, mask);
5294
5295 /* If X is still an AND, see if it is an AND with a mask that
5296 is just some low-order bits. If so, and it is BITS wide (it
5297 can't be wider), we don't need it. */
5298
5299 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5300 && bits < HOST_BITS_PER_WIDE_INT
5301 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5302 x = XEXP (x, 0);
5303
5304 break;
5305 }
5306
5307 /* ... fall through ... */
5308
5309 case PLUS:
5310 case MINUS:
5311 case MULT:
5312 case IOR:
5313 case XOR:
5314 /* For most binary operations, just propagate into the operation and
5315 change the mode if we have an operation of that mode. */
5316
5317 if ((code == PLUS
5318 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5319 || (code == MINUS
5320 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5321 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5322 == CODE_FOR_nothing))
5323 || (code == AND
5324 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5325 || (code == IOR
5326 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5327 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5328 == CODE_FOR_nothing)))
5329 op_mode = GET_MODE (x);
5330
5331 x = gen_binary (code, op_mode,
5332 gen_lowpart_for_combine (op_mode,
5333 force_to_mode (XEXP (x, 0),
5334 mode, bits,
5335 reg)),
5336 gen_lowpart_for_combine (op_mode,
5337 force_to_mode (XEXP (x, 1),
5338 mode, bits,
5339 reg)));
5340 break;
5341
5342 case ASHIFT:
5343 case LSHIFT:
5344 /* For left shifts, do the same, but just for the first operand.
5345 If the shift count is a constant, we need even fewer bits of the
5346 first operand. */
5347
5348 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5349 bits -= INTVAL (XEXP (x, 1));
5350
5351 if ((code == ASHIFT
5352 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5353 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5354 == CODE_FOR_nothing)))
5355 op_mode = GET_MODE (x);
5356
5357 x = gen_binary (code, op_mode,
5358 gen_lowpart_for_combine (op_mode,
5359 force_to_mode (XEXP (x, 0),
5360 mode, bits,
5361 reg)),
5362 XEXP (x, 1));
5363 break;
5364
5365 case LSHIFTRT:
5366 /* We can only do something here if the shift count is a constant.
5367 If the count plus BITS is no larger than the width of MODE, we
5368 can do the shift in MODE. */
5369
5370 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5371 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5372 {
5373 rtx inner = force_to_mode (XEXP (x, 0), mode,
5374 bits + INTVAL (XEXP (x, 1)), reg);
5375
5376 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5377 op_mode = GET_MODE (x);
5378
5379 x = gen_binary (LSHIFTRT, op_mode,
5380 gen_lowpart_for_combine (op_mode, inner),
5381 XEXP (x, 1));
5382 }
5383 break;
5384
5385 case ASHIFTRT:
5386 /* If this is a sign-extension operation that just affects bits
5387 we don't care about, remove it. */
5388
5389 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5390 && INTVAL (XEXP (x, 1)) >= 0
5391 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5392 && GET_CODE (XEXP (x, 0)) == ASHIFT
5393 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5394 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5395 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5396 break;
5397
5398 case NEG:
5399 case NOT:
5400 if ((code == NEG
5401 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5402 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5403 == CODE_FOR_nothing)))
5404 op_mode = GET_MODE (x);
5405
5406 /* Handle these similarly to the way we handle most binary operations. */
5407 x = gen_unary (code, op_mode,
5408 gen_lowpart_for_combine (op_mode,
5409 force_to_mode (XEXP (x, 0), mode,
5410 bits, reg)));
5411 break;
5412
5413 case IF_THEN_ELSE:
5414 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5415 written in a narrower mode. We play it safe and do not do so. */
5416
5417 SUBST (XEXP (x, 1),
5418 gen_lowpart_for_combine (GET_MODE (x),
5419 force_to_mode (XEXP (x, 1), mode,
5420 bits, reg)));
5421 SUBST (XEXP (x, 2),
5422 gen_lowpart_for_combine (GET_MODE (x),
5423 force_to_mode (XEXP (x, 2), mode,
5424 bits, reg)));
5425 break;
5426 }
5427
5428 /* Ensure we return a value of the proper mode. */
5429 return gen_lowpart_for_combine (mode, x);
5430 }
5431 \f
5432 /* Return the value of expression X given the fact that condition COND
5433 is known to be true when applied to REG as its first operand and VAL
5434 as its second. X is known to not be shared and so can be modified in
5435 place.
5436
5437 We only handle the simplest cases, and specifically those cases that
5438 arise with IF_THEN_ELSE expressions. */
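
/* For example (illustrative): if the known condition is REG < VAL,
then (smin REG VAL) must equal REG and we return REG; likewise,
knowing REG >= 0 lets the ABS case below return its operand. */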
5439
5440 static rtx
5441 known_cond (x, cond, reg, val)
5442 rtx x;
5443 enum rtx_code cond;
5444 rtx reg, val;
5445 {
5446 enum rtx_code code = GET_CODE (x);
5447 rtx new, temp;
5448 char *fmt;
5449 int i, j;
5450
5451 if (side_effects_p (x))
5452 return x;
5453
5454 if (cond == EQ && rtx_equal_p (x, reg))
5455 return val;
5456
5457 /* If X is (abs REG) and we know something about REG's relationship
5458 with zero, we may be able to simplify this. */
5459
5460 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5461 switch (cond)
5462 {
5463 case GE: case GT: case EQ:
5464 return XEXP (x, 0);
5465 case LT: case LE:
5466 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5467 }
5468
5469 /* The only other cases we handle are MIN, MAX, and comparisons if the
5470 operands are the same as REG and VAL. */
5471
5472 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5473 {
5474 if (rtx_equal_p (XEXP (x, 0), val))
5475 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5476
5477 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5478 {
5479 if (GET_RTX_CLASS (code) == '<')
5480 return (comparison_dominates_p (cond, code) ? const_true_rtx
5481 : (comparison_dominates_p (cond,
5482 reverse_condition (code))
5483 ? const0_rtx : x));
5484
5485 else if (code == SMAX || code == SMIN
5486 || code == UMIN || code == UMAX)
5487 {
5488 int unsignedp = (code == UMIN || code == UMAX);
5489
5490 if (code == SMAX || code == UMAX)
5491 cond = reverse_condition (cond);
5492
5493 switch (cond)
5494 {
5495 case GE: case GT:
5496 return unsignedp ? x : XEXP (x, 1);
5497 case LE: case LT:
5498 return unsignedp ? x : XEXP (x, 0);
5499 case GEU: case GTU:
5500 return unsignedp ? XEXP (x, 1) : x;
5501 case LEU: case LTU:
5502 return unsignedp ? XEXP (x, 0) : x;
5503 }
5504 }
5505 }
5506 }
5507
5508 fmt = GET_RTX_FORMAT (code);
5509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5510 {
5511 if (fmt[i] == 'e')
5512 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5513 else if (fmt[i] == 'E')
5514 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5515 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5516 cond, reg, val));
5517 }
5518
5519 return x;
5520 }
5521 \f
5522 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
5523 Return that assignment if so.
5524
5525 We only handle the most common cases. */
5526
5527 static rtx
5528 make_field_assignment (x)
5529 rtx x;
5530 {
5531 rtx dest = SET_DEST (x);
5532 rtx src = SET_SRC (x);
5533 rtx ourdest;
5534 rtx assign;
5535 HOST_WIDE_INT c1;
5536 int pos, len;
5537 rtx other;
5538 enum machine_mode mode;
5539
5540 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5541 a clear of a one-bit field. We will have changed it to
5542 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5543 for a SUBREG. */
5544
5545 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5546 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5547 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5548 && (rtx_equal_p (dest, XEXP (src, 1))
5549 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5550 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5551 {
5552 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5553 1, 1, 1, 0);
5554 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5555 }
5556
5557 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5558 && subreg_lowpart_p (XEXP (src, 0))
5559 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5560 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5561 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5562 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5563 && (rtx_equal_p (dest, XEXP (src, 1))
5564 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5565 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5566 {
5567 assign = make_extraction (VOIDmode, dest, 0,
5568 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5569 1, 1, 1, 0);
5570 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5571 }
5572
5573 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5574 one-bit field. */
5575 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5576 && XEXP (XEXP (src, 0), 0) == const1_rtx
5577 && (rtx_equal_p (dest, XEXP (src, 1))
5578 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5579 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5580 {
5581 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5582 1, 1, 1, 0);
5583 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5584 }
5585
5586 /* The other case we handle is assignments into a constant-position
5587 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5588 a mask that has all one bits except for a group of zero bits and
5589 OTHER is known to have zeros where C1 has ones, this is such an
5590 assignment. Compute the position and length from C1. Shift OTHER
5591 to the appropriate position, force it to the required mode, and
5592 make the extraction. Check for the AND in both operands. */
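
/* For example (illustrative): with C1 == ~0x0ff0 in SImode, a SRC of
(ior (and DEST C1) OTHER), where OTHER is known zero outside bits
4..11, assigns an 8-bit field at bit 4; we produce roughly
(set (zero_extract DEST (const_int 8) (const_int 4))
(lshiftrt OTHER (const_int 4))), with the source forced to the
field's mode. */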
5593
5594 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5595 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5596 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5597 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5598 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
5599 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5600 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5601 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5602 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5603 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5604 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5605 dest)))
5606 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5607 else
5608 return x;
5609
5610 pos = get_pos_from_mask (~c1, &len);
5611 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5612 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
5613 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
5614 return x;
5615
5616 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5617
5618 /* The mode to use for the source is the mode of the assignment, or of
5619 what is inside a possible STRICT_LOW_PART. */
5620 mode = (GET_CODE (assign) == STRICT_LOW_PART
5621 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5622
5623 /* Shift OTHER right POS places and make it the source, restricting it
5624 to the proper length and mode. */
5625
5626 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5627 GET_MODE (src), other, pos),
5628 mode, len, dest);
5629
5630 return gen_rtx_combine (SET, VOIDmode, assign, src);
5631 }
5632 \f
5633 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5634 if so. */
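
/* For example (illustrative): (ior (and A C) (and B C)) becomes
(and (ior A B) C), and (plus (mult A C) (mult B C)) becomes
(mult (plus A B) C); the inner result may then simplify further. */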
5635
5636 static rtx
5637 apply_distributive_law (x)
5638 rtx x;
5639 {
5640 enum rtx_code code = GET_CODE (x);
5641 rtx lhs, rhs, other;
5642 rtx tem;
5643 enum rtx_code inner_code;
5644
5645 /* Distributivity is not true for floating point.
5646 It can change the value. So don't do it.
5647 -- rms and moshier@world.std.com. */
5648 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5649 return x;
5650
5651 /* The outer operation can only be one of the following: */
5652 if (code != IOR && code != AND && code != XOR
5653 && code != PLUS && code != MINUS)
5654 return x;
5655
5656 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5657
5658 /* If either operand is a primitive we can't do anything, so get out fast. */
5659 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5660 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5661 return x;
5662
5663 lhs = expand_compound_operation (lhs);
5664 rhs = expand_compound_operation (rhs);
5665 inner_code = GET_CODE (lhs);
5666 if (inner_code != GET_CODE (rhs))
5667 return x;
5668
5669 /* See if the inner and outer operations distribute. */
5670 switch (inner_code)
5671 {
5672 case LSHIFTRT:
5673 case ASHIFTRT:
5674 case AND:
5675 case IOR:
5676 /* These all distribute except over PLUS. */
5677 if (code == PLUS || code == MINUS)
5678 return x;
5679 break;
5680
5681 case MULT:
5682 if (code != PLUS && code != MINUS)
5683 return x;
5684 break;
5685
5686 case ASHIFT:
5687 case LSHIFT:
5688 /* These are also multiplies, so they distribute over everything. */
5689 break;
5690
5691 case SUBREG:
5692 /* Non-paradoxical SUBREGs distribute over all operations, provided
5693 the inner modes and word numbers are the same, this is an extraction
5694 of a low-order part, we don't convert an fp operation to int or
5695 vice versa, and we would not be converting a single-word
5696 operation into a multi-word operation. The latter test is not
5697 required, but it prevents generating unneeded multi-word operations.
5698 Some of the previous tests are redundant given the latter test, but
5699 are retained because they are required for correctness.
5700
5701 We produce the result slightly differently in this case. */
5702
5703 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5704 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5705 || ! subreg_lowpart_p (lhs)
5706 || (GET_MODE_CLASS (GET_MODE (lhs))
5707 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5708 || (GET_MODE_SIZE (GET_MODE (lhs))
5709 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5710 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5711 return x;
5712
5713 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5714 SUBREG_REG (lhs), SUBREG_REG (rhs));
5715 return gen_lowpart_for_combine (GET_MODE (x), tem);
5716
5717 default:
5718 return x;
5719 }
5720
5721 /* Set LHS and RHS to the inner operands (A and B in the example
5722 above) and set OTHER to the common operand (C in the example).
5723 There is only one way to do this unless the inner operation is
5724 commutative. */
5725 if (GET_RTX_CLASS (inner_code) == 'c'
5726 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5727 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5728 else if (GET_RTX_CLASS (inner_code) == 'c'
5729 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5730 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5731 else if (GET_RTX_CLASS (inner_code) == 'c'
5732 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5733 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5734 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5735 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5736 else
5737 return x;
5738
5739 /* Form the new inner operation, seeing if it simplifies first. */
5740 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5741
5742 /* There is one exception to the general way of distributing:
5743 (a | c) ^ (b | c) -> (a ^ b) & ~c */
5744 if (code == XOR && inner_code == IOR)
5745 {
5746 inner_code = AND;
5747 other = gen_unary (NOT, GET_MODE (x), other);
5748 }
5749
5750 /* We may be able to continue distributing the result, so call
5751 ourselves recursively on the inner operation before forming the
5752 outer operation, which we return. */
5753 return gen_binary (inner_code, GET_MODE (x),
5754 apply_distributive_law (tem), other);
5755 }
5756 \f
5757 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5758 in MODE.
5759
5760 Return an equivalent form, if different from X. Otherwise, return X. If
5761 X is zero, we are to always construct the equivalent form. */
5762
5763 static rtx
5764 simplify_and_const_int (x, mode, varop, constop)
5765 rtx x;
5766 enum machine_mode mode;
5767 rtx varop;
5768 unsigned HOST_WIDE_INT constop;
5769 {
5770 register enum machine_mode tmode;
5771 register rtx temp;
5772 unsigned HOST_WIDE_INT nonzero;
5773
5774 /* There is a large class of optimizations based on the principle that
5775 some operations produce results where certain bits are known to be zero,
5776 and hence are not significant to the AND. For example, if we have just
5777 done a left shift of one bit, the low-order bit is known to be zero and
5778 hence an AND with a mask of ~1 would not do anything.
5779
5780 At the end of the following loop, we set:
5781
5782 VAROP to be the item to be AND'ed with;
5783 CONSTOP to the constant value to AND it with. */
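
/* A small illustrative case: for (and (ashift X (const_int 1))
(const_int 1)), the shift guarantees that the low-order bit is zero,
so CONSTOP & NONZERO below becomes 0 and we return const0_rtx. */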
5784
5785 while (1)
5786 {
5787 /* If we ever encounter a mode wider than the host machine's widest
5788 integer size, we can't compute the masks accurately, so give up. */
5789 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5790 break;
5791
5792 /* Unless one of the cases below does a `continue',
5793 a `break' will be executed to exit the loop. */
5794
5795 switch (GET_CODE (varop))
5796 {
5797 case CLOBBER:
5798 /* If VAROP is a (clobber (const_int)), return it since we know
5799 we are generating something that won't match. */
5800 return varop;
5801
5802 #if ! BITS_BIG_ENDIAN
5803 case USE:
5804 /* VAROP is a (use (mem ..)) that was made from a bit-field
5805 extraction that spanned the boundary of the MEM. If we are
5806 now masking so it is within that boundary, we don't need the
5807 USE any more. */
5808 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5809 {
5810 varop = XEXP (varop, 0);
5811 continue;
5812 }
5813 break;
5814 #endif
5815
5816 case SUBREG:
5817 if (subreg_lowpart_p (varop)
5818 /* We can ignore the effect of this SUBREG if it narrows the mode
5819 or, on machines where byte operations extend, if the
5820 constant masks to zero all the bits the mode doesn't have. */
5821 && ((GET_MODE_SIZE (GET_MODE (varop))
5822 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5823 #ifdef BYTE_LOADS_EXTEND
5824 || (0 == (constop
5825 & GET_MODE_MASK (GET_MODE (varop))
5826 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5827 #endif
5828 ))
5829 {
5830 varop = SUBREG_REG (varop);
5831 continue;
5832 }
5833 break;
5834
5835 case ZERO_EXTRACT:
5836 case SIGN_EXTRACT:
5837 case ZERO_EXTEND:
5838 case SIGN_EXTEND:
5839 /* Try to expand these into a series of shifts and then work
5840 with that result. If we can't, for example, if the extract
5841 isn't at a fixed position, give up. */
5842 temp = expand_compound_operation (varop);
5843 if (temp != varop)
5844 {
5845 varop = temp;
5846 continue;
5847 }
5848 break;
5849
5850 case AND:
5851 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5852 {
5853 constop &= INTVAL (XEXP (varop, 1));
5854 varop = XEXP (varop, 0);
5855 continue;
5856 }
5857 break;
5858
5859 case IOR:
5860 case XOR:
5861 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5862 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5863 operation which may be a bitfield extraction. */
5864
5865 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5866 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5867 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5868 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5869 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5870 && ((INTVAL (XEXP (varop, 1))
5871 & ~ nonzero_bits (XEXP (varop, 0), GET_MODE (varop))) == 0))
5872 {
5873 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5874 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5875 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5876 XEXP (XEXP (varop, 0), 0), temp);
5877 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5878 temp, XEXP (varop, 1));
5879 continue;
5880 }
5881
5882 /* Apply the AND to both branches of the IOR or XOR, then try to
5883 apply the distributive law. This may eliminate operations
5884 if either branch can be simplified because of the AND.
5885 It may also make some cases more complex, but those cases
5886 probably won't match a pattern either with or without this. */
5887 return
5888 gen_lowpart_for_combine
5889 (mode, apply_distributive_law
5890 (gen_rtx_combine
5891 (GET_CODE (varop), GET_MODE (varop),
5892 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5893 XEXP (varop, 0), constop),
5894 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5895 XEXP (varop, 1), constop))));
5896
5897 case NOT:
5898 /* (and (not FOO) CONST_OP) is (and (xor FOO CONST_OP) CONST_OP), so
5899 if FOO is an LSHIFTRT we can do the same as above. */
5900
5901 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5902 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5903 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5904 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5905 {
5906 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5907 temp = gen_binary (XOR, GET_MODE (varop),
5908 XEXP (XEXP (varop, 0), 0), temp);
5909 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5910 temp, XEXP (XEXP (varop, 0), 1));
5911 continue;
5912 }
5913 break;
5914
5915 case ASHIFTRT:
5916 /* If we are just looking for the sign bit, we don't need this
5917 shift at all, even if it has a variable count. */
5918 if (constop == ((HOST_WIDE_INT) 1
5919 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5920 {
5921 varop = XEXP (varop, 0);
5922 continue;
5923 }
5924
5925 /* If this is a shift by a constant, get a mask that contains
5926 those bits that are not copies of the sign bit. We then have
5927 two cases: If CONSTOP only includes those bits, this can be
5928 a logical shift, which may allow simplifications. If CONSTOP
5929 is a single-bit field not within those bits, we are requesting
5930 a copy of the sign bit and hence can shift the sign bit to
5931 the appropriate location. */
5932 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5933 && INTVAL (XEXP (varop, 1)) >= 0
5934 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5935 {
5936 int i = -1;
5937
5938 nonzero = GET_MODE_MASK (GET_MODE (varop));
5939 nonzero >>= INTVAL (XEXP (varop, 1));
5940
5941 if ((constop & ~ nonzero) == 0
5942 || (i = exact_log2 (constop)) >= 0)
5943 {
5944 varop = simplify_shift_const
5945 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5946 i < 0 ? INTVAL (XEXP (varop, 1))
5947 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5948 if (GET_CODE (varop) != ASHIFTRT)
5949 continue;
5950 }
5951 }
5952
5953 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5954 even if the shift count isn't a constant. */
5955 if (constop == 1)
5956 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5957 XEXP (varop, 0), XEXP (varop, 1));
5958 break;
5959
5960 case LSHIFTRT:
5961 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5962 shift and AND produces only copies of the sign bit (C2 is one less
5963 than a power of two), we can do this with just a shift. */
5964
5965 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5966 && ((INTVAL (XEXP (varop, 1))
5967 + num_sign_bit_copies (XEXP (varop, 0),
5968 GET_MODE (XEXP (varop, 0))))
5969 >= GET_MODE_BITSIZE (GET_MODE (varop)))
5970 && exact_log2 (constop + 1) >= 0)
5971 varop
5972 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5973 GEN_INT (GET_MODE_BITSIZE (GET_MODE (varop))
5974 - exact_log2 (constop + 1)));
5975 break;
5976
5977 case NE:
5978 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5979 included in STORE_FLAG_VALUE and FOO has no bits that might be
5980 nonzero not in CONST. */
5981 if ((constop & ~ STORE_FLAG_VALUE) == 0
5982 && XEXP (varop, 1) == const0_rtx
5983 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5984 {
5985 varop = XEXP (varop, 0);
5986 continue;
5987 }
5988 break;
5989
5990 case PLUS:
5991 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5992 low-order bits (as in an alignment operation) and FOO is already
5993 aligned to that boundary, we can remove this AND
5994 and possibly the PLUS if it is now adding zero. */
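
/* For example (illustrative): in (and (plus X (const_int 16))
(const_int -16)), if X is known to have its low four bits zero, the
AND is redundant and the whole expression becomes (plus X 16). */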
5995 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5996 && exact_log2 (-constop) >= 0
5997 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5998 {
5999 varop = plus_constant (XEXP (varop, 0),
6000 INTVAL (XEXP (varop, 1)) & constop);
6001 constop = ~0;
6002 break;
6003 }
6004
6005 /* ... fall through ... */
6006
6007 case MINUS:
6008 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
6009 less than powers of two and M2 is narrower than M1, we can
6010 eliminate the inner AND. This occurs when incrementing
6011 bit fields. */
6012
6013 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
6014 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
6015 SUBST (XEXP (varop, 0),
6016 expand_compound_operation (XEXP (varop, 0)));
6017
6018 if (GET_CODE (XEXP (varop, 0)) == AND
6019 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6020 && exact_log2 (constop + 1) >= 0
6021 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
6022 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
6023 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
6024 break;
6025 }
6026
6027 break;
6028 }
6029
6030 /* If we have reached a constant, this whole thing is constant. */
6031 if (GET_CODE (varop) == CONST_INT)
6032 return GEN_INT (constop & INTVAL (varop));
6033
6034 /* See what bits may be nonzero in VAROP. Unlike the general case of
6035 a call to nonzero_bits, here we don't care about bits outside
6036 MODE. */
6037
6038 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6039
6040 /* Turn off all bits in the constant that are known to already be zero.
6041 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6042 which is tested below. */
6043
6044 constop &= nonzero;
6045
6046 /* If we don't have any bits left, return zero. */
6047 if (constop == 0)
6048 return const0_rtx;
6049
6050 /* Get VAROP in MODE, making a SUBREG if necessary. Don't make a new
6051 SUBREG if we already had one (just check for the simplest cases). */
6052 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6053 && GET_MODE (XEXP (x, 0)) == mode
6054 && SUBREG_REG (XEXP (x, 0)) == varop)
6055 varop = XEXP (x, 0);
6056 else
6057 varop = gen_lowpart_for_combine (mode, varop);
6058
6059 /* If we can't make the SUBREG, try to return what we were given. */
6060 if (GET_CODE (varop) == CLOBBER)
6061 return x ? x : varop;
6062
6063 /* If we are only masking insignificant bits, return VAROP. */
6064 if (constop == nonzero)
6065 x = varop;
6066
6067 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6068 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6069 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
6070
6071 else
6072 {
6073 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6074 || INTVAL (XEXP (x, 1)) != constop)
6075 SUBST (XEXP (x, 1), GEN_INT (constop));
6076
6077 SUBST (XEXP (x, 0), varop);
6078 }
6079
6080 return x;
6081 }
6082 \f
6083 /* Given an expression, X, compute which bits in X can be non-zero.
6084 We don't care about bits outside of those defined in MODE.
6085
6086 For most X this is simply GET_MODE_MASK (MODE), but if X is
6087 a shift, AND, or zero_extract, we can do better. */
6088
6089 static unsigned HOST_WIDE_INT
6090 nonzero_bits (x, mode)
6091 rtx x;
6092 enum machine_mode mode;
6093 {
6094 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6095 unsigned HOST_WIDE_INT inner_nz;
6096 enum rtx_code code;
6097 int mode_width = GET_MODE_BITSIZE (mode);
6098 rtx tem;
6099
6100 /* If X is wider than MODE, use its mode instead. */
6101 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6102 {
6103 mode = GET_MODE (x);
6104 nonzero = GET_MODE_MASK (mode);
6105 mode_width = GET_MODE_BITSIZE (mode);
6106 }
6107
6108 if (mode_width > HOST_BITS_PER_WIDE_INT)
6109 /* Our only callers in this case look for single-bit values, so
6110 just return the mode mask. Those tests will then be false. */
6111 return nonzero;
6112
6113 code = GET_CODE (x);
6114 switch (code)
6115 {
6116 case REG:
6117 #ifdef STACK_BOUNDARY
6118 /* If this is the stack pointer, we may know something about its
6119 alignment. If PUSH_ROUNDING is defined, it is possible for the
6120 stack to be momentarily aligned only to that amount, so we pick
6121 the least alignment. */
6122
6123 if (x == stack_pointer_rtx)
6124 {
6125 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6126
6127 #ifdef PUSH_ROUNDING
6128 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6129 #endif
6130
6131 return nonzero & ~ (sp_alignment - 1);
6132 }
6133 #endif
6134
6135 /* If X is a register whose value we can find, use that value.
6136 Otherwise, use the previously-computed nonzero bits for this
6137 register. */
6138
6139 tem = get_last_value (x);
6140 if (tem)
6141 return nonzero_bits (tem, mode);
6142 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6143 return reg_nonzero_bits[REGNO (x)] & nonzero;
6144 else
6145 return nonzero;
6146
6147 case CONST_INT:
6148 return INTVAL (x);
6149
6150 #ifdef BYTE_LOADS_ZERO_EXTEND
6151 case MEM:
6152 /* In many, if not most, RISC machines, reading a byte from memory
6153 zeros the rest of the register. Noticing that fact saves a lot
6154 of extra zero-extends. */
6155 nonzero &= GET_MODE_MASK (GET_MODE (x));
6156 break;
6157 #endif
6158
6159 #if STORE_FLAG_VALUE == 1
6160 case EQ: case NE:
6161 case GT: case GTU:
6162 case LT: case LTU:
6163 case GE: case GEU:
6164 case LE: case LEU:
6165
6166 if (GET_MODE_CLASS (mode) == MODE_INT)
6167 nonzero = 1;
6168
6169 /* A comparison operation only sets the bits given by its mode. The
6170 rest are left undefined. */
6171 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6172 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6173 break;
6174 #endif
6175
6176 case NEG:
6177 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6178 == GET_MODE_BITSIZE (GET_MODE (x)))
6179 nonzero = 1;
6180
6181 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6182 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6183 break;
6184
6185 case ABS:
6186 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6187 == GET_MODE_BITSIZE (GET_MODE (x)))
6188 nonzero = 1;
6189 break;
6190
6191 case TRUNCATE:
6192 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6193 break;
6194
6195 case ZERO_EXTEND:
6196 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6197 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6198 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6199 break;
6200
6201 case SIGN_EXTEND:
6202 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6203 Otherwise, show all the bits in the outer mode but not the inner
6204 may be non-zero. */
6205 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6206 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6207 {
6208 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6209 if (inner_nz &
6210 (((HOST_WIDE_INT) 1
6211 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6212 inner_nz |= (GET_MODE_MASK (mode)
6213 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6214 }
6215
6216 nonzero &= inner_nz;
6217 break;
6218
6219 case AND:
6220 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6221 & nonzero_bits (XEXP (x, 1), mode));
6222 break;
6223
6224 case XOR: case IOR:
6225 case UMIN: case UMAX: case SMIN: case SMAX:
6226 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6227 | nonzero_bits (XEXP (x, 1), mode));
6228 break;
6229
6230 case PLUS: case MINUS:
6231 case MULT:
6232 case DIV: case UDIV:
6233 case MOD: case UMOD:
6234 /* We can apply the rules of arithmetic to compute the number of
6235 high- and low-order zero bits of these operations. We start by
6236 computing the width (position of the highest-order non-zero bit)
6237 and the number of low-order zero bits for each value. */
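/* A worked illustration: if NZ0 == 0x0c (width 4, two low zero bits)
and NZ1 == 0x30 (width 6, four low zero bits), then for PLUS we get
RESULT_WIDTH == 7 and RESULT_LOW == 2, so the sum's nonzero bits are
confined to 0x7c. */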
6238 {
6239 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6240 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6241 int width0 = floor_log2 (nz0) + 1;
6242 int width1 = floor_log2 (nz1) + 1;
6243 int low0 = floor_log2 (nz0 & -nz0);
6244 int low1 = floor_log2 (nz1 & -nz1);
6245 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6246 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6247 int result_width = mode_width;
6248 int result_low = 0;
6249
6250 switch (code)
6251 {
6252 case PLUS:
6253 result_width = MAX (width0, width1) + 1;
6254 result_low = MIN (low0, low1);
6255 break;
6256 case MINUS:
6257 result_low = MIN (low0, low1);
6258 break;
6259 case MULT:
6260 result_width = width0 + width1;
6261 result_low = low0 + low1;
6262 break;
6263 case DIV:
6264 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6265 result_width = width0;
6266 break;
6267 case UDIV:
6268 result_width = width0;
6269 break;
6270 case MOD:
6271 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6272 result_width = MIN (width0, width1);
6273 result_low = MIN (low0, low1);
6274 break;
6275 case UMOD:
6276 result_width = MIN (width0, width1);
6277 result_low = MIN (low0, low1);
6278 break;
6279 }
6280
6281 if (result_width < mode_width)
6282 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6283
6284 if (result_low > 0)
6285 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6286 }
6287 break;
6288
6289 case ZERO_EXTRACT:
6290 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6291 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6292 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6293 break;
6294
6295 case SUBREG:
6296 /* If this is a SUBREG formed for a promoted variable that has
6297 been zero-extended, we know that at least the high-order bits
6298 are zero, though others might be too. */
6299
6300 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6301 nonzero = (GET_MODE_MASK (GET_MODE (x))
6302 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6303
6304 /* If the inner mode is a single word for both the host and target
6305 machines, we can compute this from which bits of the inner
6306 object might be nonzero. */
6307 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6308 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6309 <= HOST_BITS_PER_WIDE_INT))
6310 {
6311 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6312 #ifndef BYTE_LOADS_EXTEND
6313 /* On many CISC machines, accessing an object in a wider mode
6314 causes the high-order bits to become undefined. So they are
6315 not known to be zero. */
6316 if (GET_MODE_SIZE (GET_MODE (x))
6317 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6318 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6319 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6320 #endif
6321 }
6322 break;
6323
6324 case ASHIFTRT:
6325 case LSHIFTRT:
6326 case ASHIFT:
6327 case LSHIFT:
6328 case ROTATE:
6329 /* The nonzero bits are in two classes: any bits within MODE
6330 that aren't in GET_MODE (x) are always significant. The rest of the
6331 nonzero bits are those that are significant in the operand of
6332 the shift when shifted the appropriate number of bits. This
6333 shows that high-order bits are cleared by the right shift and
6334 low-order bits by left shifts. */
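/* For example (illustrative): in SImode, (lshiftrt:SI X (const_int 24))
can have nonzero bits only in the low byte, while
(ashift:SI X (const_int 24)) can have them only in the high byte. */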
6335 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6336 && INTVAL (XEXP (x, 1)) >= 0
6337 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6338 {
6339 enum machine_mode inner_mode = GET_MODE (x);
6340 int width = GET_MODE_BITSIZE (inner_mode);
6341 int count = INTVAL (XEXP (x, 1));
6342 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6343 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6344 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6345 unsigned HOST_WIDE_INT outer = 0;
6346
6347 if (mode_width > width)
6348 outer = (op_nonzero & nonzero & ~ mode_mask);
6349
6350 if (code == LSHIFTRT)
6351 inner >>= count;
6352 else if (code == ASHIFTRT)
6353 {
6354 inner >>= count;
6355
6356 /* If the sign bit may have been nonzero before the shift, we
6357 need to mark all the places it could have been copied to
6358 by the shift as possibly nonzero. */
6359 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6360 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6361 }
6362 else if (code == LSHIFT || code == ASHIFT)
6363 inner <<= count;
6364 else
6365 inner = ((inner << (count % width)
6366 | (inner >> (width - (count % width)))) & mode_mask);
6367
6368 nonzero &= (outer | inner);
6369 }
6370 break;
6371
6372 case FFS:
6373 /* This is at most the number of bits in the mode. */
6374 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6375 break;
6376
6377 case IF_THEN_ELSE:
6378 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6379 | nonzero_bits (XEXP (x, 2), mode));
6380 break;
6381 }
6382
6383 return nonzero;
6384 }
6385 \f
6386 /* Return the number of bits at the high-order end of X that are known to
6387 be equal to the sign bit. This number will always be between 1 and
6388 the number of bits in the mode of X. MODE is the mode to be used
6389 if X is VOIDmode. */
6390
6391 static int
6392 num_sign_bit_copies (x, mode)
6393 rtx x;
6394 enum machine_mode mode;
6395 {
6396 enum rtx_code code = GET_CODE (x);
6397 int bitwidth;
6398 int num0, num1, result;
6399 unsigned HOST_WIDE_INT nonzero;
6400 rtx tem;
6401
6402 /* If we weren't given a mode, use the mode of X. If the mode is still
6403 VOIDmode, we don't know anything. */
6404
6405 if (mode == VOIDmode)
6406 mode = GET_MODE (x);
6407
6408 if (mode == VOIDmode)
6409 return 1;
6410
6411 bitwidth = GET_MODE_BITSIZE (mode);
6412
6413 switch (code)
6414 {
6415 case REG:
6416 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6417 return reg_sign_bit_copies[REGNO (x)];
6418
6419 tem = get_last_value (x);
6420 if (tem != 0)
6421 return num_sign_bit_copies (tem, mode);
6422 break;
6423
6424 #ifdef BYTE_LOADS_SIGN_EXTEND
6425 case MEM:
6426 /* Some RISC machines sign-extend all loads smaller than a word. */
6427 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6428 #endif
6429
6430 case CONST_INT:
6431 /* If the constant is negative, take its 1's complement and remask.
6432 Then see how many zero bits we have. */
6433 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6434 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6435 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6436 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6437
6438 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
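/* For example, in 32-bit SImode the constant 0xffffff00 complements
   and masks to 0x000000ff, whose floor_log2 is 7, giving
   32 - 7 - 1 = 24 copies: bits 31 through 8 all equal the sign bit.  */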
6439
6440 case SUBREG:
6441 /* If this is a SUBREG for a promoted object that is sign-extended
6442 and we are looking at it in a wider mode, we know that at least the
6443 high-order bits are copies of the sign bit. */
6444
6445 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6446 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6447 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6448
6449 /* For a smaller object, just ignore the high bits. */
6450 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6451 {
6452 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6453 return MAX (1, (num0
6454 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6455 - bitwidth)));
6456 }
6457
6458 #ifdef BYTE_LOADS_EXTEND
6459 /* For paradoxical SUBREGs, just look inside since, on machines with
6460 one of these defined, we assume that operations are actually
6461 performed on the full register. Note that we are passing MODE
6462 to the recursive call, so the number of sign bit copies will
6463 remain relative to that mode, not the inner mode. */
6464
6465 if (GET_MODE_SIZE (GET_MODE (x))
6466 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6467 return num_sign_bit_copies (SUBREG_REG (x), mode);
6468 #endif
6469
6470 break;
6471
6472 case SIGN_EXTRACT:
6473 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6474 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6475 break;
6476
6477 case SIGN_EXTEND:
6478 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6479 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6480
6481 case TRUNCATE:
6482 /* For a smaller object, just ignore the high bits. */
6483 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6484 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6485 - bitwidth)));
6486
6487 case NOT:
6488 return num_sign_bit_copies (XEXP (x, 0), mode);
6489
6490 case ROTATE: case ROTATERT:
6491 /* If we are rotating left by a number of bits less than the number
6492 of sign bit copies, we can just subtract that amount from the
6493 number. */
6494 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6495 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6496 {
6497 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6498 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6499 : bitwidth - INTVAL (XEXP (x, 1))));
6500 }
6501 break;
6502
6503 case NEG:
6504 /* In general, this subtracts one sign bit copy. But if the value
6505 is known to be positive, the number of sign bit copies is the
6506 same as that of the input. Finally, if the input has just one bit
6507 that might be nonzero, all the bits are copies of the sign bit. */
6508 nonzero = nonzero_bits (XEXP (x, 0), mode);
6509 if (nonzero == 1)
6510 return bitwidth;
6511
6512 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6513 if (num0 > 1
6514 && bitwidth <= HOST_BITS_PER_WIDE_INT
6515 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6516 num0--;
6517
6518 return num0;
6519
6520 case IOR: case AND: case XOR:
6521 case SMIN: case SMAX: case UMIN: case UMAX:
6522 /* Logical operations will preserve the number of sign-bit copies.
6523 MIN and MAX operations always return one of the operands. */
6524 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6525 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6526 return MIN (num0, num1);
6527
6528 case PLUS: case MINUS:
6529 /* For addition and subtraction, we can have a 1-bit carry. However,
6530 if we are subtracting 1 from a positive number, there will not
6531 be such a carry. Furthermore, if the positive number is known to
6532 be 0 or 1, we know the result is either -1 or 0. */
6533
6534 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6535 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6536 {
6537 nonzero = nonzero_bits (XEXP (x, 0), mode);
6538 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6539 return (nonzero == 1 || nonzero == 0 ? bitwidth
6540 : bitwidth - floor_log2 (nonzero) - 1);
6541 }
6542
6543 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6544 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6545 return MAX (1, MIN (num0, num1) - 1);
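/* For example, in 32-bit SImode two operands with 8 sign-bit copies
   each fit in 25 bits as signed values; their sum fits in 26 bits,
   leaving MIN (8, 8) - 1 = 7 copies.  */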
6546
6547 case MULT:
6548 /* The number of bits of the product is the sum of the number of
6549 bits of both terms. However, unless one of the terms is known
6550 to be positive, we must allow for an additional bit since negating
6551 a negative number can remove one sign bit copy. */
6552
6553 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6554 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6555
6556 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6557 if (result > 0
6558 && bitwidth <= HOST_BITS_PER_WIDE_INT
6559 && ((nonzero_bits (XEXP (x, 0), mode)
6560 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6561 && ((nonzero_bits (XEXP (x, 1), mode)
6562 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6563 result--;
6564
6565 return MAX (1, result);
6566
6567 case UDIV:
6568 /* The result must be <= the first operand. */
6569 return num_sign_bit_copies (XEXP (x, 0), mode);
6570
6571 case UMOD:
6572 /* The result must be <= the second operand. */
6573 return num_sign_bit_copies (XEXP (x, 1), mode);
6574
6575 case DIV:
6576 /* Similar to unsigned division, except that we have to worry about
6577 the case where the divisor is negative, in which case we have
6578 to add 1. */
6579 result = num_sign_bit_copies (XEXP (x, 0), mode);
6580 if (result > 1
6581 && bitwidth <= HOST_BITS_PER_WIDE_INT
6582 && (nonzero_bits (XEXP (x, 1), mode)
6583 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6584 result--;
6585
6586 return result;
6587
6588 case MOD:
6589 result = num_sign_bit_copies (XEXP (x, 1), mode);
6590 if (result > 1
6591 && bitwidth <= HOST_BITS_PER_WIDE_INT
6592 && (nonzero_bits (XEXP (x, 1), mode)
6593 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6594 result--;
6595
6596 return result;
6597
6598 case ASHIFTRT:
6599 /* A right shift by a constant adds the shift count to the number
6600 of bits known to equal the sign bit. */
6601 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6602 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6603 && INTVAL (XEXP (x, 1)) > 0)
6604 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6605
6606 return num0;
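/* For example, (ashiftrt:SI X 8) has at least 1 + 8 = 9 sign-bit
   copies, since the shift replicates the sign bit into the eight
   vacated high-order positions.  */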
6607
6608 case ASHIFT:
6609 case LSHIFT:
6610 /* Left shifts destroy copies. */
6611 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6612 || INTVAL (XEXP (x, 1)) < 0
6613 || INTVAL (XEXP (x, 1)) >= bitwidth)
6614 return 1;
6615
6616 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6617 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6618
6619 case IF_THEN_ELSE:
6620 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6621 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6622 return MIN (num0, num1);
6623
6624 #if STORE_FLAG_VALUE == -1
6625 case EQ: case NE: case GE: case GT: case LE: case LT:
6626 case GEU: case GTU: case LEU: case LTU:
6627 return bitwidth;
6628 #endif
6629 }
6630
6631 /* If we haven't been able to figure it out by one of the above rules,
6632 see if some of the high-order bits are known to be zero. If so,
6633 count those bits and return one less than that amount. If we can't
6634 safely compute the mask for this mode, always return 1. */
6635
6636 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6637 return 1;
6638
6639 nonzero = nonzero_bits (x, mode);
6640 return (nonzero == GET_MODE_MASK (mode)
6641 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
6642 }
6643 \f
6644 /* Return the number of "extended" bits there are in X, when interpreted
6645 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6646 unsigned quantities, this is the number of high-order zero bits.
6647 For signed quantities, this is the number of copies of the sign bit
6648 minus 1. In both cases, this function returns the number of "spare"
6649 bits. For example, if two quantities for which this function returns
6650 at least 1 are added, the addition is known not to overflow.
6651
6652 This function will always return 0 unless called during combine, which
6653 implies that it must be called from a define_split. */
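/* For example (a sketch): an unsigned SImode value whose nonzero bits
   are at most 0x00ffffff has floor_log2 of 23, so extended_count
   returns 32 - 1 - 23 = 8 spare high-order bits.  */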
6654
6655 int
6656 extended_count (x, mode, unsignedp)
6657 rtx x;
6658 enum machine_mode mode;
6659 int unsignedp;
6660 {
6661 if (nonzero_sign_valid == 0)
6662 return 0;
6663
6664 return (unsignedp
6665 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6666 && (GET_MODE_BITSIZE (mode) - 1
6667 - floor_log2 (nonzero_bits (x, mode))))
6668 : num_sign_bit_copies (x, mode) - 1);
6669 }
6670 \f
6671 /* This function is called from `simplify_shift_const' to merge two
6672 outer operations. Specifically, we have already found that we need
6673 to perform operation *POP0 with constant *PCONST0 at the outermost
6674 position. We would now like to also perform OP1 with constant CONST1
6675 (with *POP0 being done last).
6676
6677 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6678 the resulting operation. *PCOMP_P is set to 1 if we would need to
6679 complement the innermost operand, otherwise it is unchanged.
6680
6681 MODE is the mode in which the operation will be done. No bits outside
6682 the width of this mode matter. It is assumed that the width of this mode
6683 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6684
6685 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6686 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6687 result is simply *PCONST0.
6688
6689 If the resulting operation cannot be expressed as one operation, we
6690 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
6691
6692 static int
6693 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6694 enum rtx_code *pop0;
6695 HOST_WIDE_INT *pconst0;
6696 enum rtx_code op1;
6697 HOST_WIDE_INT const1;
6698 enum machine_mode mode;
6699 int *pcomp_p;
6700 {
6701 enum rtx_code op0 = *pop0;
6702 HOST_WIDE_INT const0 = *pconst0;
6703
6704 const0 &= GET_MODE_MASK (mode);
6705 const1 &= GET_MODE_MASK (mode);
6706
6707 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6708 if (op0 == AND)
6709 const1 &= const0;
6710
6711 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6712 if OP0 is SET. */
6713
6714 if (op1 == NIL || op0 == SET)
6715 return 1;
6716
6717 else if (op0 == NIL)
6718 op0 = op1, const0 = const1;
6719
6720 else if (op0 == op1)
6721 {
6722 switch (op0)
6723 {
6724 case AND:
6725 const0 &= const1;
6726 break;
6727 case IOR:
6728 const0 |= const1;
6729 break;
6730 case XOR:
6731 const0 ^= const1;
6732 break;
6733 case PLUS:
6734 const0 += const1;
6735 break;
6736 case NEG:
6737 op0 = NIL;
6738 break;
6739 }
6740 }
6741
6742 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6743 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6744 return 0;
6745
6746 /* If the two constants aren't the same, we can't do anything. The
6747 remaining six cases can all be done. */
6748 else if (const0 != const1)
6749 return 0;
6750
6751 else
6752 switch (op0)
6753 {
6754 case IOR:
6755 if (op1 == AND)
6756 /* (a & b) | b == b */
6757 op0 = SET;
6758 else /* op1 == XOR */
6759 /* (a ^ b) | b == a | b */
6760 ;
6761 break;
6762
6763 case XOR:
6764 if (op1 == AND)
6765 /* (a & b) ^ b == (~a) & b */
6766 op0 = AND, *pcomp_p = 1;
6767 else /* op1 == IOR */
6768 /* (a | b) ^ b == a & ~b */
6769 op0 = AND, *pconst0 = ~ const0;
6770 break;
6771
6772 case AND:
6773 if (op1 == IOR)
6774 /* (a | b) & b == b */
6775 op0 = SET;
6776 else /* op1 == XOR */
6777 /* (a ^ b) & b == (~a) & b */
6778 *pcomp_p = 1;
6779 break;
6780 }
6781
6782 /* Check for NO-OP cases. */
6783 const0 &= GET_MODE_MASK (mode);
6784 if (const0 == 0
6785 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6786 op0 = NIL;
6787 else if (const0 == 0 && op0 == AND)
6788 op0 = SET;
6789 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6790 op0 = NIL;
6791
6792 *pop0 = op0;
6793 *pconst0 = const0;
6794
6795 return 1;
6796 }
6797 \f
6798 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6799 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6800 that we started with.
6801
6802 The shift is normally computed in the widest mode we find in VAROP, as
6803 long as it isn't a different number of words than RESULT_MODE. Exceptions
6804 are ASHIFTRT and ROTATE, which are always done in their original mode. */
6805
6806 static rtx
6807 simplify_shift_const (x, code, result_mode, varop, count)
6808 rtx x;
6809 enum rtx_code code;
6810 enum machine_mode result_mode;
6811 rtx varop;
6812 int count;
6813 {
6814 enum rtx_code orig_code = code;
6815 int orig_count = count;
6816 enum machine_mode mode = result_mode;
6817 enum machine_mode shift_mode, tmode;
6818 int mode_words
6819 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6820 /* We form (outer_op (code varop count) (outer_const)). */
6821 enum rtx_code outer_op = NIL;
6822 HOST_WIDE_INT outer_const;
6823 rtx const_rtx;
6824 int complement_p = 0;
6825 rtx new;
6826
6827 /* If we were given an invalid count, don't do anything except exactly
6828 what was requested. */
6829
6830 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6831 {
6832 if (x)
6833 return x;
6834
6835 return gen_rtx (code, mode, varop, GEN_INT (count));
6836 }
6837
6838 /* Unless one of the branches of the `if' in this loop does a `continue',
6839 we will `break' the loop after the `if'. */
6840
6841 while (count != 0)
6842 {
6843 /* If we have an operand of (clobber (const_int 0)), just return that
6844 value. */
6845 if (GET_CODE (varop) == CLOBBER)
6846 return varop;
6847
6848 /* If we discovered we had to complement VAROP, leave. Making a NOT
6849 here would cause an infinite loop. */
6850 if (complement_p)
6851 break;
6852
6853 /* Convert ROTATERT to ROTATE. */
6854 if (code == ROTATERT)
6855 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6856
6857 /* Canonicalize LSHIFT to ASHIFT. */
6858 if (code == LSHIFT)
6859 code = ASHIFT;
6860
6861 /* We need to determine what mode we will do the shift in. If the
6862 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6863 was originally done in. Otherwise, we can do it in MODE, the widest
6864 mode encountered. */
6865 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6866
6867 /* Handle cases where the count is greater than the size of the mode
6868 minus 1. For ASHIFT, use the size minus one as the count (this can
6869 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6870 take the count modulo the size. For other shifts, the result is
6871 zero.
6872
6873 Since these shifts are being produced by the compiler by combining
6874 multiple operations, each of which is defined, we know what the
6875 result is supposed to be. */
6876
6877 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6878 {
6879 if (code == ASHIFTRT)
6880 count = GET_MODE_BITSIZE (shift_mode) - 1;
6881 else if (code == ROTATE || code == ROTATERT)
6882 count %= GET_MODE_BITSIZE (shift_mode);
6883 else
6884 {
6885 /* We can't simply return zero because there may be an
6886 outer op. */
6887 varop = const0_rtx;
6888 count = 0;
6889 break;
6890 }
6891 }
6892
6893 /* Negative counts are invalid and should not have been made (a
6894 programmer-specified negative count should have been handled
6895 above). */
6896 else if (count < 0)
6897 abort ();
6898
6899 /* An arithmetic right shift of a quantity known to be -1 or 0
6900 is a no-op. */
6901 if (code == ASHIFTRT
6902 && (num_sign_bit_copies (varop, shift_mode)
6903 == GET_MODE_BITSIZE (shift_mode)))
6904 {
6905 count = 0;
6906 break;
6907 }
6908
6909 /* If we are doing an arithmetic right shift and discarding all but
6910 the sign bit copies, this is equivalent to doing a shift by the
6911 bitsize minus one. Convert it into that shift because it will often
6912 allow other simplifications. */
6913
6914 if (code == ASHIFTRT
6915 && (count + num_sign_bit_copies (varop, shift_mode)
6916 >= GET_MODE_BITSIZE (shift_mode)))
6917 count = GET_MODE_BITSIZE (shift_mode) - 1;
6918
6919 /* We simplify the tests below and elsewhere by converting
6920 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6921 `make_compound_operation' will convert it to an ASHIFTRT for
6922 those machines (such as Vax) that don't have a LSHIFTRT. */
6923 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6924 && code == ASHIFTRT
6925 && ((nonzero_bits (varop, shift_mode)
6926 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6927 == 0))
6928 code = LSHIFTRT;
6929
6930 switch (GET_CODE (varop))
6931 {
6932 case SIGN_EXTEND:
6933 case ZERO_EXTEND:
6934 case SIGN_EXTRACT:
6935 case ZERO_EXTRACT:
6936 new = expand_compound_operation (varop);
6937 if (new != varop)
6938 {
6939 varop = new;
6940 continue;
6941 }
6942 break;
6943
6944 case MEM:
6945 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6946 minus the width of a smaller mode, we can do this with a
6947 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6948 if ((code == ASHIFTRT || code == LSHIFTRT)
6949 && ! mode_dependent_address_p (XEXP (varop, 0))
6950 && ! MEM_VOLATILE_P (varop)
6951 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6952 MODE_INT, 1)) != BLKmode)
6953 {
6954 #if BYTES_BIG_ENDIAN
6955 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6956 #else
6957 new = gen_rtx (MEM, tmode,
6958 plus_constant (XEXP (varop, 0),
6959 count / BITS_PER_UNIT));
6960 #endif
6961 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6962 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6963 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6964 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6965 : ZERO_EXTEND, mode, new);
6966 count = 0;
6967 continue;
6968 }
6969 break;
6970
6971 case USE:
6972 /* Similar to the case above, except that we can only do this if
6973 the resulting mode is the same as that of the underlying MEM;
6974 we must adjust the address depending on the *bits* endianness
6975 because of the way that bit-field extract insns are defined. */
6976 if ((code == ASHIFTRT || code == LSHIFTRT)
6977 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6978 MODE_INT, 1)) != BLKmode
6979 && tmode == GET_MODE (XEXP (varop, 0)))
6980 {
6981 #if BITS_BIG_ENDIAN
6982 new = XEXP (varop, 0);
6983 #else
6984 new = copy_rtx (XEXP (varop, 0));
6985 SUBST (XEXP (new, 0),
6986 plus_constant (XEXP (new, 0),
6987 count / BITS_PER_UNIT));
6988 #endif
6989
6990 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6991 : ZERO_EXTEND, mode, new);
6992 count = 0;
6993 continue;
6994 }
6995 break;
6996
6997 case SUBREG:
6998 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6999 the same number of words as what we've seen so far. Then store
7000 the widest mode in MODE. */
7001 if (subreg_lowpart_p (varop)
7002 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7003 > GET_MODE_SIZE (GET_MODE (varop)))
7004 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7005 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7006 == mode_words))
7007 {
7008 varop = SUBREG_REG (varop);
7009 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7010 mode = GET_MODE (varop);
7011 continue;
7012 }
7013 break;
7014
7015 case MULT:
7016 /* Some machines use MULT instead of ASHIFT because MULT
7017 is cheaper. But it is still better on those machines to
7018 merge two shifts into one. */
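/* For example, (mult:SI X (const_int 8)) is rewritten as
   (ashift:SI X (const_int 3)) so it can merge with the outer shift.  */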
7019 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7020 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7021 {
7022 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7023 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7024 continue;
7025 }
7026 break;
7027
7028 case UDIV:
7029 /* Similar, for when divides are cheaper. */
7030 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7031 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7032 {
7033 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7034 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7035 continue;
7036 }
7037 break;
7038
7039 case ASHIFTRT:
7040 /* If we are extracting just the sign bit of an arithmetic right
7041 shift, that shift is not needed. */
7042 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7043 {
7044 varop = XEXP (varop, 0);
7045 continue;
7046 }
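/* For example, (lshiftrt:SI (ashiftrt:SI X N) 31) extracts only the
   sign bit, which the inner arithmetic shift leaves unchanged, so the
   inner shift is simply dropped.  */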
7047
7048 /* ... fall through ... */
7049
7050 case LSHIFTRT:
7051 case ASHIFT:
7052 case LSHIFT:
7053 case ROTATE:
7054 /* Here we have two nested shifts. The result is usually the
7055 AND of a new shift with a mask. We compute the result below. */
7056 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7057 && INTVAL (XEXP (varop, 1)) >= 0
7058 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7059 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7060 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7061 {
7062 enum rtx_code first_code = GET_CODE (varop);
7063 int first_count = INTVAL (XEXP (varop, 1));
7064 unsigned HOST_WIDE_INT mask;
7065 rtx mask_rtx;
7066 rtx inner;
7067
7068 if (first_code == LSHIFT)
7069 first_code = ASHIFT;
7070
7071 /* We have one common special case. We can't do any merging if
7072 the inner code is an ASHIFTRT of a smaller mode. However, if
7073 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7074 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7075 we can convert it to
7076 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7077 This simplifies certain SIGN_EXTEND operations. */
7078 if (code == ASHIFT && first_code == ASHIFTRT
7079 && (GET_MODE_BITSIZE (result_mode)
7080 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7081 {
7082 /* C3 has the low-order C1 bits zero. */
7083
7084 mask = (GET_MODE_MASK (mode)
7085 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7086
7087 varop = simplify_and_const_int (NULL_RTX, result_mode,
7088 XEXP (varop, 0), mask);
7089 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7090 varop, count);
7091 count = first_count;
7092 code = ASHIFTRT;
7093 continue;
7094 }
7095
7096 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7097 than C1 high-order bits equal to the sign bit, we can convert
7098 this to either an ASHIFT or an ASHIFTRT depending on the
7099 two counts.
7100
7101 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7102
7103 if (code == ASHIFTRT && first_code == ASHIFT
7104 && GET_MODE (varop) == shift_mode
7105 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7106 > first_count))
7107 {
7108 count -= first_count;
7109 if (count < 0)
7110 count = - count, code = ASHIFT;
7111 varop = XEXP (varop, 0);
7112 continue;
7113 }
7114
7115 /* There are some cases we can't do. If CODE is ASHIFTRT,
7116 we can only do this if FIRST_CODE is also ASHIFTRT.
7117
7118 We can't do the case when CODE is ROTATE and FIRST_CODE is
7119 ASHIFTRT.
7120
7121 If the mode of this shift is not the mode of the outer shift,
7122 we can't do this if either shift is ASHIFTRT or ROTATE.
7123
7124 Finally, we can't do any of these if the mode is too wide
7125 unless the codes are the same.
7126
7127 Handle the case where the shift codes are the same
7128 first. */
7129
7130 if (code == first_code)
7131 {
7132 if (GET_MODE (varop) != result_mode
7133 && (code == ASHIFTRT || code == ROTATE))
7134 break;
7135
7136 count += first_count;
7137 varop = XEXP (varop, 0);
7138 continue;
7139 }
7140
7141 if (code == ASHIFTRT
7142 || (code == ROTATE && first_code == ASHIFTRT)
7143 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7144 || (GET_MODE (varop) != result_mode
7145 && (first_code == ASHIFTRT || first_code == ROTATE
7146 || code == ROTATE)))
7147 break;
7148
7149 /* To compute the mask to apply after the shift, shift the
7150 nonzero bits of the inner shift the same way the
7151 outer shift will. */
7152
7153 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7154
7155 mask_rtx
7156 = simplify_binary_operation (code, result_mode, mask_rtx,
7157 GEN_INT (count));
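/* For example (a sketch in 32-bit SImode): for
   (lshiftrt:SI (ashift:SI X 8) 8), the inner shift's nonzero bits are
   at most 0xffffff00; shifting that mask right by 8 gives 0x00ffffff,
   and the opposite counts cancel, yielding (and:SI X 0x00ffffff).  */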
7158
7159 /* Give up if we can't compute an outer operation to use. */
7160 if (mask_rtx == 0
7161 || GET_CODE (mask_rtx) != CONST_INT
7162 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7163 INTVAL (mask_rtx),
7164 result_mode, &complement_p))
7165 break;
7166
7167 /* If the shifts are in the same direction, we add the
7168 counts. Otherwise, we subtract them. */
7169 if ((code == ASHIFTRT || code == LSHIFTRT)
7170 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7171 count += first_count;
7172 else
7173 count -= first_count;
7174
7175 /* If COUNT is positive, the new shift is usually CODE,
7176 except for the two exceptions below, in which case it is
7177 FIRST_CODE. If the count is negative, FIRST_CODE should
7178 always be used. */
7179 if (count > 0
7180 && ((first_code == ROTATE && code == ASHIFT)
7181 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7182 code = first_code;
7183 else if (count < 0)
7184 code = first_code, count = - count;
7185
7186 varop = XEXP (varop, 0);
7187 continue;
7188 }
7189
7190 /* If we have (A << B << C) for any shift, we can convert this to
7191 (A << C << B). This wins if A is a constant. Only try this if
7192 B is not a constant. */
7193
7194 else if (GET_CODE (varop) == code
7195 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7196 && 0 != (new
7197 = simplify_binary_operation (code, mode,
7198 XEXP (varop, 0),
7199 GEN_INT (count))))
7200 {
7201 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7202 count = 0;
7203 continue;
7204 }
7205 break;
7206
7207 case NOT:
7208 /* Make this fit the case below. */
7209 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7210 GEN_INT (GET_MODE_MASK (mode)));
7211 continue;
7212
7213 case IOR:
7214 case AND:
7215 case XOR:
7216 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7217 with C the size of VAROP - 1 and the shift is logical if
7218 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7219 we have an (le X 0) operation. If we have an arithmetic shift
7220 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7221 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7222
7223 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7224 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7225 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7226 && (code == LSHIFTRT || code == ASHIFTRT)
7227 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7228 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7229 {
7230 count = 0;
7231 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7232 const0_rtx);
7233
7234 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7235 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7236
7237 continue;
7238 }
7239
7240 /* If we have (shift (logical)), move the logical to the outside
7241 to allow it to possibly combine with another logical and the
7242 shift to combine with another shift. This also canonicalizes to
7243 what a ZERO_EXTRACT looks like. Also, some machines have
7244 (and (shift)) insns. */
7245
7246 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7247 && (new = simplify_binary_operation (code, result_mode,
7248 XEXP (varop, 1),
7249 GEN_INT (count))) != 0
7250 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7251 INTVAL (new), result_mode, &complement_p))
7252 {
7253 varop = XEXP (varop, 0);
7254 continue;
7255 }
7256
7257 /* If we can't do that, try to simplify the shift in each arm of the
7258 logical expression, make a new logical expression, and apply
7259 the inverse distributive law. */
7260 {
7261 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7262 XEXP (varop, 0), count);
7263 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7264 XEXP (varop, 1), count);
7265
7266 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7267 varop = apply_distributive_law (varop);
7268
7269 count = 0;
7270 }
7271 break;
7272
7273 case EQ:
7274 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7275 says that the sign bit can be tested, FOO has mode MODE, C is
7276 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7277 may be nonzero. */
7278 if (code == LSHIFT
7279 && XEXP (varop, 1) == const0_rtx
7280 && GET_MODE (XEXP (varop, 0)) == result_mode
7281 && count == GET_MODE_BITSIZE (result_mode) - 1
7282 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7283 && ((STORE_FLAG_VALUE
7284 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7285 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7286 && merge_outer_ops (&outer_op, &outer_const, XOR,
7287 (HOST_WIDE_INT) 1, result_mode,
7288 &complement_p))
7289 {
7290 varop = XEXP (varop, 0);
7291 count = 0;
7292 continue;
7293 }
7294 break;
7295
7296 case NEG:
7297 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7298 than the number of bits in the mode is equivalent to A. */
7299 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7300 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7301 {
7302 varop = XEXP (varop, 0);
7303 count = 0;
7304 continue;
7305 }
7306
7307 /* NEG commutes with ASHIFT since it is multiplication. Move the
7308 NEG outside to allow shifts to combine. */
7309 if (code == ASHIFT
7310 && merge_outer_ops (&outer_op, &outer_const, NEG,
7311 (HOST_WIDE_INT) 0, result_mode,
7312 &complement_p))
7313 {
7314 varop = XEXP (varop, 0);
7315 continue;
7316 }
7317 break;
7318
7319 case PLUS:
7320 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7321 is one less than the number of bits in the mode is
7322 equivalent to (xor A 1). */
7323 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7324 && XEXP (varop, 1) == constm1_rtx
7325 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7326 && merge_outer_ops (&outer_op, &outer_const, XOR,
7327 (HOST_WIDE_INT) 1, result_mode,
7328 &complement_p))
7329 {
7330 count = 0;
7331 varop = XEXP (varop, 0);
7332 continue;
7333 }
7334
7335 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7336 that might be nonzero in BAR are those being shifted out and those
7337 bits are known zero in FOO, we can replace the PLUS with FOO.
7338 Similarly in the other operand order. This code occurs when
7339 we are computing the size of a variable-size array. */
7340
7341 if ((code == ASHIFTRT || code == LSHIFTRT)
7342 && count < HOST_BITS_PER_WIDE_INT
7343 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7344 && (nonzero_bits (XEXP (varop, 1), result_mode)
7345 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7346 {
7347 varop = XEXP (varop, 0);
7348 continue;
7349 }
7350 else if ((code == ASHIFTRT || code == LSHIFTRT)
7351 && count < HOST_BITS_PER_WIDE_INT
7352 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7353 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7354 >> count)
7355 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7356 & nonzero_bits (XEXP (varop, 1),
7357 result_mode)))
7358 {
7359 varop = XEXP (varop, 1);
7360 continue;
7361 }
7362
7363 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7364 if (code == ASHIFT
7365 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7366 && (new = simplify_binary_operation (ASHIFT, result_mode,
7367 XEXP (varop, 1),
7368 GEN_INT (count))) != 0
7369 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7370 INTVAL (new), result_mode, &complement_p))
7371 {
7372 varop = XEXP (varop, 0);
7373 continue;
7374 }
7375 break;
7376
7377 case MINUS:
7378 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7379 with C the size of VAROP - 1 and the shift is logical if
7380 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7381 we have a (gt X 0) operation. If the shift is arithmetic with
7382 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7383 we have a (neg (gt X 0)) operation. */
7384
7385 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7386 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7387 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7388 && (code == LSHIFTRT || code == ASHIFTRT)
7389 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7390 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7391 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7392 {
7393 count = 0;
7394 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7395 const0_rtx);
7396
7397 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7398 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7399
7400 continue;
7401 }
7402 break;
7403 }
7404
7405 break;
7406 }
7407
7408 /* We need to determine what mode to do the shift in. If the shift is
7409 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7410 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7411 The code we care about is that of the shift that will actually be done,
7412 not the shift that was originally requested. */
7413 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7414
7415 /* We have now finished analyzing the shift. The result should be
7416 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7417 OUTER_OP is non-NIL, it is an operation that needs to be applied
7418 to the result of the shift. OUTER_CONST is the relevant constant,
7419 but we must turn off all bits turned off in the shift.
7420
7421 If we were passed a value for X, see if we can use any pieces of
7422 it. If not, make a new rtx. */
7423
7424 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7425 && GET_CODE (XEXP (x, 1)) == CONST_INT
7426 && INTVAL (XEXP (x, 1)) == count)
7427 const_rtx = XEXP (x, 1);
7428 else
7429 const_rtx = GEN_INT (count);
7430
7431 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7432 && GET_MODE (XEXP (x, 0)) == shift_mode
7433 && SUBREG_REG (XEXP (x, 0)) == varop)
7434 varop = XEXP (x, 0);
7435 else if (GET_MODE (varop) != shift_mode)
7436 varop = gen_lowpart_for_combine (shift_mode, varop);
7437
7438 /* If we can't make the SUBREG, try to return what we were given. */
7439 if (GET_CODE (varop) == CLOBBER)
7440 return x ? x : varop;
7441
7442 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7443 if (new != 0)
7444 x = new;
7445 else
7446 {
7447 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7448 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7449
7450 SUBST (XEXP (x, 0), varop);
7451 SUBST (XEXP (x, 1), const_rtx);
7452 }
7453
7454 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7455 turn off all the bits that the shift would have turned off. */
7456 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7457 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7458 GET_MODE_MASK (result_mode) >> orig_count);
7459
7460 /* Do the remainder of the processing in RESULT_MODE. */
7461 x = gen_lowpart_for_combine (result_mode, x);
7462
7463 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7464 operation. */
7465 if (complement_p)
7466 x = gen_unary (NOT, result_mode, x);
7467
7468 if (outer_op != NIL)
7469 {
7470 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7471 outer_const &= GET_MODE_MASK (result_mode);
7472
7473 if (outer_op == AND)
7474 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7475 else if (outer_op == SET)
7476 /* This means that we have determined that the result is
7477 equivalent to a constant. This should be rare. */
7478 x = GEN_INT (outer_const);
7479 else if (GET_RTX_CLASS (outer_op) == '1')
7480 x = gen_unary (outer_op, result_mode, x);
7481 else
7482 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7483 }
7484
7485 return x;
7486 }
7487 \f
7488 /* Like recog, but we receive the address of a pointer to a new pattern.
7489 We try to match the rtx that the pointer points to.
7490 If that fails, we may try to modify or replace the pattern,
7491 storing the replacement into the same pointer object.
7492
7493 Modifications include deletion or addition of CLOBBERs.
7494
7495 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7496 the CLOBBERs are placed.
7497
7498 The value is the final insn code from the pattern ultimately matched,
7499 or -1. */
7500
7501 static int
7502 recog_for_combine (pnewpat, insn, pnotes)
7503 rtx *pnewpat;
7504 rtx insn;
7505 rtx *pnotes;
7506 {
7507 register rtx pat = *pnewpat;
7508 int insn_code_number;
7509 int num_clobbers_to_add = 0;
7510 int i;
7511 rtx notes = 0;
7512
7513 /* Is the result of combination a valid instruction? */
7514 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7515
7516 /* If it isn't, there is the possibility that we previously had an insn
7517 that clobbered some register as a side effect, but the combined
7518 insn doesn't need to do that. So try once more without the clobbers
7519 unless this represents an ASM insn. */
7520
7521 if (insn_code_number < 0 && ! check_asm_operands (pat)
7522 && GET_CODE (pat) == PARALLEL)
7523 {
7524 int pos;
7525
7526 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7527 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7528 {
7529 if (i != pos)
7530 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7531 pos++;
7532 }
7533
7534 SUBST_INT (XVECLEN (pat, 0), pos);
7535
7536 if (pos == 1)
7537 pat = XVECEXP (pat, 0, 0);
7538
7539 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7540 }
7541
7542 /* If we had any clobbers to add, make a new pattern that contains
7543 them. Then check to make sure that all of them are dead. */
7544 if (num_clobbers_to_add)
7545 {
7546 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7547 gen_rtvec (GET_CODE (pat) == PARALLEL
7548 ? XVECLEN (pat, 0) + num_clobbers_to_add
7549 : num_clobbers_to_add + 1));
7550
7551 if (GET_CODE (pat) == PARALLEL)
7552 for (i = 0; i < XVECLEN (pat, 0); i++)
7553 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7554 else
7555 XVECEXP (newpat, 0, 0) = pat;
7556
7557 add_clobbers (newpat, insn_code_number);
7558
7559 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7560 i < XVECLEN (newpat, 0); i++)
7561 {
7562 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7563 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7564 return -1;
7565 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7566 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7567 }
7568 pat = newpat;
7569 }
7570
7571 *pnewpat = pat;
7572 *pnotes = notes;
7573
7574 return insn_code_number;
7575 }
7576 \f
7577 /* Like gen_lowpart but for use by combine. In combine it is not possible
7578 to create any new pseudoregs. However, it is safe to create
7579 invalid memory addresses, because combine will try to recognize
7580 them and all they will do is make the combine attempt fail.
7581
7582 If for some reason this cannot do its job, an rtx
7583 (clobber (const_int 0)) is returned.
7584 An insn containing that will not be recognized. */
7585
7586 #undef gen_lowpart
7587
7588 static rtx
7589 gen_lowpart_for_combine (mode, x)
7590 enum machine_mode mode;
7591 register rtx x;
7592 {
7593 rtx result;
7594
7595 if (GET_MODE (x) == mode)
7596 return x;
7597
7598 /* We can only support MODE being wider than a word if X is a
7599 constant integer or has a mode the same size. */
7600
7601 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7602 && ! ((GET_MODE (x) == VOIDmode
7603 && (GET_CODE (x) == CONST_INT
7604 || GET_CODE (x) == CONST_DOUBLE))
7605 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
7606 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7607
7608 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7609 won't know what to do. So we will strip off the SUBREG here and
7610 process normally. */
7611 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7612 {
7613 x = SUBREG_REG (x);
7614 if (GET_MODE (x) == mode)
7615 return x;
7616 }
7617
7618 result = gen_lowpart_common (mode, x);
7619 if (result)
7620 return result;
7621
7622 if (GET_CODE (x) == MEM)
7623 {
7624 register int offset = 0;
7625 rtx new;
7626
7627 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7628 address. */
7629 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7630 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7631
7632 /* If we want to refer to something bigger than the original memref,
7633 generate a perverse subreg instead. That will force a reload
7634 of the original memref X. */
7635 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7636 return gen_rtx (SUBREG, mode, x, 0);
7637
7638 #if WORDS_BIG_ENDIAN
7639 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7640 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7641 #endif
7642 #if BYTES_BIG_ENDIAN
7643 /* Adjust the address so that the address-after-the-data
7644 is unchanged. */
7645 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7646 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7647 #endif
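/* For example (a sketch): the QImode lowpart of an SImode MEM on a
   machine with both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN gets
   offset 0 - (1 - 4) = 3, addressing the last byte so that the
   address-after-the-data is unchanged.  */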
7648 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7649 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7650 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7651 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7652 return new;
7653 }
7654
7655 /* If X is a comparison operator, rewrite it in a new mode. This
7656 probably won't match, but may allow further simplifications. */
7657 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7658 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7659
7660 /* If we couldn't simplify X any other way, just enclose it in a
7661 SUBREG. Normally, this SUBREG won't match, but some patterns may
7662 include an explicit SUBREG or we may simplify it further in combine. */
7663 else
7664 {
7665 int word = 0;
7666
7667 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7668 word = ((GET_MODE_SIZE (GET_MODE (x))
7669 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7670 / UNITS_PER_WORD);
7671 return gen_rtx (SUBREG, mode, x, word);
7672 }
7673 }
7674 \f
7675 /* Make an rtx expression. This is a subset of gen_rtx and only supports
7676 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
7677
7678 If the identical expression was previously in the insn (in the undobuf),
7679 it will be returned. Only if it is not found will a new expression
7680 be made. */
7681
7682 /*VARARGS2*/
7683 static rtx
7684 gen_rtx_combine (va_alist)
7685 va_dcl
7686 {
7687 va_list p;
7688 enum rtx_code code;
7689 enum machine_mode mode;
7690 int n_args;
7691 rtx args[3];
7692 int i, j;
7693 char *fmt;
7694 rtx rt;
7695
7696 va_start (p);
7697 code = va_arg (p, enum rtx_code);
7698 mode = va_arg (p, enum machine_mode);
7699 n_args = GET_RTX_LENGTH (code);
7700 fmt = GET_RTX_FORMAT (code);
7701
7702 if (n_args == 0 || n_args > 3)
7703 abort ();
7704
7705 /* Get each arg and verify that it is supposed to be an expression. */
7706 for (j = 0; j < n_args; j++)
7707 {
7708 if (*fmt++ != 'e')
7709 abort ();
7710
7711 args[j] = va_arg (p, rtx);
7712 }
7713
7714 /* See if this is in undobuf. Be sure we don't use objects that came
7715 from another insn; this could produce circular rtl structures. */
7716
7717 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7718 if (!undobuf.undo[i].is_int
7719 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7720 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7721 {
7722 for (j = 0; j < n_args; j++)
7723 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7724 break;
7725
7726 if (j == n_args)
7727 return undobuf.undo[i].old_contents.rtx;
7728 }
7729
7730 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7731 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7732 rt = rtx_alloc (code);
7733 PUT_MODE (rt, mode);
7734 XEXP (rt, 0) = args[0];
7735 if (n_args > 1)
7736 {
7737 XEXP (rt, 1) = args[1];
7738 if (n_args > 2)
7739 XEXP (rt, 2) = args[2];
7740 }
7741 return rt;
7742 }
7743
7744 /* These routines make binary and unary operations by first seeing if they
7745 fold; if not, a new expression is allocated. */
7746
7747 static rtx
7748 gen_binary (code, mode, op0, op1)
7749 enum rtx_code code;
7750 enum machine_mode mode;
7751 rtx op0, op1;
7752 {
7753 rtx result;
7754 rtx tem;
7755
7756 if (GET_RTX_CLASS (code) == 'c'
7757 && (GET_CODE (op0) == CONST_INT
7758 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7759 tem = op0, op0 = op1, op1 = tem;
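/* For example, (plus (const_int 4) (reg)) is commuted here so that
   both folding and matching see the canonical (plus (reg) (const_int 4)).  */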
7760
7761 if (GET_RTX_CLASS (code) == '<')
7762 {
7763 enum machine_mode op_mode = GET_MODE (op0);
7764 if (op_mode == VOIDmode)
7765 op_mode = GET_MODE (op1);
7766 result = simplify_relational_operation (code, op_mode, op0, op1);
7767 }
7768 else
7769 result = simplify_binary_operation (code, mode, op0, op1);
7770
7771 if (result)
7772 return result;
7773
7774 /* Put complex operands first and constants second. */
7775 if (GET_RTX_CLASS (code) == 'c'
7776 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7777 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7778 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7779 || (GET_CODE (op0) == SUBREG
7780 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7781 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7782 return gen_rtx_combine (code, mode, op1, op0);
7783
7784 return gen_rtx_combine (code, mode, op0, op1);
7785 }
7786
7787 static rtx
7788 gen_unary (code, mode, op0)
7789 enum rtx_code code;
7790 enum machine_mode mode;
7791 rtx op0;
7792 {
7793 rtx result = simplify_unary_operation (code, mode, op0, mode);
7794
7795 if (result)
7796 return result;
7797
7798 return gen_rtx_combine (code, mode, op0);
7799 }
7800 \f
7801 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7802 comparison code that will be tested.
7803
7804 The result is a possibly different comparison code to use. *POP0 and
7805 *POP1 may be updated.
7806
7807 It is possible that we might detect that a comparison is either always
7808 true or always false. However, we do not perform general constant
7809 folding in combine, so this knowledge isn't useful. Such tautologies
7810 should have been detected earlier. Hence we ignore all such cases. */
7811
7812 static enum rtx_code
7813 simplify_comparison (code, pop0, pop1)
7814 enum rtx_code code;
7815 rtx *pop0;
7816 rtx *pop1;
7817 {
7818 rtx op0 = *pop0;
7819 rtx op1 = *pop1;
7820 rtx tem, tem1;
7821 int i;
7822 enum machine_mode mode, tmode;
7823
7824 /* Try a few ways of applying the same transformation to both operands. */
7825 while (1)
7826 {
7827 /* If both operands are the same constant shift, see if we can ignore the
7828 shift. We can if the shift is a rotate or if the bits shifted out of
7829 this shift are known to be zero for both inputs and if the type of
7830 comparison is compatible with the shift. */
7831 if (GET_CODE (op0) == GET_CODE (op1)
7832 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7833 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7834 || ((GET_CODE (op0) == LSHIFTRT
7835 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7836 && (code != GT && code != LT && code != GE && code != LE))
7837 || (GET_CODE (op0) == ASHIFTRT
7838 && (code != GTU && code != LTU
7839 && code != GEU && code != LEU)))
7840 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7841 && INTVAL (XEXP (op0, 1)) >= 0
7842 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7843 && XEXP (op0, 1) == XEXP (op1, 1))
7844 {
7845 enum machine_mode mode = GET_MODE (op0);
7846 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7847 int shift_count = INTVAL (XEXP (op0, 1));
7848
7849 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7850 mask &= (mask >> shift_count) << shift_count;
7851 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7852 mask = (mask & (mask << shift_count)) >> shift_count;
7853
7854 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7855 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7856 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7857 else
7858 break;
7859 }
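/* For example, comparing (lshiftrt:QI A 2) with (lshiftrt:QI B 2) for
   equality: the surviving mask is 0xfc, so if neither A nor B has
   nonzero bits outside 0xfc, the shifts discard only known-zero bits
   and we can compare A with B directly.  */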
7860
7861 /* If both operands are ANDs of a paradoxical SUBREG with a constant, the
7862 SUBREGs are of the same mode, and, in both cases, the AND would
7863 be redundant if the comparison was done in the narrower mode,
7864 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7865 and the operand's possibly nonzero bits are 0xffffff01; in that case
7866 if we only care about QImode, we don't need the AND). This case
7867 occurs if the output mode of an scc insn is not SImode and
7868 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7869
7870 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7871 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7872 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7873 && GET_CODE (XEXP (op0, 0)) == SUBREG
7874 && GET_CODE (XEXP (op1, 0)) == SUBREG
7875 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7876 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7877 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7878 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7879 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7880 <= HOST_BITS_PER_WIDE_INT)
7881 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
7882 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7883 & ~ INTVAL (XEXP (op0, 1))) == 0
7884 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
7885 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7886 & ~ INTVAL (XEXP (op1, 1))) == 0)
7887 {
7888 op0 = SUBREG_REG (XEXP (op0, 0));
7889 op1 = SUBREG_REG (XEXP (op1, 0));
7890
7891 /* The resulting comparison is always unsigned since we masked off
7892 the original sign bit. */
7893 code = unsigned_condition (code);
7894 }
7895 else
7896 break;
7897 }
7898
7899 /* If the first operand is a constant, swap the operands and adjust the
7900 comparison code appropriately. */
7901 if (CONSTANT_P (op0))
7902 {
7903 tem = op0, op0 = op1, op1 = tem;
7904 code = swap_condition (code);
7905 }
7906
7907 /* We now enter a loop during which we will try to simplify the comparison.
7908 For the most part, we are only concerned with comparisons with zero,
7909 but some things may really be comparisons with zero but not start
7910 out looking that way. */
7911
7912 while (GET_CODE (op1) == CONST_INT)
7913 {
7914 enum machine_mode mode = GET_MODE (op0);
7915 int mode_width = GET_MODE_BITSIZE (mode);
7916 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7917 int equality_comparison_p;
7918 int sign_bit_comparison_p;
7919 int unsigned_comparison_p;
7920 HOST_WIDE_INT const_op;
7921
7922 /* We only want to handle integral modes. This catches VOIDmode,
7923 CCmode, and the floating-point modes. An exception is that we
7924 can handle VOIDmode if OP0 is a COMPARE or a comparison
7925 operation. */
7926
7927 if (GET_MODE_CLASS (mode) != MODE_INT
7928 && ! (mode == VOIDmode
7929 && (GET_CODE (op0) == COMPARE
7930 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7931 break;
7932
7933 /* Get the constant we are comparing against and turn off all bits
7934 not on in our mode. */
7935 const_op = INTVAL (op1);
7936 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7937 const_op &= mask;
7938
7939 /* If we are comparing against a constant power of two and the value
7940 being compared can only have that single bit nonzero (e.g., it was
7941 `and'ed with that bit), we can replace this with a comparison
7942 with zero. */
7943 if (const_op
7944 && (code == EQ || code == NE || code == GE || code == GEU
7945 || code == LT || code == LTU)
7946 && mode_width <= HOST_BITS_PER_WIDE_INT
7947 && exact_log2 (const_op) >= 0
7948 && nonzero_bits (op0, mode) == const_op)
7949 {
7950 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7951 op1 = const0_rtx, const_op = 0;
7952 }
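/* For example, (eq (and:SI X 8) 8) becomes (ne (and:SI X 8) 0),
   since (and:SI X 8) can only be 0 or 8.  */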
7953
7954 /* Similarly, if we are comparing a value known to be either -1 or
7955 0 with -1, change it to the opposite comparison against zero. */
7956
7957 if (const_op == -1
7958 && (code == EQ || code == NE || code == GT || code == LE
7959 || code == GEU || code == LTU)
7960 && num_sign_bit_copies (op0, mode) == mode_width)
7961 {
7962 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7963 op1 = const0_rtx, const_op = 0;
7964 }
7965
7966 /* Do some canonicalizations based on the comparison code. We prefer
7967 comparisons against zero and then prefer equality comparisons.
7968 If we can reduce the size of a constant, we will do that too. */
7969
7970 switch (code)
7971 {
7972 case LT:
7973 /* < C is equivalent to <= (C - 1) */
7974 if (const_op > 0)
7975 {
7976 const_op -= 1;
7977 op1 = GEN_INT (const_op);
7978 code = LE;
7979 /* ... fall through to LE case below. */
7980 }
7981 else
7982 break;
7983
7984 case LE:
7985 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7986 if (const_op < 0)
7987 {
7988 const_op += 1;
7989 op1 = GEN_INT (const_op);
7990 code = LT;
7991 }
7992
7993 /* If we are doing a <= 0 comparison on a value known to have
7994 a zero sign bit, we can replace this with == 0. */
7995 else if (const_op == 0
7996 && mode_width <= HOST_BITS_PER_WIDE_INT
7997 && (nonzero_bits (op0, mode)
7998 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7999 code = EQ;
8000 break;
8001
8002 case GE:
8003 /* >= C is equivalent to > (C - 1). */
8004 if (const_op > 0)
8005 {
8006 const_op -= 1;
8007 op1 = GEN_INT (const_op);
8008 code = GT;
8009 /* ... fall through to GT below. */
8010 }
8011 else
8012 break;
8013
8014 case GT:
8015 /* > C is equivalent to >= (C + 1); we do this for C < 0 */
8016 if (const_op < 0)
8017 {
8018 const_op += 1;
8019 op1 = GEN_INT (const_op);
8020 code = GE;
8021 }
8022
8023 /* If we are doing a > 0 comparison on a value known to have
8024 a zero sign bit, we can replace this with != 0. */
8025 else if (const_op == 0
8026 && mode_width <= HOST_BITS_PER_WIDE_INT
8027 && (nonzero_bits (op0, mode)
8028 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8029 code = NE;
8030 break;
8031
8032 case LTU:
8033 /* < C is equivalent to <= (C - 1). */
8034 if (const_op > 0)
8035 {
8036 const_op -= 1;
8037 op1 = GEN_INT (const_op);
8038 code = LEU;
8039 /* ... fall through ... */
8040 }
8041
8042 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8043 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8044 {
8045 const_op = 0, op1 = const0_rtx;
8046 code = GE;
8047 break;
8048 }
8049 else
8050 break;
8051
8052 case LEU:
8053 /* unsigned <= 0 is equivalent to == 0 */
8054 if (const_op == 0)
8055 code = EQ;
8056
8057 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8058 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8059 {
8060 const_op = 0, op1 = const0_rtx;
8061 code = GE;
8062 }
8063 break;
8064
8065 case GEU:
8066 /* (unsigned) >= C is equivalent to (unsigned) > (C - 1). */
8067 if (const_op > 1)
8068 {
8069 const_op -= 1;
8070 op1 = GEN_INT (const_op);
8071 code = GTU;
8072 /* ... fall through ... */
8073 }
8074
8075 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8076 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8077 {
8078 const_op = 0, op1 = const0_rtx;
8079 code = LT;
8080 }
8081 else
8082 break;
8083
8084 case GTU:
8085 /* unsigned > 0 is equivalent to != 0 */
8086 if (const_op == 0)
8087 code = NE;
8088
8089 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8090 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8091 {
8092 const_op = 0, op1 = const0_rtx;
8093 code = LT;
8094 }
8095 break;
8096 }
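/* For example, in a 32-bit mode the rules above rewrite (lt X 5) as
   (le X 4), (geu X 7) as (gtu X 6), (leu X 0) as (eq X 0), and
   (ltu X 0x80000000) as (ge X 0), so the code below mostly sees
   equality tests and comparisons against zero.  */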
8097
8098 /* Compute some predicates to simplify code below. */
8099
8100 equality_comparison_p = (code == EQ || code == NE);
8101 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8102 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8103 || code == GEU);
8104
8105 /* Now try cases based on the opcode of OP0. If none of the cases
8106 does a "continue", we exit this loop immediately after the
8107 switch. */
8108
8109 switch (GET_CODE (op0))
8110 {
8111 case ZERO_EXTRACT:
8112 /* If we are extracting a single bit from a variable position in
8113 a constant that has only a single bit set and are comparing it
8114 with zero, we can convert this into an equality comparison
8115 between the position and the location of the single bit. We can't
8116 do this if bits are big-endian and we don't have an extzv, since we
8117 then can't know what mode to use for the endianness adjustment. */
8118
8119 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8120 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8121 && XEXP (op0, 1) == const1_rtx
8122 && equality_comparison_p && const_op == 0
8123 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8124 {
8125 #if BITS_BIG_ENDIAN
8126 i = (GET_MODE_BITSIZE
8127 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8128 #endif
8129
8130 op0 = XEXP (op0, 2);
8131 op1 = GEN_INT (i);
8132 const_op = i;
8133
8134 /* Result is nonzero iff shift count is equal to I. */
8135 code = reverse_condition (code);
8136 continue;
8137 }
8138 #endif
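/* For instance, with little-endian bit numbering,
   (eq (zero_extract (const_int 4) (const_int 1) X) (const_int 0))
   asks whether bit X of the constant 4 is clear; since only bit 2
   is set there, this becomes (ne X (const_int 2)).  */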
8139
8140 /* ... fall through ... */
8141
8142 case SIGN_EXTRACT:
8143 tem = expand_compound_operation (op0);
8144 if (tem != op0)
8145 {
8146 op0 = tem;
8147 continue;
8148 }
8149 break;
8150
8151 case NOT:
8152 /* If testing for equality, we can take the NOT of the constant. */
8153 if (equality_comparison_p
8154 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8155 {
8156 op0 = XEXP (op0, 0);
8157 op1 = tem;
8158 continue;
8159 }
8160
8161 /* If just looking at the sign bit, reverse the sense of the
8162 comparison. */
8163 if (sign_bit_comparison_p)
8164 {
8165 op0 = XEXP (op0, 0);
8166 code = (code == GE ? LT : GE);
8167 continue;
8168 }
8169 break;
8170
8171 case NEG:
8172 /* If testing for equality, we can take the NEG of the constant. */
8173 if (equality_comparison_p
8174 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8175 {
8176 op0 = XEXP (op0, 0);
8177 op1 = tem;
8178 continue;
8179 }
8180
8181 /* The remaining cases only apply to comparisons with zero. */
8182 if (const_op != 0)
8183 break;
8184
8185 /* When X is ABS or is known positive,
8186 (neg X) is < 0 if and only if X != 0. */
8187
8188 if (sign_bit_comparison_p
8189 && (GET_CODE (XEXP (op0, 0)) == ABS
8190 || (mode_width <= HOST_BITS_PER_WIDE_INT
8191 && (nonzero_bits (XEXP (op0, 0), mode)
8192 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8193 {
8194 op0 = XEXP (op0, 0);
8195 code = (code == LT ? NE : EQ);
8196 continue;
8197 }
8198
8199 /* If we have NEG of something whose two high-order bits are the
8200 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8201 if (num_sign_bit_copies (op0, mode) >= 2)
8202 {
8203 op0 = XEXP (op0, 0);
8204 code = swap_condition (code);
8205 continue;
8206 }
8207 break;
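/* For example, (lt (neg X) 0) becomes (ne X 0) when X is known to be
   nonnegative, and becomes (gt X 0) when the two high-order bits of
   the NEG are known to be identical, so the negation cannot
   overflow.  */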
8208
8209 case ROTATE:
8210 /* If we are testing equality and our count is a constant, we
8211 can perform the inverse operation on our RHS. */
8212 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8213 && (tem = simplify_binary_operation (ROTATERT, mode,
8214 op1, XEXP (op0, 1))) != 0)
8215 {
8216 op0 = XEXP (op0, 0);
8217 op1 = tem;
8218 continue;
8219 }
8220
8221 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8222 a particular bit. Convert it to an AND of a constant of that
8223 bit. This will be converted into a ZERO_EXTRACT. */
8224 if (const_op == 0 && sign_bit_comparison_p
8225 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8226 && mode_width <= HOST_BITS_PER_WIDE_INT)
8227 {
8228 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8229 ((HOST_WIDE_INT) 1
8230 << (mode_width - 1
8231 - INTVAL (XEXP (op0, 1)))));
8232 code = (code == LT ? NE : EQ);
8233 continue;
8234 }
8235
8236 /* ... fall through ... */
8237
8238 case ABS:
8239 /* ABS is ignorable inside an equality comparison with zero. */
8240 if (const_op == 0 && equality_comparison_p)
8241 {
8242 op0 = XEXP (op0, 0);
8243 continue;
8244 }
8245 break;
8246
8247
8248 case SIGN_EXTEND:
8249 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8250 to (compare FOO CONST) if CONST fits in FOO's mode and we
8251 are either testing equality or inequality, or have an unsigned comparison
8252 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8253 if (! unsigned_comparison_p
8254 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8255 <= HOST_BITS_PER_WIDE_INT)
8256 && ((unsigned HOST_WIDE_INT) const_op
8257 < (((HOST_WIDE_INT) 1
8258 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8259 {
8260 op0 = XEXP (op0, 0);
8261 continue;
8262 }
8263 break;
8264
8265 case SUBREG:
8266 /* Check for the case where we are comparing A - C1 with C2,
8267 both constants are smaller than 1/2 the maximum positive
8268 value in MODE, and the comparison is equality or unsigned.
8269 In that case, if A is either zero-extended to MODE or has
8270 sufficient sign bits so that the high-order bit in MODE
8271 is a copy of the sign in the inner mode, we can prove that it is
8272 safe to do the operation in the wider mode. This simplifies
8273 many range checks. */
8274
8275 if (mode_width <= HOST_BITS_PER_WIDE_INT
8276 && subreg_lowpart_p (op0)
8277 && GET_CODE (SUBREG_REG (op0)) == PLUS
8278 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8279 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8280 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8281 < GET_MODE_MASK (mode) / 2)
8282 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8283 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8284 GET_MODE (SUBREG_REG (op0)))
8285 & ~ GET_MODE_MASK (mode))
8286 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8287 GET_MODE (SUBREG_REG (op0)))
8288 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8289 - GET_MODE_BITSIZE (mode)))))
8290 {
8291 op0 = SUBREG_REG (op0);
8292 continue;
8293 }
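/* A sketch of the range-check case: if X is known to fit in QImode
   (its high SImode bits are zero), then
   (leu (subreg:QI (plus:SI X (const_int -1)) 0) (const_int 1)),
   a test for X being 1 or 2, is safely widened to
   (leu (plus:SI X (const_int -1)) (const_int 1)).  */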
8294
8295 /* If the inner mode is narrower and we are extracting the low part,
8296 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8297 if (subreg_lowpart_p (op0)
8298 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8299 /* Fall through */ ;
8300 else
8301 break;
8302
8303 /* ... fall through ... */
8304
8305 case ZERO_EXTEND:
8306 if ((unsigned_comparison_p || equality_comparison_p)
8307 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8308 <= HOST_BITS_PER_WIDE_INT)
8309 && ((unsigned HOST_WIDE_INT) const_op
8310 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8311 {
8312 op0 = XEXP (op0, 0);
8313 continue;
8314 }
8315 break;
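/* For example, (eq (zero_extend:SI X) (const_int 200)), with X in
   QImode, becomes (eq X (const_int 200)) done as a QImode
   comparison, since 200 fits in QImode's mask.  */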
8316
8317 case PLUS:
8318 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
8319 this for equality comparisons due to pathological cases involving
8320 overflows. */
8321 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8322 && (tem = simplify_binary_operation (MINUS, mode, op1,
8323 XEXP (op0, 1))) != 0)
8324 {
8325 op0 = XEXP (op0, 0);
8326 op1 = tem;
8327 continue;
8328 }
8329
8330 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8331 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8332 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8333 {
8334 op0 = XEXP (XEXP (op0, 0), 0);
8335 code = (code == LT ? EQ : NE);
8336 continue;
8337 }
8338 break;
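/* For example, (eq (plus X (const_int 3)) (const_int 7)) becomes
   (eq X (const_int 4)), and (lt (plus (abs X) (const_int -1)) 0)
   becomes (eq X (const_int 0)).  */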
8339
8340 case MINUS:
8341 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8342 of bits in X minus 1, is one iff X > 0. */
8343 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8344 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8345 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8346 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8347 {
8348 op0 = XEXP (op0, 1);
8349 code = (code == GE ? LE : GT);
8350 continue;
8351 }
8352 break;
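/* For example, in SImode (ge (minus (ashiftrt X 31) X) 0) becomes
   (le X 0), since the sign bit of the MINUS is set exactly when
   X > 0.  */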
8353
8354 case XOR:
8355 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8356 if C is zero or B is a constant. */
8357 if (equality_comparison_p
8358 && 0 != (tem = simplify_binary_operation (XOR, mode,
8359 XEXP (op0, 1), op1)))
8360 {
8361 op0 = XEXP (op0, 0);
8362 op1 = tem;
8363 continue;
8364 }
8365 break;
8366
8367 case EQ: case NE:
8368 case LT: case LTU: case LE: case LEU:
8369 case GT: case GTU: case GE: case GEU:
8370 /* We can't do anything if OP0 is a condition code value, rather
8371 than an actual data value. */
8372 if (const_op != 0
8373 #ifdef HAVE_cc0
8374 || XEXP (op0, 0) == cc0_rtx
8375 #endif
8376 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8377 break;
8378
8379 /* Get the two operands being compared. */
8380 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8381 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8382 else
8383 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8384
8385 /* Check for the cases where we simply want the result of the
8386 earlier test or the opposite of that result. */
8387 if (code == NE
8388 || (code == EQ && reversible_comparison_p (op0))
8389 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8390 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8391 && (STORE_FLAG_VALUE
8392 & (((HOST_WIDE_INT) 1
8393 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8394 && (code == LT
8395 || (code == GE && reversible_comparison_p (op0)))))
8396 {
8397 code = (code == LT || code == NE
8398 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8399 op0 = tem, op1 = tem1;
8400 continue;
8401 }
8402 break;
8403
8404 case IOR:
8405 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8406 iff X <= 0. */
8407 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8408 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8409 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8410 {
8411 op0 = XEXP (op0, 1);
8412 code = (code == GE ? GT : LE);
8413 continue;
8414 }
8415 break;
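/* For example, (lt (ior (plus X (const_int -1)) X) 0) becomes
   (le X 0).  */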
8416
8417 case AND:
8418 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8419 will be converted to a ZERO_EXTRACT later. */
8420 if (const_op == 0 && equality_comparison_p
8421 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8422 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8423 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8424 {
8425 op0 = simplify_and_const_int
8426 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8427 XEXP (op0, 1),
8428 XEXP (XEXP (op0, 0), 1)),
8429 (HOST_WIDE_INT) 1);
8430 continue;
8431 }
8432
8433 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8434 zero and X is a comparison and C1 and C2 describe only bits set
8435 in STORE_FLAG_VALUE, we can compare with X. */
8436 if (const_op == 0 && equality_comparison_p
8437 && mode_width <= HOST_BITS_PER_WIDE_INT
8438 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8439 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8440 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8441 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8442 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8443 {
8444 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8445 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8446 if ((~ STORE_FLAG_VALUE & mask) == 0
8447 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8448 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8449 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8450 {
8451 op0 = XEXP (XEXP (op0, 0), 0);
8452 continue;
8453 }
8454 }
8455
8456 /* If we are doing an equality comparison of an AND of a bit equal
8457 to the sign bit, replace this with a LT or GE comparison of
8458 the underlying value. */
8459 if (equality_comparison_p
8460 && const_op == 0
8461 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8462 && mode_width <= HOST_BITS_PER_WIDE_INT
8463 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8464 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8465 {
8466 op0 = XEXP (op0, 0);
8467 code = (code == EQ ? GE : LT);
8468 continue;
8469 }
8470
8471 /* If this AND operation is really a ZERO_EXTEND from a narrower
8472 mode, the constant fits within that mode, and this is either an
8473 equality or unsigned comparison, try to do this comparison in
8474 the narrower mode. */
8475 if ((equality_comparison_p || unsigned_comparison_p)
8476 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8477 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8478 & GET_MODE_MASK (mode))
8479 + 1)) >= 0
8480 && const_op >> i == 0
8481 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8482 {
8483 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8484 continue;
8485 }
8486 break;
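/* For example, with an equality or unsigned test,
   (and:SI X (const_int 255)) compared against a constant below 256
   is narrowed to a QImode comparison of the low byte of X.  */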
8487
8488 case ASHIFT:
8489 case LSHIFT:
8490 /* If we have (compare (xshift FOO N) (const_int C)) and
8491 the high order N bits of FOO (N+1 if an inequality comparison)
8492 are known to be zero, we can do this by comparing FOO with C
8493 shifted right N bits so long as the low-order N bits of C are
8494 zero. */
8495 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8496 && INTVAL (XEXP (op0, 1)) >= 0
8497 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8498 < HOST_BITS_PER_WIDE_INT)
8499 && ((const_op
8500 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
8501 && mode_width <= HOST_BITS_PER_WIDE_INT
8502 && (nonzero_bits (XEXP (op0, 0), mode)
8503 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8504 + ! equality_comparison_p))) == 0)
8505 {
8506 const_op >>= INTVAL (XEXP (op0, 1));
8507 op1 = GEN_INT (const_op);
8508 op0 = XEXP (op0, 0);
8509 continue;
8510 }
8511
8512 /* If we are doing a sign bit comparison, it means we are testing
8513 a particular bit. Convert it to the appropriate AND. */
8514 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8515 && mode_width <= HOST_BITS_PER_WIDE_INT)
8516 {
8517 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8518 ((HOST_WIDE_INT) 1
8519 << (mode_width - 1
8520 - INTVAL (XEXP (op0, 1)))));
8521 code = (code == LT ? NE : EQ);
8522 continue;
8523 }
8524
8525 /* If this is an equality comparison with zero and we are shifting
8526 the low bit to the sign bit, we can convert this to an AND of the
8527 low-order bit. */
8528 if (const_op == 0 && equality_comparison_p
8529 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8530 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8531 {
8532 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8533 (HOST_WIDE_INT) 1);
8534 continue;
8535 }
8536 break;
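/* For example, when the two high-order bits of X are known to be
   zero, (eq (ashift:SI X (const_int 2)) (const_int 20)) becomes
   (eq X (const_int 5)); and (lt (ashift:SI X (const_int 3)) 0)
   really tests bit 28 of X, so it is rewritten as an AND of X
   with (const_int 0x10000000) compared against zero.  */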
8537
8538 case ASHIFTRT:
8539 /* If this is an equality comparison with zero, we can do this
8540 as a logical shift, which might be much simpler. */
8541 if (equality_comparison_p && const_op == 0
8542 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8543 {
8544 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8545 XEXP (op0, 0),
8546 INTVAL (XEXP (op0, 1)));
8547 continue;
8548 }
8549
8550 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8551 do the comparison in a narrower mode. */
8552 if (! unsigned_comparison_p
8553 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8554 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8555 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8556 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8557 MODE_INT, 1)) != BLKmode
8558 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8559 || ((unsigned HOST_WIDE_INT) - const_op
8560 <= GET_MODE_MASK (tmode))))
8561 {
8562 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8563 continue;
8564 }
8565
8566 /* ... fall through ... */
8567 case LSHIFTRT:
8568 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8569 the low order N bits of FOO are known to be zero, we can do this
8570 by comparing FOO with C shifted left N bits so long as no
8571 overflow occurs. */
8572 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8573 && INTVAL (XEXP (op0, 1)) >= 0
8574 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8575 && mode_width <= HOST_BITS_PER_WIDE_INT
8576 && (nonzero_bits (XEXP (op0, 0), mode)
8577 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8578 && (const_op == 0
8579 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8580 < mode_width)))
8581 {
8582 const_op <<= INTVAL (XEXP (op0, 1));
8583 op1 = GEN_INT (const_op);
8584 op0 = XEXP (op0, 0);
8585 continue;
8586 }
8587
8588 /* If we are using this shift to extract just the sign bit, we
8589 can replace this with an LT or GE comparison. */
8590 if (const_op == 0
8591 && (equality_comparison_p || sign_bit_comparison_p)
8592 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8593 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8594 {
8595 op0 = XEXP (op0, 0);
8596 code = (code == NE || code == GT ? LT : GE);
8597 continue;
8598 }
8599 break;
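/* For example, (ne (lshiftrt:SI X (const_int 31)) (const_int 0))
   extracts just the sign bit, so it becomes (lt X (const_int 0)).  */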
8600 }
8601
8602 break;
8603 }
8604
8605 /* Now make any compound operations involved in this comparison. Then,
8606 check for an outermost SUBREG on OP0 that isn't doing anything or is
8607 paradoxical. The latter case can only occur when it is known that the
8608 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8609 We can never remove a SUBREG for a non-equality comparison because the
8610 sign bit is in a different place in the underlying object. */
8611
8612 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8613 op1 = make_compound_operation (op1, SET);
8614
8615 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8616 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8617 && (code == NE || code == EQ)
8618 && ((GET_MODE_SIZE (GET_MODE (op0))
8619 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8620 {
8621 op0 = SUBREG_REG (op0);
8622 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8623 }
8624
8625 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8626 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8627 && (code == NE || code == EQ)
8628 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8629 <= HOST_BITS_PER_WIDE_INT)
8630 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8631 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8632 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8633 op1),
8634 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
8635 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8636 op0 = SUBREG_REG (op0), op1 = tem;
8637
8638 /* We now do the opposite procedure: Some machines don't have compare
8639 insns in all modes. If OP0's mode is an integer mode smaller than a
8640 word and we can't do a compare in that mode, see if there is a larger
8641 mode for which we can do the compare. There are a number of cases in
8642 which we can use the wider mode. */
8643
8644 mode = GET_MODE (op0);
8645 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8646 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8647 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8648 for (tmode = GET_MODE_WIDER_MODE (mode);
8649 (tmode != VOIDmode
8650 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8651 tmode = GET_MODE_WIDER_MODE (tmode))
8652 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8653 {
8654 /* If the only nonzero bits in OP0 and OP1 are those in the
8655 narrower mode and this is an equality or unsigned comparison,
8656 we can use the wider mode. Similarly for sign-extended
8657 values and equality or signed comparisons. */
8658 if (((code == EQ || code == NE
8659 || code == GEU || code == GTU || code == LEU || code == LTU)
8660 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
8661 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
8662 || ((code == EQ || code == NE
8663 || code == GE || code == GT || code == LE || code == LT)
8664 && (num_sign_bit_copies (op0, tmode)
8665 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8666 && (num_sign_bit_copies (op1, tmode)
8667 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8668 {
8669 op0 = gen_lowpart_for_combine (tmode, op0);
8670 op1 = gen_lowpart_for_combine (tmode, op1);
8671 break;
8672 }
8673
8674 /* If this is a test for negative, we can make an explicit
8675 test of the sign bit. */
8676
8677 if (op1 == const0_rtx && (code == LT || code == GE)
8678 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8679 {
8680 op0 = gen_binary (AND, tmode,
8681 gen_lowpart_for_combine (tmode, op0),
8682 GEN_INT ((HOST_WIDE_INT) 1
8683 << (GET_MODE_BITSIZE (mode) - 1)));
8684 code = (code == LT) ? NE : EQ;
8685 break;
8686 }
8687 }
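/* A sketch of the widening case: on a hypothetical target with no
   QImode compare insn, (lt:QI X (const_int 0)) can be rewritten in
   a wider mode TMODE as (ne (and:TMODE X' (const_int 0x80))
   (const_int 0)), where X' is X viewed in TMODE.  */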
8688
8689 *pop0 = op0;
8690 *pop1 = op1;
8691
8692 return code;
8693 }
8694 \f
8695 /* Return 1 if we know that X, a comparison operation, is not operating
8696 on a floating-point value or is EQ or NE, meaning that we can safely
8697 reverse it. */
8698
8699 static int
8700 reversible_comparison_p (x)
8701 rtx x;
8702 {
8703 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8704 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8705 return 1;
8706
8707 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8708 {
8709 case MODE_INT:
8710 return 1;
8711
8712 case MODE_CC:
8713 x = get_last_value (XEXP (x, 0));
8714 return (x && GET_CODE (x) == COMPARE
8715 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8716 }
8717
8718 return 0;
8719 }
8720 \f
8721 /* Utility function for the following routine. Called when X is part of a value
8722 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8723 for each register mentioned. Similar to mention_regs in cse.c */
8724
8725 static void
8726 update_table_tick (x)
8727 rtx x;
8728 {
8729 register enum rtx_code code = GET_CODE (x);
8730 register char *fmt = GET_RTX_FORMAT (code);
8731 register int i;
8732
8733 if (code == REG)
8734 {
8735 int regno = REGNO (x);
8736 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8737 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8738
8739 for (i = regno; i < endregno; i++)
8740 reg_last_set_table_tick[i] = label_tick;
8741
8742 return;
8743 }
8744
8745 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8746 /* Note that we can't have an "E" in values stored; see
8747 get_last_value_validate. */
8748 if (fmt[i] == 'e')
8749 update_table_tick (XEXP (x, i));
8750 }
8751
8752 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8753 are saying that the register is clobbered and we no longer know its
8754 value. If INSN is zero, don't update reg_last_set; this is only permitted
8755 with VALUE also zero and is used to invalidate the register. */
8756
8757 static void
8758 record_value_for_reg (reg, insn, value)
8759 rtx reg;
8760 rtx insn;
8761 rtx value;
8762 {
8763 int regno = REGNO (reg);
8764 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8765 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8766 int i;
8767
8768 /* If VALUE contains REG and we have a previous value for REG, substitute
8769 the previous value. */
8770 if (value && insn && reg_overlap_mentioned_p (reg, value))
8771 {
8772 rtx tem;
8773
8774 /* Set things up so get_last_value is allowed to see anything set up to
8775 our insn. */
8776 subst_low_cuid = INSN_CUID (insn);
8777 tem = get_last_value (reg);
8778
8779 if (tem)
8780 value = replace_rtx (copy_rtx (value), reg, tem);
8781 }
8782
8783 /* For each register modified, show we don't know its value, that
8784 its value has been updated, and that we don't know the location of
8785 the death of the register. */
8786 for (i = regno; i < endregno; i ++)
8787 {
8788 if (insn)
8789 reg_last_set[i] = insn;
8790 reg_last_set_value[i] = 0;
8791 reg_last_death[i] = 0;
8792 }
8793
8794 /* Mark registers that are being referenced in this value. */
8795 if (value)
8796 update_table_tick (value);
8797
8798 /* Now update the status of each register being set.
8799 If someone is using this register in this block, set this register
8800 to invalid since we will get confused between the two lives in this
8801 basic block. This makes using this register always invalid. In cse, we
8802 scan the table to invalidate all entries using this register, but this
8803 is too much work for us. */
8804
8805 for (i = regno; i < endregno; i++)
8806 {
8807 reg_last_set_label[i] = label_tick;
8808 if (value && reg_last_set_table_tick[i] == label_tick)
8809 reg_last_set_invalid[i] = 1;
8810 else
8811 reg_last_set_invalid[i] = 0;
8812 }
8813
8814 /* The value being assigned might refer to REG (like in "x++;"). In that
8815 case, we must replace it with (clobber (const_int 0)) to prevent
8816 infinite loops. */
8817 if (value && ! get_last_value_validate (&value,
8818 reg_last_set_label[regno], 0))
8819 {
8820 value = copy_rtx (value);
8821 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8822 value = 0;
8823 }
8824
8825 /* For the main register being modified, update the value. */
8826 reg_last_set_value[regno] = value;
8827
8828 }
8829
8830 /* Used for communication between the following two routines. */
8831 static rtx record_dead_insn;
8832
8833 /* Called via note_stores from record_dead_and_set_regs to handle one
8834 SET or CLOBBER in an insn. */
8835
8836 static void
8837 record_dead_and_set_regs_1 (dest, setter)
8838 rtx dest, setter;
8839 {
8840 if (GET_CODE (dest) == REG)
8841 {
8842 /* If we are setting the whole register, we know its value. Otherwise
8843 show that we don't know the value. We can handle SUBREG in
8844 some cases. */
8845 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8846 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8847 else if (GET_CODE (setter) == SET
8848 && GET_CODE (SET_DEST (setter)) == SUBREG
8849 && SUBREG_REG (SET_DEST (setter)) == dest
8850 && subreg_lowpart_p (SET_DEST (setter)))
8851 record_value_for_reg (dest, record_dead_insn,
8852 gen_lowpart_for_combine (GET_MODE (dest),
8853 SET_SRC (setter)));
8854 else
8855 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8856 }
8857 else if (GET_CODE (dest) == MEM
8858 /* Ignore pushes, they clobber nothing. */
8859 && ! push_operand (dest, GET_MODE (dest)))
8860 mem_last_set = INSN_CUID (record_dead_insn);
8861 }
8862
8863 /* Update the records of when each REG was most recently set or killed
8864 for the things done by INSN. This is the last thing done in processing
8865 INSN in the combiner loop.
8866
8867 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8868 similar information mem_last_set (which insn most recently modified memory)
8869 and last_call_cuid (which insn was the most recent subroutine call). */
8870
8871 static void
8872 record_dead_and_set_regs (insn)
8873 rtx insn;
8874 {
8875 register rtx link;
8876 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8877 {
8878 if (REG_NOTE_KIND (link) == REG_DEAD)
8879 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8880 else if (REG_NOTE_KIND (link) == REG_INC)
8881 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8882 }
8883
8884 if (GET_CODE (insn) == CALL_INSN)
8885 last_call_cuid = mem_last_set = INSN_CUID (insn);
8886
8887 record_dead_insn = insn;
8888 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8889 }
8890 \f
8891 /* Utility routine for the following function. Verify that all the registers
8892 mentioned in *LOC are valid when *LOC was part of a value set when
8893 label_tick == TICK. Return 0 if some are not.
8894
8895 If REPLACE is non-zero, replace the invalid reference with
8896 (clobber (const_int 0)) and return 1. This replacement is useful because
8897 we often can get useful information about the form of a value (e.g., if
8898 it was produced by a shift that always produces -1 or 0) even though
8899 we don't know exactly what registers it was produced from. */
8900
8901 static int
8902 get_last_value_validate (loc, tick, replace)
8903 rtx *loc;
8904 int tick;
8905 int replace;
8906 {
8907 rtx x = *loc;
8908 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8909 int len = GET_RTX_LENGTH (GET_CODE (x));
8910 int i;
8911
8912 if (GET_CODE (x) == REG)
8913 {
8914 int regno = REGNO (x);
8915 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8916 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8917 int j;
8918
8919 for (j = regno; j < endregno; j++)
8920 if (reg_last_set_invalid[j]
8921 /* If this is a pseudo-register that was only set once, it is
8922 always valid. */
8923 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8924 && reg_last_set_label[j] > tick))
8925 {
8926 if (replace)
8927 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8928 return replace;
8929 }
8930
8931 return 1;
8932 }
8933
8934 for (i = 0; i < len; i++)
8935 if ((fmt[i] == 'e'
8936 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8937 /* Don't bother with these. They shouldn't occur anyway. */
8938 || fmt[i] == 'E')
8939 return 0;
8940
8941 /* If we haven't found a reason for it to be invalid, it is valid. */
8942 return 1;
8943 }
8944
8945 /* Get the last value assigned to X, if known. Some registers
8946 in the value may be replaced with (clobber (const_int 0)) if their value
8947 is no longer known reliably. */
8948
8949 static rtx
8950 get_last_value (x)
8951 rtx x;
8952 {
8953 int regno;
8954 rtx value;
8955
8956 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8957 then convert it to the desired mode. If this is a paradoxical SUBREG,
8958 we cannot predict what values the "extra" bits might have. */
8959 if (GET_CODE (x) == SUBREG
8960 && subreg_lowpart_p (x)
8961 && (GET_MODE_SIZE (GET_MODE (x))
8962 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8963 && (value = get_last_value (SUBREG_REG (x))) != 0)
8964 return gen_lowpart_for_combine (GET_MODE (x), value);
8965
8966 if (GET_CODE (x) != REG)
8967 return 0;
8968
8969 regno = REGNO (x);
8970 value = reg_last_set_value[regno];
8971
8972 /* If we don't have a value or if it isn't for this basic block, return 0. */
8973
8974 if (value == 0
8975 || (reg_n_sets[regno] != 1
8976 && (reg_last_set_label[regno] != label_tick)))
8977 return 0;
8978
8979 /* If the value was set in a later insn than the ones we are processing,
8980 we can't use it even if the register was only set once, but make a quick
8981 check to see if the previous insn set it to something. This is commonly
8982 the case when the same pseudo is used by repeated insns. */
8983
8984 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8985 {
8986 rtx insn, set;
8987
8988 for (insn = prev_nonnote_insn (subst_insn);
8989 insn && INSN_CUID (insn) >= subst_low_cuid;
8990 insn = prev_nonnote_insn (insn))
8991 ;
8992
8993 if (insn
8994 && (set = single_set (insn)) != 0
8995 && rtx_equal_p (SET_DEST (set), x))
8996 {
8997 value = SET_SRC (set);
8998
8999 /* Make sure that VALUE doesn't reference X. Replace any
9000 explicit references with a CLOBBER. If there are any remaining
9001 references (rare), don't use the value. */
9002
9003 if (reg_mentioned_p (x, value))
9004 value = replace_rtx (copy_rtx (value), x,
9005 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9006
9007 if (reg_overlap_mentioned_p (x, value))
9008 return 0;
9009 }
9010 else
9011 return 0;
9012 }
9013
9014 /* If the value has all its registers valid, return it. */
9015 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9016 return value;
9017
9018 /* Otherwise, make a copy and replace any invalid register with
9019 (clobber (const_int 0)). If that fails for some reason, return 0. */
9020
9021 value = copy_rtx (value);
9022 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9023 return value;
9024
9025 return 0;
9026 }
9027 \f
9028 /* Return nonzero if expression X refers to a REG or to memory
9029 that is set in an instruction more recent than FROM_CUID. */
9030
9031 static int
9032 use_crosses_set_p (x, from_cuid)
9033 register rtx x;
9034 int from_cuid;
9035 {
9036 register char *fmt;
9037 register int i;
9038 register enum rtx_code code = GET_CODE (x);
9039
9040 if (code == REG)
9041 {
9042 register int regno = REGNO (x);
9043 #ifdef PUSH_ROUNDING
9044 /* Don't allow uses of the stack pointer to be moved,
9045 because we don't know whether the move crosses a push insn. */
9046 if (regno == STACK_POINTER_REGNUM)
9047 return 1;
9048 #endif
9049 return (reg_last_set[regno]
9050 && INSN_CUID (reg_last_set[regno]) > from_cuid);
9051 }
9052
9053 if (code == MEM && mem_last_set > from_cuid)
9054 return 1;
9055
9056 fmt = GET_RTX_FORMAT (code);
9057
9058 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9059 {
9060 if (fmt[i] == 'E')
9061 {
9062 register int j;
9063 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9064 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9065 return 1;
9066 }
9067 else if (fmt[i] == 'e'
9068 && use_crosses_set_p (XEXP (x, i), from_cuid))
9069 return 1;
9070 }
9071 return 0;
9072 }
9073 \f
9074 /* Define three variables used for communication between the following
9075 routines. */
9076
9077 static int reg_dead_regno, reg_dead_endregno;
9078 static int reg_dead_flag;
9079
9080 /* Function called via note_stores from reg_dead_at_p.
9081
9082 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9083 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9084
9085 static void
9086 reg_dead_at_p_1 (dest, x)
9087 rtx dest;
9088 rtx x;
9089 {
9090 int regno, endregno;
9091
9092 if (GET_CODE (dest) != REG)
9093 return;
9094
9095 regno = REGNO (dest);
9096 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9097 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9098
9099 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9100 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9101 }
9102
9103 /* Return non-zero if REG is known to be dead at INSN.
9104
9105 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9106 referencing REG, it is dead. If we hit a SET referencing REG, it is
9107 live. Otherwise, see if it is live or dead at the start of the basic
9108 block we are in. */
9109
9110 static int
9111 reg_dead_at_p (reg, insn)
9112 rtx reg;
9113 rtx insn;
9114 {
9115 int block, i;
9116
9117 /* Set variables for reg_dead_at_p_1. */
9118 reg_dead_regno = REGNO (reg);
9119 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9120 ? HARD_REGNO_NREGS (reg_dead_regno,
9121 GET_MODE (reg))
9122 : 1);
9123
9124 reg_dead_flag = 0;
9125
9126 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9127 beginning of function. */
9128 for (; insn && GET_CODE (insn) != CODE_LABEL;
9129 insn = prev_nonnote_insn (insn))
9130 {
9131 note_stores (PATTERN (insn), reg_dead_at_p_1);
9132 if (reg_dead_flag)
9133 return reg_dead_flag == 1 ? 1 : 0;
9134
9135 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9136 return 1;
9137 }
9138
9139 /* Get the basic block number that we were in. */
9140 if (insn == 0)
9141 block = 0;
9142 else
9143 {
9144 for (block = 0; block < n_basic_blocks; block++)
9145 if (insn == basic_block_head[block])
9146 break;
9147
9148 if (block == n_basic_blocks)
9149 return 0;
9150 }
9151
9152 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9153 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9154 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
9155 return 0;
9156
9157 return 1;
9158 }
9159 \f
9160 /* Remove register number REGNO from the dead registers list of INSN.
9161
9162 Return the note used to record the death, if there was one. */
9163
9164 rtx
9165 remove_death (regno, insn)
9166 int regno;
9167 rtx insn;
9168 {
9169 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9170
9171 if (note)
9172 {
9173 reg_n_deaths[regno]--;
9174 remove_note (insn, note);
9175 }
9176
9177 return note;
9178 }
9179
9180 /* For each register (hardware or pseudo) used within expression X, if its
9181 death is in an instruction with cuid between FROM_CUID (inclusive) and
9182 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9183 list headed by PNOTES.
9184
9185 This is done when X is being merged by combination into TO_INSN. These
9186 notes will then be distributed as needed. */
9187
9188 static void
9189 move_deaths (x, from_cuid, to_insn, pnotes)
9190 rtx x;
9191 int from_cuid;
9192 rtx to_insn;
9193 rtx *pnotes;
9194 {
9195 register char *fmt;
9196 register int len, i;
9197 register enum rtx_code code = GET_CODE (x);
9198
9199 if (code == REG)
9200 {
9201 register int regno = REGNO (x);
9202 register rtx where_dead = reg_last_death[regno];
9203
9204 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9205 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9206 {
9207 rtx note = remove_death (regno, reg_last_death[regno]);
9208
9209 /* It is possible for the call above to return 0. This can occur
9210 when reg_last_death points to I2 or I1 that we combined with.
9211 In that case make a new note. */
9212
9213 if (note)
9214 {
9215 XEXP (note, 1) = *pnotes;
9216 *pnotes = note;
9217 }
9218 else
9219 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9220
9221 reg_n_deaths[regno]++;
9222 }
9223
9224 return;
9225 }
9226
9227 else if (GET_CODE (x) == SET)
9228 {
9229 rtx dest = SET_DEST (x);
9230
9231 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9232
9233 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9234 that accesses one word of a multi-word item, some
9235 piece of every register in the expression is used by
9236 this insn, so remove any old death. */
9237
9238 if (GET_CODE (dest) == ZERO_EXTRACT
9239 || GET_CODE (dest) == STRICT_LOW_PART
9240 || (GET_CODE (dest) == SUBREG
9241 && (((GET_MODE_SIZE (GET_MODE (dest))
9242 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9243 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9244 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9245 {
9246 move_deaths (dest, from_cuid, to_insn, pnotes);
9247 return;
9248 }
9249
9250 /* If this is some other SUBREG, we know it replaces the entire
9251 value, so use that as the destination. */
9252 if (GET_CODE (dest) == SUBREG)
9253 dest = SUBREG_REG (dest);
9254
9255 /* If this is a MEM, adjust deaths of anything used in the address.
9256 For a REG (the only other possibility), the entire value is
9257 being replaced so the old value is not used in this insn. */
9258
9259 if (GET_CODE (dest) == MEM)
9260 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9261 return;
9262 }
9263
9264 else if (GET_CODE (x) == CLOBBER)
9265 return;
9266
9267 len = GET_RTX_LENGTH (code);
9268 fmt = GET_RTX_FORMAT (code);
9269
9270 for (i = 0; i < len; i++)
9271 {
9272 if (fmt[i] == 'E')
9273 {
9274 register int j;
9275 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9276 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9277 }
9278 else if (fmt[i] == 'e')
9279 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9280 }
9281 }
9282 \f
9283 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9284 pattern of an insn. X must be a REG. */
9285
9286 static int
9287 reg_bitfield_target_p (x, body)
9288 rtx x;
9289 rtx body;
9290 {
9291 int i;
9292
9293 if (GET_CODE (body) == SET)
9294 {
9295 rtx dest = SET_DEST (body);
9296 rtx target;
9297 int regno, tregno, endregno, endtregno;
9298
9299 if (GET_CODE (dest) == ZERO_EXTRACT)
9300 target = XEXP (dest, 0);
9301 else if (GET_CODE (dest) == STRICT_LOW_PART)
9302 target = SUBREG_REG (XEXP (dest, 0));
9303 else
9304 return 0;
9305
9306 if (GET_CODE (target) == SUBREG)
9307 target = SUBREG_REG (target);
9308
9309 if (GET_CODE (target) != REG)
9310 return 0;
9311
9312 tregno = REGNO (target), regno = REGNO (x);
9313 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9314 return target == x;
9315
9316 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9317 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9318
9319 return endregno > tregno && regno < endtregno;
9320 }
9321
9322 else if (GET_CODE (body) == PARALLEL)
9323 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9324 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9325 return 1;
9326
9327 return 0;
9328 }
9329 \f
9330 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9331 as appropriate. I3 and I2 are the insns resulting from combining the
9332 insns that included FROM_INSN (I2 may be zero).
9333
9334 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9335 not need REG_DEAD notes because they are being substituted for. This
9336 saves searching in the most common cases.
9337
9338 Each note in the list is either ignored or placed on some insns, depending
9339 on the type of note. */
9340
9341 static void
9342 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9343 rtx notes;
9344 rtx from_insn;
9345 rtx i3, i2;
9346 rtx elim_i2, elim_i1;
9347 {
9348 rtx note, next_note;
9349 rtx tem;
9350
9351 for (note = notes; note; note = next_note)
9352 {
9353 rtx place = 0, place2 = 0;
9354
9355 /* If this NOTE references a pseudo register, ensure it references
9356 the latest copy of that register. */
9357 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9358 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9359 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9360
9361 next_note = XEXP (note, 1);
9362 switch (REG_NOTE_KIND (note))
9363 {
9364 case REG_UNUSED:
9365 /* If this register is set or clobbered in I3, put the note there
9366 unless there is one already. */
9367 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9368 {
9369 if (! (GET_CODE (XEXP (note, 0)) == REG
9370 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9371 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9372 place = i3;
9373 }
9374 /* Otherwise, if this register is used by I3, then this register
9375 now dies here, so we must put a REG_DEAD note here unless there
9376 is one already. */
9377 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9378 && ! (GET_CODE (XEXP (note, 0)) == REG
9379 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9380 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9381 {
9382 PUT_REG_NOTE_KIND (note, REG_DEAD);
9383 place = i3;
9384 }
9385 break;
9386
9387 case REG_EQUAL:
9388 case REG_EQUIV:
9389 case REG_NONNEG:
9390 /* These notes say something about results of an insn. We can
9391 only support them if they used to be on I3 in which case they
9392 remain on I3. Otherwise they are ignored.
9393
9394 If the note refers to an expression that is not a constant, we
9395 must also ignore the note since we cannot tell whether the
9396 equivalence is still true. It might be possible to do
9397 slightly better than this (we only have a problem if I2DEST
9398 or I1DEST is present in the expression), but it doesn't
9399 seem worth the trouble. */
9400
9401 if (from_insn == i3
9402 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9403 place = i3;
9404 break;
9405
9406 case REG_INC:
9407 case REG_NO_CONFLICT:
9408 case REG_LABEL:
9409 /* These notes say something about how a register is used. They must
9410 be present on any use of the register in I2 or I3. */
9411 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9412 place = i3;
9413
9414 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9415 {
9416 if (place)
9417 place2 = i2;
9418 else
9419 place = i2;
9420 }
9421 break;
9422
9423 case REG_WAS_0:
9424 /* It is too much trouble to try to see if this note is still
9425 correct in all situations. It is better to simply delete it. */
9426 break;
9427
9428 case REG_RETVAL:
9429 /* If the insn previously containing this note still exists,
9430 put it back where it was. Otherwise move it to the previous
9431 insn. Adjust the corresponding REG_LIBCALL note. */
9432 if (GET_CODE (from_insn) != NOTE)
9433 place = from_insn;
9434 else
9435 {
9436 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9437 place = prev_real_insn (from_insn);
9438 if (tem && place)
9439 XEXP (tem, 0) = place;
9440 }
9441 break;
9442
9443 case REG_LIBCALL:
9444 /* This is handled similarly to REG_RETVAL. */
9445 if (GET_CODE (from_insn) != NOTE)
9446 place = from_insn;
9447 else
9448 {
9449 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9450 place = next_real_insn (from_insn);
9451 if (tem && place)
9452 XEXP (tem, 0) = place;
9453 }
9454 break;
9455
9456 case REG_DEAD:
9457 /* If the register is used as an input in I3, it dies there.
9458 Similarly for I2, if it is non-zero and adjacent to I3.
9459
9460 If the register is not used as an input in either I3 or I2
9461 and it is not one of the registers we were supposed to eliminate,
9462 there are two possibilities. We might have a non-adjacent I2
9463 or we might have somehow eliminated an additional register
9464 from a computation. For example, we might have had A & B where
9465 we discover that B will always be zero. In this case we will
9466 eliminate the reference to A.
9467
9468 In both cases, we must search to see if we can find a previous
9469 use of A and put the death note there. */
9470
9471 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9472 place = i3;
9473 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9474 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9475 place = i2;
9476
9477 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9478 break;
9479
9480 /* If the register is used in both I2 and I3 and it dies in I3,
9481 we might have added another reference to it. If reg_n_refs
9482 was 2, bump it to 3. This has to be correct since the
9483 register must have been set somewhere. The reason this is
9484 done is because local-alloc.c treats 2 references as a
9485 special case. */
9486
9487 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9488 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9489 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9490 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9491
9492 if (place == 0)
9493 for (tem = prev_nonnote_insn (i3);
9494 tem && (GET_CODE (tem) == INSN
9495 || GET_CODE (tem) == CALL_INSN);
9496 tem = prev_nonnote_insn (tem))
9497 {
9498 /* If the register is being set at TEM, see if that is all
9499 TEM is doing. If so, delete TEM. Otherwise, make this
9500 into a REG_UNUSED note instead. */
9501 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9502 {
9503 rtx set = single_set (tem);
9504
9505 /* Verify that it was the set, and not a clobber that
9506 modified the register. */
9507
9508 if (set != 0 && ! side_effects_p (SET_SRC (set))
9509 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9510 {
9511 /* Move the notes and links of TEM elsewhere.
9512 This might delete other dead insns recursively.
9513 First set the pattern to something that won't use
9514 any register. */
9515
9516 PATTERN (tem) = pc_rtx;
9517
9518 distribute_notes (REG_NOTES (tem), tem, tem,
9519 NULL_RTX, NULL_RTX, NULL_RTX);
9520 distribute_links (LOG_LINKS (tem));
9521
9522 PUT_CODE (tem, NOTE);
9523 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9524 NOTE_SOURCE_FILE (tem) = 0;
9525 }
9526 else
9527 {
9528 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9529
9530 /* If there isn't already a REG_UNUSED note, put one
9531 here. */
9532 if (! find_regno_note (tem, REG_UNUSED,
9533 REGNO (XEXP (note, 0))))
9534 place = tem;
9535 break;
9536 }
9537 }
9538 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9539 {
9540 place = tem;
9541 break;
9542 }
9543 }
9544
9545 /* If the register is set or already dead at PLACE, we needn't do
9546 anything with this note if it is still a REG_DEAD note.
9547
9548 Note that we cannot use just `dead_or_set_p' here since we can
9549 convert an assignment to a register into a bit-field assignment.
9550 Therefore, we must also omit the note if the register is the
9551 target of a bitfield assignment. */
9552
9553 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9554 {
9555 int regno = REGNO (XEXP (note, 0));
9556
9557 if (dead_or_set_p (place, XEXP (note, 0))
9558 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9559 {
9560 /* Unless the register previously died in PLACE, clear
9561 reg_last_death. [I no longer understand why this is
9562 being done.] */
9563 if (reg_last_death[regno] != place)
9564 reg_last_death[regno] = 0;
9565 place = 0;
9566 }
9567 else
9568 reg_last_death[regno] = place;
9569
9570 /* If this is a death note for a hard reg that is occupying
9571 multiple registers, ensure that we are still using all
9572 parts of the object. If we find a piece of the object
9573 that is unused, we must add a USE for that piece before
9574 PLACE and put the appropriate REG_DEAD note on it.
9575
9576 An alternative would be to put a REG_UNUSED for the pieces
9577 on the insn that set the register, but that can't be done if
9578 it is not in the same block. It is simpler, though less
9579 efficient, to add the USE insns. */
9580
9581 if (place && regno < FIRST_PSEUDO_REGISTER
9582 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9583 {
9584 int endregno
9585 = regno + HARD_REGNO_NREGS (regno,
9586 GET_MODE (XEXP (note, 0)));
9587 int all_used = 1;
9588 int i;
9589
9590 for (i = regno; i < endregno; i++)
9591 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9592 {
9593 rtx piece = gen_rtx (REG, word_mode, i);
9594 rtx p;
9595
9596 /* See if we already placed a USE note for this
9597 register in front of PLACE. */
9598 for (p = place;
9599 GET_CODE (PREV_INSN (p)) == INSN
9600 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9601 p = PREV_INSN (p))
9602 if (rtx_equal_p (piece,
9603 XEXP (PATTERN (PREV_INSN (p)), 0)))
9604 {
9605 p = 0;
9606 break;
9607 }
9608
9609 if (p)
9610 {
9611 rtx use_insn
9612 = emit_insn_before (gen_rtx (USE, VOIDmode,
9613 piece),
9614 p);
9615 REG_NOTES (use_insn)
9616 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9617 REG_NOTES (use_insn));
9618 }
9619
9620 all_used = 0;
9621 }
9622
9623 if (! all_used)
9624 {
9625 /* Put only REG_DEAD notes for pieces that are
9626 still used and that are not already dead or set. */
9627
9628 for (i = regno; i < endregno; i++)
9629 {
9630 rtx piece = gen_rtx (REG, word_mode, i);
9631
9632 if (reg_referenced_p (piece, PATTERN (place))
9633 && ! dead_or_set_p (place, piece)
9634 && ! reg_bitfield_target_p (piece,
9635 PATTERN (place)))
9636 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9637 piece,
9638 REG_NOTES (place));
9639 }
9640
9641 place = 0;
9642 }
9643 }
9644 }
9645 break;
9646
9647 default:
9648 /* Any other notes should not be present at this point in the
9649 compilation. */
9650 abort ();
9651 }
9652
9653 if (place)
9654 {
9655 XEXP (note, 1) = REG_NOTES (place);
9656 REG_NOTES (place) = note;
9657 }
9658 else if ((REG_NOTE_KIND (note) == REG_DEAD
9659 || REG_NOTE_KIND (note) == REG_UNUSED)
9660 && GET_CODE (XEXP (note, 0)) == REG)
9661 reg_n_deaths[REGNO (XEXP (note, 0))]--;
9662
9663 if (place2)
9664 {
9665 if ((REG_NOTE_KIND (note) == REG_DEAD
9666 || REG_NOTE_KIND (note) == REG_UNUSED)
9667 && GET_CODE (XEXP (note, 0)) == REG)
9668 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9669
9670 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9671 XEXP (note, 0), REG_NOTES (place2));
9672 }
9673 }
9674 }
9675 \f
9676 /* Similarly to above, distribute the LOG_LINKS that used to be present on
9677 I3, I2, and I1 to new locations. This is also called in one case to
9678 add a link pointing at I3 when I3's destination is changed. */
9679
9680 static void
9681 distribute_links (links)
9682 rtx links;
9683 {
9684 rtx link, next_link;
9685
9686 for (link = links; link; link = next_link)
9687 {
9688 rtx place = 0;
9689 rtx insn;
9690 rtx set, reg;
9691
9692 next_link = XEXP (link, 1);
9693
9694 /* If the insn that this link points to is a NOTE or isn't a single
9695 set, ignore it. In the latter case, it isn't clear what we
9696 can do other than ignore the link, since we can't tell which
9697 register it was for. Such links wouldn't be used by combine
9698 anyway.
9699
9700 It is not possible for the destination of the target of the link to
9701 have been changed by combine. The only way this could happen is if we
9702 replaced I3, I2, and I1 by I3 and I2. But in that case the
9703 destination of I2 also remains unchanged. */
9704
9705 if (GET_CODE (XEXP (link, 0)) == NOTE
9706 || (set = single_set (XEXP (link, 0))) == 0)
9707 continue;
9708
9709 reg = SET_DEST (set);
9710 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9711 || GET_CODE (reg) == SIGN_EXTRACT
9712 || GET_CODE (reg) == STRICT_LOW_PART)
9713 reg = XEXP (reg, 0);
9714
9715 /* A LOG_LINK is defined as being placed on the first insn that uses
9716 a register and points to the insn that sets the register. Start
9717 searching at the next insn after the target of the link and stop
9718 when we reach a set of the register or the end of the basic block.
9719
9720 Note that this correctly handles the link that used to point from
9721 I3 to I2. Also note that not much searching is typically done here
9722 since most links don't point very far away. */
9723
9724 for (insn = NEXT_INSN (XEXP (link, 0));
9725 (insn && GET_CODE (insn) != CODE_LABEL
9726 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9727 insn = NEXT_INSN (insn))
9728 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9729 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9730 {
9731 if (reg_referenced_p (reg, PATTERN (insn)))
9732 place = insn;
9733 break;
9734 }
9735
9736 /* If we found a place to put the link, place it there unless there
9737 is already a link to the same insn as LINK at that point. */
9738
9739 if (place)
9740 {
9741 rtx link2;
9742
9743 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9744 if (XEXP (link2, 0) == XEXP (link, 0))
9745 break;
9746
9747 if (link2 == 0)
9748 {
9749 XEXP (link, 1) = LOG_LINKS (place);
9750 LOG_LINKS (place) = link;
9751 }
9752 }
9753 }
9754 }
9755 \f
9756 void
9757 dump_combine_stats (file)
9758 FILE *file;
9759 {
9760 fprintf
9761 (file,
9762 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9763 combine_attempts, combine_merges, combine_extras, combine_successes);
9764 }
9765
9766 void
9767 dump_combine_total_stats (file)
9768 FILE *file;
9769 {
9770 fprintf
9771 (file,
9772 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9773 total_attempts, total_merges, total_extras, total_successes);
9774 }