]> gcc.gnu.org Git - gcc.git/blame - gcc/cse.c
* vxppc.h (CPP_SPEC): Fix support for vararg functions.
[gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
747215f1 2 Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
7afe21cc
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
7afe21cc
RK
20
21
22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
50b2596f 25#include <setjmp.h>
9c3b4c8b 26
7afe21cc
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "flags.h"
31#include "real.h"
32#include "insn-config.h"
33#include "recog.h"
956d6950 34#include "expr.h"
50b2596f
KG
35#include "toplev.h"
36#include "output.h"
30f72379 37#include "splay-tree.h"
7afe21cc
RK
38
39/* The basic idea of common subexpression elimination is to go
40 through the code, keeping a record of expressions that would
41 have the same value at the current scan point, and replacing
42 expressions encountered with the cheapest equivalent expression.
43
44 It is too complicated to keep track of the different possibilities
45 when control paths merge; so, at each label, we forget all that is
46 known and start fresh. This can be described as processing each
47 basic block separately. Note, however, that these are not quite
48 the same as the basic blocks found by a later pass and used for
49 data flow analysis and register packing. We do not need to start fresh
50 after a conditional jump instruction if there is no label there.
51
52 We use two data structures to record the equivalent expressions:
53 a hash table for most expressions, and several vectors together
54 with "quantity numbers" to record equivalent (pseudo) registers.
55
56 The use of the special data structure for registers is desirable
57 because it is faster. It is possible because register references
58 contain a fairly small number, the register number, taken from
59 a contiguously allocated series, and two register references are
60 identical if they have the same number. General expressions
61 do not have any such thing, so the only way to retrieve the
62 information recorded on an expression other than a register
63 is to keep it in a hash table.
64
65Registers and "quantity numbers":
66
67 At the start of each basic block, all of the (hardware and pseudo)
68 registers used in the function are given distinct quantity
69 numbers to indicate their contents. During scan, when the code
70 copies one register into another, we copy the quantity number.
71 When a register is loaded in any other way, we allocate a new
72 quantity number to describe the value generated by this operation.
73 `reg_qty' records what quantity a register is currently thought
74 of as containing.
75
76 All real quantity numbers are greater than or equal to `max_reg'.
77 If register N has not been assigned a quantity, reg_qty[N] will equal N.
78
79 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
80 variables should be referenced with an index below `max_reg'.
81
82 We also maintain a bidirectional chain of registers for each
83 quantity number. `qty_first_reg', `qty_last_reg',
84 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
85
86 The first register in a chain is the one whose lifespan is least local.
87 Among equals, it is the one that was seen first.
88 We replace any equivalent register with that one.
89
90 If two registers have the same quantity number, it must be true that
91 REG expressions with `qty_mode' must be in the hash table for both
92 registers and must be in the same class.
93
94 The converse is not true. Since hard registers may be referenced in
95 any mode, two REG expressions might be equivalent in the hash table
96 but not have the same quantity number if the quantity number of one
97 of the registers is not the same mode as those expressions.
98
99Constants and quantity numbers
100
101 When a quantity has a known constant value, that value is stored
102 in the appropriate element of qty_const. This is in addition to
103 putting the constant in the hash table as is usual for non-regs.
104
d45cf215 105 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
106 macro CONST_COSTS and will often depend on the constant value. In any
107 event, expressions containing constants can be simplified, by fold_rtx.
108
109 When a quantity has a known nearly constant value (such as an address
110 of a stack slot), that value is stored in the appropriate element
111 of qty_const.
112
113 Integer constants don't have a machine mode. However, cse
114 determines the intended machine mode from the destination
115 of the instruction that moves the constant. The machine mode
116 is recorded in the hash table along with the actual RTL
117 constant expression so that different modes are kept separate.
118
119Other expressions:
120
121 To record known equivalences among expressions in general
122 we use a hash table called `table'. It has a fixed number of buckets
123 that contain chains of `struct table_elt' elements for expressions.
124 These chains connect the elements whose expressions have the same
125 hash codes.
126
127 Other chains through the same elements connect the elements which
128 currently have equivalent values.
129
130 Register references in an expression are canonicalized before hashing
131 the expression. This is done using `reg_qty' and `qty_first_reg'.
132 The hash code of a register reference is computed using the quantity
133 number, not the register number.
134
135 When the value of an expression changes, it is necessary to remove from the
136 hash table not just that expression but all expressions whose values
137 could be different as a result.
138
139 1. If the value changing is in memory, except in special cases
140 ANYTHING referring to memory could be changed. That is because
141 nobody knows where a pointer does not point.
142 The function `invalidate_memory' removes what is necessary.
143
144 The special cases are when the address is constant or is
145 a constant plus a fixed register such as the frame pointer
146 or a static chain pointer. When such addresses are stored in,
147 we can tell exactly which other such addresses must be invalidated
148 due to overlap. `invalidate' does this.
149 All expressions that refer to non-constant
150 memory addresses are also invalidated. `invalidate_memory' does this.
151
152 2. If the value changing is a register, all expressions
153 containing references to that register, and only those,
154 must be removed.
155
156 Because searching the entire hash table for expressions that contain
157 a register is very slow, we try to figure out when it isn't necessary.
158 Precisely, this is necessary only when expressions have been
159 entered in the hash table using this register, and then the value has
160 changed, and then another expression wants to be added to refer to
161 the register's new value. This sequence of circumstances is rare
162 within any one basic block.
163
164 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
165 reg_tick[i] is incremented whenever a value is stored in register i.
166 reg_in_table[i] holds -1 if no references to register i have been
167 entered in the table; otherwise, it contains the value reg_tick[i] had
168 when the references were entered. If we want to enter a reference
169 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
170 Until we want to enter a new entry, the mere fact that the two vectors
171 don't match makes the entries be ignored if anyone tries to match them.
172
173 Registers themselves are entered in the hash table as well as in
174 the equivalent-register chains. However, the vectors `reg_tick'
175 and `reg_in_table' do not apply to expressions which are simple
176 register references. These expressions are removed from the table
177 immediately when they become invalid, and this can be done even if
178 we do not immediately search for all the expressions that refer to
179 the register.
180
181 A CLOBBER rtx in an instruction invalidates its operand for further
182 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
183 invalidates everything that resides in memory.
184
185Related expressions:
186
187 Constant expressions that differ only by an additive integer
188 are called related. When a constant expression is put in
189 the table, the related expression with no constant term
190 is also entered. These are made to point at each other
191 so that it is possible to find out if there exists any
192 register equivalent to an expression related to a given expression. */
193
194/* One plus largest register number used in this function. */
195
196static int max_reg;
197
556c714b
JW
198/* One plus largest instruction UID used in this function at time of
199 cse_main call. */
200
201static int max_insn_uid;
202
7afe21cc
RK
203/* Length of vectors indexed by quantity number.
204 We know in advance we will not need a quantity number this big. */
205
206static int max_qty;
207
208/* Next quantity number to be allocated.
209 This is 1 + the largest number needed so far. */
210
211static int next_qty;
212
71d306d1 213/* Indexed by quantity number, gives the first (or last) register
7afe21cc
RK
214 in the chain of registers that currently contain this quantity. */
215
216static int *qty_first_reg;
217static int *qty_last_reg;
218
219/* Index by quantity number, gives the mode of the quantity. */
220
221static enum machine_mode *qty_mode;
222
223/* Indexed by quantity number, gives the rtx of the constant value of the
224 quantity, or zero if it does not have a known value.
225 A sum of the frame pointer (or arg pointer) plus a constant
226 can also be entered here. */
227
228static rtx *qty_const;
229
230/* Indexed by qty number, gives the insn that stored the constant value
231 recorded in `qty_const'. */
232
233static rtx *qty_const_insn;
234
235/* The next three variables are used to track when a comparison between a
236 quantity and some constant or register has been passed. In that case, we
237 know the results of the comparison in case we see it again. These variables
238 record a comparison that is known to be true. */
239
240/* Indexed by qty number, gives the rtx code of a comparison with a known
241 result involving this quantity. If none, it is UNKNOWN. */
242static enum rtx_code *qty_comparison_code;
243
244/* Indexed by qty number, gives the constant being compared against in a
245 comparison of known result. If no such comparison, it is undefined.
246 If the comparison is not with a constant, it is zero. */
247
248static rtx *qty_comparison_const;
249
250/* Indexed by qty number, gives the quantity being compared against in a
251 comparison of known result. If no such comparison, it is undefined.
252 If the comparison is not with a register, it is -1. */
253
254static int *qty_comparison_qty;
255
256#ifdef HAVE_cc0
257/* For machines that have a CC0, we do not record its value in the hash
258 table since its use is guaranteed to be the insn immediately following
259 its definition and any other insn is presumed to invalidate it.
260
261 Instead, we store below the value last assigned to CC0. If it should
262 happen to be a constant, it is stored in preference to the actual
263 assigned value. In case it is a constant, we store the mode in which
264 the constant should be interpreted. */
265
266static rtx prev_insn_cc0;
267static enum machine_mode prev_insn_cc0_mode;
268#endif
269
270/* Previous actual insn. 0 if at first insn of basic block. */
271
272static rtx prev_insn;
273
274/* Insn being scanned. */
275
276static rtx this_insn;
277
71d306d1
DE
278/* Index by register number, gives the number of the next (or
279 previous) register in the chain of registers sharing the same
7afe21cc
RK
280 value.
281
282 Or -1 if this register is at the end of the chain.
283
284 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
285
286static int *reg_next_eqv;
287static int *reg_prev_eqv;
288
30f72379
MM
/* Per-register bookkeeping kept by CSE.  Structures are looked up
   through `cse_reg_info_tree' and recycled via
   `cse_reg_info_free_list' (see declarations below).  */
289struct cse_reg_info {
/* A live entry uses `reg_tick'; a free-listed entry uses `next'.
   The two are never needed at the same time, so they share storage.  */
290 union {
291 /* The number of times the register has been altered in the current
292 basic block. */
293 int reg_tick;
294
295 /* The next cse_reg_info structure in the free list. */
296 struct cse_reg_info* next;
297 } variant;
298
299 /* The REG_TICK value at which rtx's containing this register are
300 valid in the hash table. If this does not equal the current
301 reg_tick value, such expressions existing in the hash table are
302 invalid. */
303 int reg_in_table;
304
305 /* The quantity number of the register's current contents. */
306 int reg_qty;
307};
7afe21cc 308
30f72379
MM
309/* A free list of cse_reg_info entries. */
310static struct cse_reg_info *cse_reg_info_free_list;
7afe21cc 311
30f72379
MM
312/* A mapping from registers to cse_reg_info data structures. */
313static splay_tree cse_reg_info_tree;
7afe21cc 314
30f72379
MM
315/* The last lookup we did into the cse_reg_info_tree. This allows us
316 to cache repeated lookups. */
317static int cached_regno;
318static struct cse_reg_info *cached_cse_reg_info;
7afe21cc
RK
319
320/* A HARD_REG_SET containing all the hard registers for which there is
321 currently a REG expression in the hash table. Note the difference
322 from the above variables, which indicate if the REG is mentioned in some
323 expression in the table. */
324
325static HARD_REG_SET hard_regs_in_table;
326
327/* A HARD_REG_SET containing all the hard registers that are invalidated
328 by a CALL_INSN. */
329
330static HARD_REG_SET regs_invalidated_by_call;
331
7afe21cc
RK
332/* CUID of insn that starts the basic block currently being cse-processed. */
333
334static int cse_basic_block_start;
335
336/* CUID of insn that ends the basic block currently being cse-processed. */
337
338static int cse_basic_block_end;
339
340/* Vector mapping INSN_UIDs to cuids.
d45cf215 341 The cuids are like uids but increase monotonically always.
7afe21cc
RK
342 We use them to see whether a reg is used outside a given basic block. */
343
906c4e36 344static int *uid_cuid;
7afe21cc 345
164c8956
RK
346/* Highest UID in UID_CUID. */
347static int max_uid;
348
7afe21cc
RK
349/* Get the cuid of an insn. */
350
351#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
352
353/* Nonzero if cse has altered conditional jump insns
354 in such a way that jump optimization should be redone. */
355
356static int cse_jumps_altered;
357
a5dfb4ee
RK
358/* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
359 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
360 to put in the note. */
361static int recorded_label_ref;
362
7afe21cc
RK
363/* canon_hash stores 1 in do_not_record
364 if it notices a reference to CC0, PC, or some other volatile
365 subexpression. */
366
367static int do_not_record;
368
7bac1be0
RK
369#ifdef LOAD_EXTEND_OP
370
371/* Scratch rtl used when looking for load-extended copy of a MEM. */
372static rtx memory_extend_rtx;
373#endif
374
7afe21cc
RK
375/* canon_hash stores 1 in hash_arg_in_memory
376 if it notices a reference to memory within the expression being hashed. */
377
378static int hash_arg_in_memory;
379
380/* canon_hash stores 1 in hash_arg_in_struct
381 if it notices a reference to memory that's part of a structure. */
382
383static int hash_arg_in_struct;
384
385/* The hash table contains buckets which are chains of `struct table_elt's,
386 each recording one expression's information.
387 That expression is in the `exp' field.
388
389 Those elements with the same hash code are chained in both directions
390 through the `next_same_hash' and `prev_same_hash' fields.
391
392 Each set of expressions with equivalent values
393 are on a two-way chain through the `next_same_value'
394 and `prev_same_value' fields, and all point with
395 the `first_same_value' field at the first element in
396 that chain. The chain is in order of increasing cost.
397 Each element's cost value is in its `cost' field.
398
399 The `in_memory' field is nonzero for elements that
400 involve any reference to memory. These elements are removed
401 whenever a write is done to an unidentified location in memory.
402 To be safe, we assume that a memory address is unidentified unless
403 the address is either a symbol constant or a constant plus
404 the frame pointer or argument pointer.
405
406 The `in_struct' field is nonzero for elements that
407 involve any reference to memory inside a structure or array.
408
409 The `related_value' field is used to connect related expressions
410 (that differ by adding an integer).
411 The related expressions are chained in a circular fashion.
412 `related_value' is zero for expressions for which this
413 chain is not useful.
414
415 The `cost' field stores the cost of this element's expression.
416
417 The `is_const' flag is set if the element is a constant (including
418 a fixed address).
419
420 The `flag' field is used as a temporary during some search routines.
421
422 The `mode' field is usually the same as GET_MODE (`exp'), but
423 if `exp' is a CONST_INT and has no machine mode then the `mode'
424 field is the mode it was being used as. Each constant is
425 recorded separately for each mode it is used with. */
426
427
428struct table_elt
429{
430 rtx exp;				/* The expression this element records.  */
431 struct table_elt *next_same_hash;	/* Two-way chain through elements */
432 struct table_elt *prev_same_hash;	/* that share the same hash code.  */
433 struct table_elt *next_same_value;	/* Two-way chain through elements whose */
434 struct table_elt *prev_same_value;	/* expressions have equivalent values.  */
435 struct table_elt *first_same_value;	/* Head of the value chain; the chain is
					   kept in order of increasing cost.  */
436 struct table_elt *related_value;	/* Circular chain of expressions that
					   differ by an additive integer, or
					   zero if not useful.  */
437 int cost;				/* Cost of computing `exp' (see COST).  */
438 enum machine_mode mode;		/* Mode of `exp'; for a CONST_INT, the
					   mode it was being used as.  */
439 char in_memory;			/* Nonzero if `exp' references memory.  */
440 char in_struct;			/* Nonzero if that memory is inside a
					   structure or array.  */
441 char is_const;			/* Nonzero if `exp' is a constant
					   (including a fixed address).  */
442 char flag;				/* Temporary used during some searches.  */
443};
444
7afe21cc
RK
445/* We don't want a lot of buckets, because we rarely have very many
446 things stored in the hash table, and a lot of buckets slows
447 down a lot of loops that happen frequently. */
448#define NBUCKETS 31
449
450/* Compute hash code of X in mode M. Special-case case where X is a pseudo
451 register (hard registers may require `do_not_record' to be set). */
452
453#define HASH(X, M) \
454 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
30f72379 455 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
7afe21cc
RK
456 : canon_hash (X, M) % NBUCKETS)
457
458/* Determine whether register number N is considered a fixed register for CSE.
459 It is desirable to replace other regs with fixed regs, to reduce need for
460 non-fixed hard regs.
461 A reg wins if it is either the frame pointer or designated as fixed,
462 but not if it is an overlapping register. */
463#ifdef OVERLAPPING_REGNO_P
464#define FIXED_REGNO_P(N) \
8bc169f2 465 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 466 || fixed_regs[N] || global_regs[N]) \
7afe21cc
RK
467 && ! OVERLAPPING_REGNO_P ((N)))
468#else
469#define FIXED_REGNO_P(N) \
8bc169f2 470 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 471 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
472#endif
473
474/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
475 hard registers and pointers into the frame are the cheapest with a cost
476 of 0. Next come pseudos with a cost of one and other hard registers with
477 a cost of 2. Aside from these special cases, call `rtx_cost'. */
478
6ab832bc 479#define CHEAP_REGNO(N) \
8bc169f2
DE
480 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
481 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
482 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
483 || ((N) < FIRST_PSEUDO_REGISTER \
e7bb59fa 484 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 485
6ab832bc
RK
486/* A register is cheap if it is a user variable assigned to the register
487 or if its register number always corresponds to a cheap register. */
488
489#define CHEAP_REG(N) \
490 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
491 || CHEAP_REGNO (REGNO (N)))
492
38734e55
ILT
493#define COST(X) \
494 (GET_CODE (X) == REG \
495 ? (CHEAP_REG (X) ? 0 \
496 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
497 : 2) \
954a5693 498 : notreg_cost(X))
7afe21cc 499
30f72379
MM
500/* Get the info associated with register N. */
501
502#define GET_CSE_REG_INFO(N) \
503 (((N) == cached_regno && cached_cse_reg_info) \
504 ? cached_cse_reg_info : get_cse_reg_info ((N)))
505
506/* Get the number of times this register has been updated in this
507 basic block. */
508
509#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)
510
511/* Get the point at which REG was recorded in the table. */
512
513#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
514
515/* Get the quantity number for REG. */
516
517#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
518
7afe21cc
RK
519/* Determine if the quantity number for register X represents a valid index
520 into the `qty_...' variables. */
521
30f72379 522#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
7afe21cc 523
2f541799
MM
524#ifdef ADDRESS_COST
525/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
526 during CSE, such nodes are present. Using an ADDRESSOF node which
527 refers to the address of a REG is a good thing because we can then
528 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
529#define CSE_ADDRESS_COST(RTX) \
530 ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
531 ? -1 : ADDRESS_COST(RTX))
532#endif
533
7afe21cc
RK
534static struct table_elt *table[NBUCKETS];
535
536/* Chain of `struct table_elt's made so far for this function
537 but currently removed from the table. */
538
539static struct table_elt *free_element_chain;
540
541/* Number of `struct table_elt' structures made so far for this function. */
542
543static int n_elements_made;
544
545/* Maximum value `n_elements_made' has had so far in this compilation
546 for functions previously processed. */
547
548static int max_elements_made;
549
550/* Surviving equivalence class when two equivalence classes are merged
551 by recording the effects of a jump in the last insn. Zero if the
552 last insn was not a conditional jump. */
553
554static struct table_elt *last_jump_equiv_class;
555
556/* Set to the cost of a constant pool reference if one was found for a
557 symbolic constant. If this was found, it means we should try to
558 convert constants into constant pool entries if they don't fit in
559 the insn. */
560
561static int constant_pool_entries_cost;
562
6cd4575e
RK
563/* Define maximum length of a branch path. */
564
565#define PATHLENGTH 10
566
567/* This data describes a block that will be processed by cse_basic_block. */
568
569struct cse_basic_block_data {
570 /* Lowest CUID value of insns in block. */
571 int low_cuid;
572 /* Highest CUID value of insns in block. */
573 int high_cuid;
574 /* Total number of SETs in block. */
575 int nsets;
576 /* Last insn in the block. */
577 rtx last;
578 /* Size of current branch path, if any. */
579 int path_size;
580 /* Current branch path, indicating which branches will be taken. */
581 struct branch_path {
0f41302f 582 /* The branch insn. */
6cd4575e
RK
583 rtx branch;
584 /* Whether it should be taken or not. AROUND is the same as taken
585 except that it is used when the destination label is not preceded
586 by a BARRIER. */
587 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
/* At most PATHLENGTH branches are recorded for one block.  */
588 } path[PATHLENGTH];
589};
590
7afe21cc
RK
591/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
592 virtual regs here because the simplify_*_operation routines are called
593 by integrate.c, which is called before virtual register instantiation. */
594
595#define FIXED_BASE_PLUS_P(X) \
8bc169f2
DE
596 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
597 || (X) == arg_pointer_rtx \
7afe21cc
RK
598 || (X) == virtual_stack_vars_rtx \
599 || (X) == virtual_incoming_args_rtx \
600 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
601 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 602 || XEXP (X, 0) == hard_frame_pointer_rtx \
7afe21cc
RK
603 || XEXP (X, 0) == arg_pointer_rtx \
604 || XEXP (X, 0) == virtual_stack_vars_rtx \
e9a25f70
JL
605 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
606 || GET_CODE (X) == ADDRESSOF)
7afe21cc 607
6f90e075
JW
608/* Similar, but also allows reference to the stack pointer.
609
610 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
611 arg_pointer_rtx by itself is nonzero, because on at least one machine,
612 the i960, the arg pointer is zero when it is unused. */
7afe21cc
RK
613
614#define NONZERO_BASE_PLUS_P(X) \
8bc169f2 615 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
6f90e075
JW
616 || (X) == virtual_stack_vars_rtx \
617 || (X) == virtual_incoming_args_rtx \
618 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
619 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 620 || XEXP (X, 0) == hard_frame_pointer_rtx \
6f90e075
JW
621 || XEXP (X, 0) == arg_pointer_rtx \
622 || XEXP (X, 0) == virtual_stack_vars_rtx \
623 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
7afe21cc
RK
624 || (X) == stack_pointer_rtx \
625 || (X) == virtual_stack_dynamic_rtx \
626 || (X) == virtual_outgoing_args_rtx \
627 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
628 && (XEXP (X, 0) == stack_pointer_rtx \
629 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
e9a25f70
JL
630 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
631 || GET_CODE (X) == ADDRESSOF)
7afe21cc 632
954a5693 633static int notreg_cost PROTO((rtx));
6cd4575e
RK
634static void new_basic_block PROTO((void));
635static void make_new_qty PROTO((int));
636static void make_regs_eqv PROTO((int, int));
637static void delete_reg_equiv PROTO((int));
638static int mention_regs PROTO((rtx));
639static int insert_regs PROTO((rtx, struct table_elt *, int));
640static void free_element PROTO((struct table_elt *));
2197a88a 641static void remove_from_table PROTO((struct table_elt *, unsigned));
6cd4575e 642static struct table_elt *get_element PROTO((void));
2197a88a
RK
643static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
644 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
6cd4575e 645static rtx lookup_as_function PROTO((rtx, enum rtx_code));
2197a88a 646static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
6cd4575e
RK
647 enum machine_mode));
648static void merge_equiv_classes PROTO((struct table_elt *,
649 struct table_elt *));
68c1e173 650static void invalidate PROTO((rtx, enum machine_mode));
9ae8ffe7 651static int cse_rtx_varies_p PROTO((rtx));
6cd4575e 652static void remove_invalid_refs PROTO((int));
34c73909 653static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
6cd4575e 654static void rehash_using_reg PROTO((rtx));
9ae8ffe7 655static void invalidate_memory PROTO((void));
6cd4575e
RK
656static void invalidate_for_call PROTO((void));
657static rtx use_related_value PROTO((rtx, struct table_elt *));
2197a88a
RK
658static unsigned canon_hash PROTO((rtx, enum machine_mode));
659static unsigned safe_hash PROTO((rtx, enum machine_mode));
6cd4575e 660static int exp_equiv_p PROTO((rtx, rtx, int, int));
f451db89 661static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
6500fb43
RK
662 HOST_WIDE_INT *,
663 HOST_WIDE_INT *));
6cd4575e 664static int refers_to_p PROTO((rtx, rtx));
6cd4575e
RK
665static rtx canon_reg PROTO((rtx, rtx));
666static void find_best_addr PROTO((rtx, rtx *));
667static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
668 enum machine_mode *,
669 enum machine_mode *));
96b0e481
RK
670static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
671 rtx, rtx));
672static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
673 rtx, rtx));
6cd4575e
RK
674static rtx fold_rtx PROTO((rtx, rtx));
675static rtx equiv_constant PROTO((rtx));
676static void record_jump_equiv PROTO((rtx, int));
677static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
678 rtx, rtx, int));
7bd8b2a8 679static void cse_insn PROTO((rtx, rtx));
9ae8ffe7
JL
680static int note_mem_written PROTO((rtx));
681static void invalidate_from_clobbers PROTO((rtx));
6cd4575e
RK
682static rtx cse_process_notes PROTO((rtx, rtx));
683static void cse_around_loop PROTO((rtx));
684static void invalidate_skipped_set PROTO((rtx, rtx));
685static void invalidate_skipped_block PROTO((rtx));
686static void cse_check_loop_start PROTO((rtx, rtx));
687static void cse_set_around_loop PROTO((rtx, rtx, rtx));
688static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
79644f06 689static void count_reg_usage PROTO((rtx, int *, rtx, int));
a0153051 690extern void dump_class PROTO((struct table_elt*));
1a87eea2 691static void check_fold_consts PROTO((PTR));
30f72379
MM
692static struct cse_reg_info* get_cse_reg_info PROTO((int));
693static void free_cse_reg_info PROTO((splay_tree_value));
01e752d3 694static void flush_hash_table PROTO((void));
c407b802
RK
695
696extern int rtx_equal_function_value_matters;
7afe21cc 697\f
a4c6502a
MM
698/* Dump the expressions in the equivalence class indicated by CLASSP.
699 This function is used only for debugging. */
a0153051 700void
a4c6502a
MM
701dump_class (classp)
702 struct table_elt *classp;
703{
704 struct table_elt *elt;
705
706 fprintf (stderr, "Equivalence chain for ");
707 print_rtl (stderr, classp->exp);
708 fprintf (stderr, ": \n");
709
710 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
711 {
712 print_rtl (stderr, elt->exp);
713 fprintf (stderr, "\n");
714 }
715}
716
7afe21cc
RK
717/* Return an estimate of the cost of computing rtx X.
718 One use is in cse, to decide which expression to keep in the hash table.
719 Another is in rtl generation, to pick the cheapest way to multiply.
720 Other uses like the latter are expected in the future. */
721
954a5693
RK
722/* Internal function, to compute cost when X is not a register; called
723 from COST macro to keep it simple. */
724
725static int
726notreg_cost (x)
727 rtx x;
728{
729 return ((GET_CODE (x) == SUBREG
730 && GET_CODE (SUBREG_REG (x)) == REG
731 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
732 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
733 && (GET_MODE_SIZE (GET_MODE (x))
734 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
735 && subreg_lowpart_p (x)
736 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
737 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
738 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
739 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
740 : 2))
741 : rtx_cost (x, SET) * 2);
742}
743
7afe21cc
RK
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  /* Second switch: cases with a definitive answer return directly;
     everything else falls out to the generic subexpression sum below.  */
  switch (code)
    {
    case REG:
      /* A cheap register costs 0; any other register costs 1.  */
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
      /* NOTE: the target macros below expand to `case' labels with
	 `return' statements of their own, so although they textually
	 follow a `return', they are reachable through those labels.
	 Do not "clean up" this apparent dead code.  */
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS(x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
837\f
30f72379
MM
/* Return the cse_reg_info entry for register number REGNO, creating and
   initializing one if none exists yet.  Entries are kept in a splay tree
   keyed by register number; structures released by free_cse_reg_info are
   recycled through cse_reg_info_free_list.  */

static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info *cri;
  splay_tree_node n;

  /* See if we already have this entry.  */
  n = splay_tree_lookup (cse_reg_info_tree,
			 (splay_tree_key) regno);
  if (n)
    cri = (struct cse_reg_info *) (n->value);
  else
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  cri = cse_reg_info_free_list;
	  cse_reg_info_free_list = cri->variant.next;
	}
      else
	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Initialize it.  */
      cri->variant.reg_tick = 0;
      cri->reg_in_table = -1;	/* -1: no hash-table entry mentions it.  */
      cri->reg_qty = regno;	/* A reg's own number means "no quantity".  */

      splay_tree_insert (cse_reg_info_tree,
			 (splay_tree_key) regno,
			 (splay_tree_value) cri);
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = cri;

  return cri;
}
878
879static void
880free_cse_reg_info (v)
881 splay_tree_value v;
882{
883 struct cse_reg_info *cri = (struct cse_reg_info *) v;
884
885 cri->variant.next = cse_reg_info_free_list;
886 cse_reg_info_free_list = cri;
887}
888
7afe21cc
RK
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  /* Quantity numbers below max_reg are reserved, one per register.  */
  next_qty = max_reg;

  /* Throw away all per-register information from the previous block.
     Deleting the tree runs free_cse_reg_info on every entry, returning
     the structures to the free list.  NOTE(review): only
     cached_cse_reg_info is cleared, not cached_regno — presumably the
     cache-hit test checks the pointer first; confirm against the
     REG_QTY/cache macros defined earlier in this file.  */
  if (cse_reg_info_tree)
    {
      splay_tree_delete (cse_reg_info_tree);
      cached_cse_reg_info = 0;
    }

  cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
				      free_cse_reg_info);

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  /* Return every hash-table element to the free list, then empty the
     buckets themselves.  */
  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
931
932/* Say that register REG contains a quantity not in any register before
933 and initialize that quantity. */
934
935static void
936make_new_qty (reg)
937 register int reg;
938{
939 register int q;
940
941 if (next_qty >= max_qty)
942 abort ();
943
30f72379 944 q = REG_QTY (reg) = next_qty++;
7afe21cc
RK
945 qty_first_reg[q] = reg;
946 qty_last_reg[q] = reg;
947 qty_const[q] = qty_const_insn[q] = 0;
948 qty_comparison_code[q] = UNKNOWN;
949
950 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
951}
952
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.

   NEW is linked into OLD's quantity chain at a position determined by the
   canonical-ordering rules in the big conditional below.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* NEW becomes the new head (canonical register) of the chain.  */
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      /* Splice NEW in immediately after LASTR, fixing up the tail
	 pointer when LASTR was the last register of the quantity.  */
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
1014
1015/* Remove REG from its equivalence class. */
1016
1017static void
1018delete_reg_equiv (reg)
1019 register int reg;
1020{
30f72379 1021 register int q = REG_QTY (reg);
a4e262bc 1022 register int p, n;
7afe21cc 1023
a4e262bc 1024 /* If invalid, do nothing. */
7afe21cc
RK
1025 if (q == reg)
1026 return;
1027
a4e262bc
RK
1028 p = reg_prev_eqv[reg];
1029 n = reg_next_eqv[reg];
1030
7afe21cc
RK
1031 if (n != -1)
1032 reg_prev_eqv[n] = p;
1033 else
1034 qty_last_reg[q] = p;
1035 if (p != -1)
1036 reg_next_eqv[p] = n;
1037 else
1038 qty_first_reg[q] = n;
1039
30f72379 1040 REG_QTY (reg) = reg;
7afe21cc
RK
1041}
1042
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      /* For a hard register, process every hard register it overlaps;
	 a pseudo occupies just one slot.  */
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  /* A REG_IN_TABLE value behind REG_TICK means stale table
	     entries mention this register; purge them.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse over all rtx sub-expressions and vectors of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1150
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  /* Join an existing quantity if CLASSP's class already holds a
	     register of the same mode; otherwise start a new one.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1226\f
1227/* Look in or update the hash table. */
1228
/* Put the element ELT on the list of free elements.
   The next_same_hash field is reused as the free-list link.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1238
1239/* Return an element that is free for use. */
1240
1241static struct table_elt *
1242get_element ()
1243{
1244 struct table_elt *elt = free_element_chain;
1245 if (elt)
1246 {
1247 free_element_chain = elt->next_same_hash;
1248 return elt;
1249 }
1250 n_elements_made++;
1251 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1252}
1253
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT headed the class; every remaining member must be told
	   about the new head.  */
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      /* A chain of one element means no related values remain.  */
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
1329
1330/* Look up X in the hash table and return its table element,
1331 or 0 if X is not in the table.
1332
1333 MODE is the machine-mode of X, or if X is an integer constant
1334 with VOIDmode then MODE is the mode with which X will be used.
1335
1336 Here we are satisfied to find an expression whose tree structure
1337 looks like X. */
1338
1339static struct table_elt *
1340lookup (x, hash, mode)
1341 rtx x;
2197a88a 1342 unsigned hash;
7afe21cc
RK
1343 enum machine_mode mode;
1344{
1345 register struct table_elt *p;
1346
1347 for (p = table[hash]; p; p = p->next_same_hash)
1348 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1349 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1350 return p;
1351
1352 return 0;
1353}
1354
1355/* Like `lookup' but don't care whether the table element uses invalid regs.
1356 Also ignore discrepancies in the machine mode of a register. */
1357
1358static struct table_elt *
1359lookup_for_remove (x, hash, mode)
1360 rtx x;
2197a88a 1361 unsigned hash;
7afe21cc
RK
1362 enum machine_mode mode;
1363{
1364 register struct table_elt *p;
1365
1366 if (GET_CODE (x) == REG)
1367 {
1368 int regno = REGNO (x);
1369 /* Don't check the machine mode when comparing registers;
1370 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1371 for (p = table[hash]; p; p = p->next_same_hash)
1372 if (GET_CODE (p->exp) == REG
1373 && REGNO (p->exp) == regno)
1374 return p;
1375 }
1376 else
1377 {
1378 for (p = table[hash]; p; p = p->next_same_hash)
1379 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1380 return p;
1381 }
1382
1383 return 0;
1384}
1385
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      /* Copy before changing the mode so the caller's X is untouched.  */
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  /* Scan X's whole equivalence class for an entry with code CODE.  */
  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
1420
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  /* Link the new element at the head of bucket HASH.  */
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* Every member of the class must point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[REG_QTY (REGNO (classp->exp))]
	= gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      /* Search the class for a (non-register) constant member.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[REG_QTY (REGNO (x))]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
	   && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1612\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      /* Fetch the successor now; ELT is freed below.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  /* HASH sets these two flags as side effects; reset them so
	     they describe only this expression.  */
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  /* Quantity renumbering may change EXP's hash code.  */
	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
1669\f
01e752d3
JL
1670
1671/* Flush the entire hash table. */
1672
1673static void
1674flush_hash_table ()
1675{
1676 int i;
1677 struct table_elt *p;
1678
1679 for (i = 0; i < NBUCKETS; i++)
1680 for (p = table[i]; p; p = table[i])
1681 {
1682 /* Note that invalidate can remove elements
1683 after P in the current hash chain. */
1684 if (GET_CODE (p->exp) == REG)
1685 invalidate (p->exp, p->mode);
1686 else
1687 remove_from_table (p, i);
1688 }
1689}
1690
1691
7afe21cc
RK
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      REG_TICK (regno)++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Because a register can be referenced in more than one mode,
	     we might have to remove more than one table entry.  */

	  struct table_elt *elt;

	  while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	    remove_from_table (elt, hash);
	}
      else
	{
	  /* Track whether any of the affected hard registers actually
	     appears in the table, to avoid the bucket scan when not.  */
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      REG_TICK (i)++;
	    }

	  /* Remove every REG entry whose hard registers overlap the
	     range [regno, endregno).  */
	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* If X is a parallel, invalidate all of its elements.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;
    }

  /* If X is an expr_list, this is part of a disjoint return value;
     extract the location in question ignoring the offset.  */

  if (GET_CODE (x) == EXPR_LIST)
    {
      invalidate (XEXP (x, 0), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
	     than checking all the aliases).  */
	  if (p->in_memory
	      && (GET_CODE (p->exp) != MEM
		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
	    remove_from_table (p, i);
	}
    }
}
1837
1838/* Remove all expressions that refer to register REGNO,
1839 since they are already invalid, and we are about to
1840 mark that register valid again and don't want the old
1841 expressions to reappear as valid. */
1842
1843static void
1844remove_invalid_refs (regno)
1845 int regno;
1846{
1847 register int i;
1848 register struct table_elt *p, *next;
1849
1850 for (i = 0; i < NBUCKETS; i++)
1851 for (p = table[i]; p; p = next)
1852 {
1853 next = p->next_same_hash;
1854 if (GET_CODE (p->exp) != REG
906c4e36 1855 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
7afe21cc
RK
1856 remove_from_table (p, i);
1857 }
1858}
34c73909
R
1859
1860/* Likewise for a subreg with subreg_reg WORD and mode MODE. */
1861static void
1862remove_invalid_subreg_refs (regno, word, mode)
1863 int regno;
1864 int word;
1865 enum machine_mode mode;
1866{
1867 register int i;
1868 register struct table_elt *p, *next;
1869 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1870
1871 for (i = 0; i < NBUCKETS; i++)
1872 for (p = table[i]; p; p = next)
1873 {
1874 rtx exp;
1875 next = p->next_same_hash;
1876
1877 exp = p->exp;
1878 if (GET_CODE (p->exp) != REG
1879 && (GET_CODE (exp) != SUBREG
1880 || GET_CODE (SUBREG_REG (exp)) != REG
1881 || REGNO (SUBREG_REG (exp)) != regno
1882 || (((SUBREG_WORD (exp)
1883 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1884 >= word)
1885 && SUBREG_WORD (exp) <= end))
1886 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1887 remove_from_table (p, i);
1888 }
1889}
7afe21cc
RK
1890\f
1891/* Recompute the hash codes of any valid entries in the hash table that
1892 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1893
1894 This is called when we make a jump equivalence. */
1895
1896static void
1897rehash_using_reg (x)
1898 rtx x;
1899{
973838fd 1900 unsigned int i;
7afe21cc 1901 struct table_elt *p, *next;
2197a88a 1902 unsigned hash;
7afe21cc
RK
1903
1904 if (GET_CODE (x) == SUBREG)
1905 x = SUBREG_REG (x);
1906
1907 /* If X is not a register or if the register is known not to be in any
1908 valid entries in the table, we have no work to do. */
1909
1910 if (GET_CODE (x) != REG
30f72379
MM
1911 || REG_IN_TABLE (REGNO (x)) < 0
1912 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
7afe21cc
RK
1913 return;
1914
1915 /* Scan all hash chains looking for valid entries that mention X.
1916 If we find one and it is in the wrong hash chain, move it. We can skip
1917 objects that are registers, since they are handled specially. */
1918
1919 for (i = 0; i < NBUCKETS; i++)
1920 for (p = table[i]; p; p = next)
1921 {
1922 next = p->next_same_hash;
1923 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
538b78e7 1924 && exp_equiv_p (p->exp, p->exp, 1, 0)
7afe21cc
RK
1925 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1926 {
1927 if (p->next_same_hash)
1928 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1929
1930 if (p->prev_same_hash)
1931 p->prev_same_hash->next_same_hash = p->next_same_hash;
1932 else
1933 table[i] = p->next_same_hash;
1934
1935 p->next_same_hash = table[hash];
1936 p->prev_same_hash = 0;
1937 if (table[hash])
1938 table[hash]->prev_same_hash = p;
1939 table[hash] = p;
1940 }
1941 }
1942}
1943\f
7afe21cc
RK
1944/* Remove from the hash table any expression that is a call-clobbered
1945 register. Also update their TICK values. */
1946
1947static void
1948invalidate_for_call ()
1949{
1950 int regno, endregno;
1951 int i;
2197a88a 1952 unsigned hash;
7afe21cc
RK
1953 struct table_elt *p, *next;
1954 int in_table = 0;
1955
1956 /* Go through all the hard registers. For each that is clobbered in
1957 a CALL_INSN, remove the register from quantity chains and update
1958 reg_tick if defined. Also see if any of these registers is currently
1959 in the table. */
1960
1961 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1962 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1963 {
1964 delete_reg_equiv (regno);
30f72379
MM
1965 if (REG_TICK (regno) >= 0)
1966 REG_TICK (regno)++;
7afe21cc 1967
0e227018 1968 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
7afe21cc
RK
1969 }
1970
1971 /* In the case where we have no call-clobbered hard registers in the
1972 table, we are done. Otherwise, scan the table and remove any
1973 entry that overlaps a call-clobbered register. */
1974
1975 if (in_table)
1976 for (hash = 0; hash < NBUCKETS; hash++)
1977 for (p = table[hash]; p; p = next)
1978 {
1979 next = p->next_same_hash;
1980
9ae8ffe7
JL
1981 if (p->in_memory)
1982 {
1983 remove_from_table (p, hash);
1984 continue;
1985 }
1986
7afe21cc
RK
1987 if (GET_CODE (p->exp) != REG
1988 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1989 continue;
1990
1991 regno = REGNO (p->exp);
1992 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1993
1994 for (i = regno; i < endregno; i++)
1995 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1996 {
1997 remove_from_table (p, hash);
1998 break;
1999 }
2000 }
2001}
2002\f
2003/* Given an expression X of type CONST,
2004 and ELT which is its table entry (or 0 if it
2005 is not in the hash table),
2006 return an alternate expression for X as a register plus integer.
2007 If none can be found, return 0. */
2008
2009static rtx
2010use_related_value (x, elt)
2011 rtx x;
2012 struct table_elt *elt;
2013{
2014 register struct table_elt *relt = 0;
2015 register struct table_elt *p, *q;
906c4e36 2016 HOST_WIDE_INT offset;
7afe21cc
RK
2017
2018 /* First, is there anything related known?
2019 If we have a table element, we can tell from that.
2020 Otherwise, must look it up. */
2021
2022 if (elt != 0 && elt->related_value != 0)
2023 relt = elt;
2024 else if (elt == 0 && GET_CODE (x) == CONST)
2025 {
2026 rtx subexp = get_related_value (x);
2027 if (subexp != 0)
2028 relt = lookup (subexp,
2029 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2030 GET_MODE (subexp));
2031 }
2032
2033 if (relt == 0)
2034 return 0;
2035
2036 /* Search all related table entries for one that has an
2037 equivalent register. */
2038
2039 p = relt;
2040 while (1)
2041 {
2042 /* This loop is strange in that it is executed in two different cases.
2043 The first is when X is already in the table. Then it is searching
2044 the RELATED_VALUE list of X's class (RELT). The second case is when
2045 X is not in the table. Then RELT points to a class for the related
2046 value.
2047
2048 Ensure that, whatever case we are in, that we ignore classes that have
2049 the same value as X. */
2050
2051 if (rtx_equal_p (x, p->exp))
2052 q = 0;
2053 else
2054 for (q = p->first_same_value; q; q = q->next_same_value)
2055 if (GET_CODE (q->exp) == REG)
2056 break;
2057
2058 if (q)
2059 break;
2060
2061 p = p->related_value;
2062
2063 /* We went all the way around, so there is nothing to be found.
2064 Alternatively, perhaps RELT was in the table for some other reason
2065 and it has no related values recorded. */
2066 if (p == relt || p == 0)
2067 break;
2068 }
2069
2070 if (q == 0)
2071 return 0;
2072
2073 offset = (get_integer_term (x) - get_integer_term (p->exp));
2074 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2075 return plus_constant (q->exp, offset);
2076}
2077\f
2078/* Hash an rtx. We are careful to make sure the value is never negative.
2079 Equivalent registers hash identically.
2080 MODE is used in hashing for CONST_INTs only;
2081 otherwise the mode of X is used.
2082
2083 Store 1 in do_not_record if any subexpression is volatile.
2084
2085 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2086 which does not have the RTX_UNCHANGING_P bit set.
2087 In this case, also store 1 in hash_arg_in_struct
2088 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2089
2090 Note that cse_insn knows that the hash code of a MEM expression
2091 is just (int) MEM plus the hash code of the address. */
2092
2197a88a 2093static unsigned
7afe21cc
RK
2094canon_hash (x, mode)
2095 rtx x;
2096 enum machine_mode mode;
2097{
2098 register int i, j;
2197a88a 2099 register unsigned hash = 0;
7afe21cc
RK
2100 register enum rtx_code code;
2101 register char *fmt;
2102
2103 /* repeat is used to turn tail-recursion into iteration. */
2104 repeat:
2105 if (x == 0)
2106 return hash;
2107
2108 code = GET_CODE (x);
2109 switch (code)
2110 {
2111 case REG:
2112 {
2113 register int regno = REGNO (x);
2114
2115 /* On some machines, we can't record any non-fixed hard register,
2116 because extending its life will cause reload problems. We
9a794e50
RH
2117 consider ap, fp, and sp to be fixed for this purpose.
2118
2119 We also consider CCmode registers to be fixed for this purpose;
2120 failure to do so leads to failure to simplify 0<100 type of
2121 conditionals.
2122
0f41302f 2123 On all machines, we can't record any global registers. */
7afe21cc
RK
2124
2125 if (regno < FIRST_PSEUDO_REGISTER
2126 && (global_regs[regno]
f95182a4
ILT
2127 || (SMALL_REGISTER_CLASSES
2128 && ! fixed_regs[regno]
7afe21cc 2129 && regno != FRAME_POINTER_REGNUM
8bc169f2 2130 && regno != HARD_FRAME_POINTER_REGNUM
7afe21cc 2131 && regno != ARG_POINTER_REGNUM
9a794e50
RH
2132 && regno != STACK_POINTER_REGNUM
2133 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
7afe21cc
RK
2134 {
2135 do_not_record = 1;
2136 return 0;
2137 }
30f72379 2138 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2197a88a 2139 return hash;
7afe21cc
RK
2140 }
2141
34c73909
R
2142 /* We handle SUBREG of a REG specially because the underlying
2143 reg changes its hash value with every value change; we don't
2144 want to have to forget unrelated subregs when one subreg changes. */
2145 case SUBREG:
2146 {
2147 if (GET_CODE (SUBREG_REG (x)) == REG)
2148 {
2149 hash += (((unsigned) SUBREG << 7)
2150 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2151 return hash;
2152 }
2153 break;
2154 }
2155
7afe21cc 2156 case CONST_INT:
2197a88a
RK
2157 {
2158 unsigned HOST_WIDE_INT tem = INTVAL (x);
2159 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2160 return hash;
2161 }
7afe21cc
RK
2162
2163 case CONST_DOUBLE:
2164 /* This is like the general case, except that it only counts
2165 the integers representing the constant. */
2197a88a 2166 hash += (unsigned) code + (unsigned) GET_MODE (x);
969c8517
RK
2167 if (GET_MODE (x) != VOIDmode)
2168 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2169 {
2170 unsigned tem = XINT (x, i);
2171 hash += tem;
2172 }
2173 else
2174 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2175 + (unsigned) CONST_DOUBLE_HIGH (x));
7afe21cc
RK
2176 return hash;
2177
2178 /* Assume there is only one rtx object for any given label. */
2179 case LABEL_REF:
3c543775 2180 hash
7bcac048 2181 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2197a88a 2182 return hash;
7afe21cc
RK
2183
2184 case SYMBOL_REF:
3c543775 2185 hash
7bcac048 2186 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2197a88a 2187 return hash;
7afe21cc
RK
2188
2189 case MEM:
2190 if (MEM_VOLATILE_P (x))
2191 {
2192 do_not_record = 1;
2193 return 0;
2194 }
9ad91d71 2195 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
7afe21cc
RK
2196 {
2197 hash_arg_in_memory = 1;
2198 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2199 }
2200 /* Now that we have already found this special case,
2201 might as well speed it up as much as possible. */
2197a88a 2202 hash += (unsigned) MEM;
7afe21cc
RK
2203 x = XEXP (x, 0);
2204 goto repeat;
2205
2206 case PRE_DEC:
2207 case PRE_INC:
2208 case POST_DEC:
2209 case POST_INC:
2210 case PC:
2211 case CC0:
2212 case CALL:
2213 case UNSPEC_VOLATILE:
2214 do_not_record = 1;
2215 return 0;
2216
2217 case ASM_OPERANDS:
2218 if (MEM_VOLATILE_P (x))
2219 {
2220 do_not_record = 1;
2221 return 0;
2222 }
e9a25f70
JL
2223 break;
2224
2225 default:
2226 break;
7afe21cc
RK
2227 }
2228
2229 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2230 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2231 fmt = GET_RTX_FORMAT (code);
2232 for (; i >= 0; i--)
2233 {
2234 if (fmt[i] == 'e')
2235 {
2236 rtx tem = XEXP (x, i);
7afe21cc
RK
2237
2238 /* If we are about to do the last recursive call
2239 needed at this level, change it into iteration.
2240 This function is called enough to be worth it. */
2241 if (i == 0)
2242 {
2243 x = tem;
2244 goto repeat;
2245 }
2246 hash += canon_hash (tem, 0);
2247 }
2248 else if (fmt[i] == 'E')
2249 for (j = 0; j < XVECLEN (x, i); j++)
2250 hash += canon_hash (XVECEXP (x, i, j), 0);
2251 else if (fmt[i] == 's')
2252 {
2197a88a 2253 register unsigned char *p = (unsigned char *) XSTR (x, i);
7afe21cc
RK
2254 if (p)
2255 while (*p)
2197a88a 2256 hash += *p++;
7afe21cc
RK
2257 }
2258 else if (fmt[i] == 'i')
2259 {
2197a88a
RK
2260 register unsigned tem = XINT (x, i);
2261 hash += tem;
7afe21cc 2262 }
e9a25f70
JL
2263 else if (fmt[i] == '0')
2264 /* unused */;
7afe21cc
RK
2265 else
2266 abort ();
2267 }
2268 return hash;
2269}
2270
2271/* Like canon_hash but with no side effects. */
2272
2197a88a 2273static unsigned
7afe21cc
RK
2274safe_hash (x, mode)
2275 rtx x;
2276 enum machine_mode mode;
2277{
2278 int save_do_not_record = do_not_record;
2279 int save_hash_arg_in_memory = hash_arg_in_memory;
2280 int save_hash_arg_in_struct = hash_arg_in_struct;
2197a88a 2281 unsigned hash = canon_hash (x, mode);
7afe21cc
RK
2282 hash_arg_in_memory = save_hash_arg_in_memory;
2283 hash_arg_in_struct = save_hash_arg_in_struct;
2284 do_not_record = save_do_not_record;
2285 return hash;
2286}
2287\f
2288/* Return 1 iff X and Y would canonicalize into the same thing,
2289 without actually constructing the canonicalization of either one.
2290 If VALIDATE is nonzero,
2291 we assume X is an expression being processed from the rtl
2292 and Y was found in the hash table. We check register refs
2293 in Y for being marked as valid.
2294
2295 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2296 that is known to be in the register. Ordinarily, we don't allow them
2297 to match, because letting them match would cause unpredictable results
2298 in all the places that search a hash table chain for an equivalent
2299 for a given value. A possible equivalent that has different structure
2300 has its hash code computed from different data. Whether the hash code
38e01259 2301 is the same as that of the given value is pure luck. */
7afe21cc
RK
2302
2303static int
2304exp_equiv_p (x, y, validate, equal_values)
2305 rtx x, y;
2306 int validate;
2307 int equal_values;
2308{
906c4e36 2309 register int i, j;
7afe21cc
RK
2310 register enum rtx_code code;
2311 register char *fmt;
2312
2313 /* Note: it is incorrect to assume an expression is equivalent to itself
2314 if VALIDATE is nonzero. */
2315 if (x == y && !validate)
2316 return 1;
2317 if (x == 0 || y == 0)
2318 return x == y;
2319
2320 code = GET_CODE (x);
2321 if (code != GET_CODE (y))
2322 {
2323 if (!equal_values)
2324 return 0;
2325
2326 /* If X is a constant and Y is a register or vice versa, they may be
2327 equivalent. We only have to validate if Y is a register. */
2328 if (CONSTANT_P (x) && GET_CODE (y) == REG
2329 && REGNO_QTY_VALID_P (REGNO (y))
30f72379
MM
2330 && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2331 && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2332 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
7afe21cc
RK
2333 return 1;
2334
2335 if (CONSTANT_P (y) && code == REG
2336 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2337 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2338 && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
7afe21cc
RK
2339 return 1;
2340
2341 return 0;
2342 }
2343
2344 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2345 if (GET_MODE (x) != GET_MODE (y))
2346 return 0;
2347
2348 switch (code)
2349 {
2350 case PC:
2351 case CC0:
2352 return x == y;
2353
2354 case CONST_INT:
58c8c593 2355 return INTVAL (x) == INTVAL (y);
7afe21cc
RK
2356
2357 case LABEL_REF:
7afe21cc
RK
2358 return XEXP (x, 0) == XEXP (y, 0);
2359
f54d4924
RK
2360 case SYMBOL_REF:
2361 return XSTR (x, 0) == XSTR (y, 0);
2362
7afe21cc
RK
2363 case REG:
2364 {
2365 int regno = REGNO (y);
2366 int endregno
2367 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2368 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2369 int i;
2370
2371 /* If the quantities are not the same, the expressions are not
2372 equivalent. If there are and we are not to validate, they
2373 are equivalent. Otherwise, ensure all regs are up-to-date. */
2374
30f72379 2375 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
7afe21cc
RK
2376 return 0;
2377
2378 if (! validate)
2379 return 1;
2380
2381 for (i = regno; i < endregno; i++)
30f72379 2382 if (REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
2383 return 0;
2384
2385 return 1;
2386 }
2387
2388 /* For commutative operations, check both orders. */
2389 case PLUS:
2390 case MULT:
2391 case AND:
2392 case IOR:
2393 case XOR:
2394 case NE:
2395 case EQ:
2396 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2397 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2398 validate, equal_values))
2399 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2400 validate, equal_values)
2401 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2402 validate, equal_values)));
e9a25f70
JL
2403
2404 default:
2405 break;
7afe21cc
RK
2406 }
2407
2408 /* Compare the elements. If any pair of corresponding elements
2409 fail to match, return 0 for the whole things. */
2410
2411 fmt = GET_RTX_FORMAT (code);
2412 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2413 {
906c4e36 2414 switch (fmt[i])
7afe21cc 2415 {
906c4e36 2416 case 'e':
7afe21cc
RK
2417 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2418 return 0;
906c4e36
RK
2419 break;
2420
2421 case 'E':
7afe21cc
RK
2422 if (XVECLEN (x, i) != XVECLEN (y, i))
2423 return 0;
2424 for (j = 0; j < XVECLEN (x, i); j++)
2425 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2426 validate, equal_values))
2427 return 0;
906c4e36
RK
2428 break;
2429
2430 case 's':
7afe21cc
RK
2431 if (strcmp (XSTR (x, i), XSTR (y, i)))
2432 return 0;
906c4e36
RK
2433 break;
2434
2435 case 'i':
7afe21cc
RK
2436 if (XINT (x, i) != XINT (y, i))
2437 return 0;
906c4e36
RK
2438 break;
2439
2440 case 'w':
2441 if (XWINT (x, i) != XWINT (y, i))
2442 return 0;
2443 break;
2444
2445 case '0':
2446 break;
2447
2448 default:
2449 abort ();
7afe21cc 2450 }
906c4e36
RK
2451 }
2452
7afe21cc
RK
2453 return 1;
2454}
2455\f
2456/* Return 1 iff any subexpression of X matches Y.
2457 Here we do not require that X or Y be valid (for registers referred to)
2458 for being in the hash table. */
2459
6cd4575e 2460static int
7afe21cc
RK
2461refers_to_p (x, y)
2462 rtx x, y;
2463{
2464 register int i;
2465 register enum rtx_code code;
2466 register char *fmt;
2467
2468 repeat:
2469 if (x == y)
2470 return 1;
2471 if (x == 0 || y == 0)
2472 return 0;
2473
2474 code = GET_CODE (x);
2475 /* If X as a whole has the same code as Y, they may match.
2476 If so, return 1. */
2477 if (code == GET_CODE (y))
2478 {
2479 if (exp_equiv_p (x, y, 0, 1))
2480 return 1;
2481 }
2482
2483 /* X does not match, so try its subexpressions. */
2484
2485 fmt = GET_RTX_FORMAT (code);
2486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2487 if (fmt[i] == 'e')
2488 {
2489 if (i == 0)
2490 {
2491 x = XEXP (x, 0);
2492 goto repeat;
2493 }
2494 else
2495 if (refers_to_p (XEXP (x, i), y))
2496 return 1;
2497 }
2498 else if (fmt[i] == 'E')
2499 {
2500 int j;
2501 for (j = 0; j < XVECLEN (x, i); j++)
2502 if (refers_to_p (XVECEXP (x, i, j), y))
2503 return 1;
2504 }
2505
2506 return 0;
2507}
2508\f
f451db89
JL
2509/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2510 set PBASE, PSTART, and PEND which correspond to the base of the address,
2511 the starting offset, and ending offset respectively.
2512
bb4034b3 2513 ADDR is known to be a nonvarying address. */
f451db89 2514
bb4034b3
JW
2515/* ??? Despite what the comments say, this function is in fact frequently
2516 passed varying addresses. This does not appear to cause any problems. */
f451db89
JL
2517
2518static void
2519set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2520 rtx addr;
2521 int size;
2522 rtx *pbase;
6500fb43 2523 HOST_WIDE_INT *pstart, *pend;
f451db89
JL
2524{
2525 rtx base;
c85663b1 2526 HOST_WIDE_INT start, end;
f451db89
JL
2527
2528 base = addr;
2529 start = 0;
2530 end = 0;
2531
e5e809f4
JL
2532 if (flag_pic && GET_CODE (base) == PLUS
2533 && XEXP (base, 0) == pic_offset_table_rtx)
2534 base = XEXP (base, 1);
2535
f451db89
JL
2536 /* Registers with nonvarying addresses usually have constant equivalents;
2537 but the frame pointer register is also possible. */
2538 if (GET_CODE (base) == REG
2539 && qty_const != 0
2540 && REGNO_QTY_VALID_P (REGNO (base))
30f72379
MM
2541 && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2542 && qty_const[REG_QTY (REGNO (base))] != 0)
2543 base = qty_const[REG_QTY (REGNO (base))];
f451db89
JL
2544 else if (GET_CODE (base) == PLUS
2545 && GET_CODE (XEXP (base, 1)) == CONST_INT
2546 && GET_CODE (XEXP (base, 0)) == REG
2547 && qty_const != 0
2548 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2549 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
f451db89 2550 == GET_MODE (XEXP (base, 0)))
30f72379 2551 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
f451db89
JL
2552 {
2553 start = INTVAL (XEXP (base, 1));
30f72379 2554 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
f451db89 2555 }
9c6b0bae 2556 /* This can happen as the result of virtual register instantiation,
abc95ed3 2557 if the initial offset is too large to be a valid address. */
9c6b0bae
RK
2558 else if (GET_CODE (base) == PLUS
2559 && GET_CODE (XEXP (base, 0)) == REG
2560 && GET_CODE (XEXP (base, 1)) == REG
2561 && qty_const != 0
2562 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2563 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2564 == GET_MODE (XEXP (base, 0)))
30f72379 2565 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2566 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
30f72379 2567 && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
9c6b0bae 2568 == GET_MODE (XEXP (base, 1)))
30f72379 2569 && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
9c6b0bae 2570 {
30f72379
MM
2571 rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2572 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
9c6b0bae
RK
2573
2574 /* One of the two values must be a constant. */
2575 if (GET_CODE (base) != CONST_INT)
2576 {
2577 if (GET_CODE (tem) != CONST_INT)
2578 abort ();
2579 start = INTVAL (tem);
2580 }
2581 else
2582 {
2583 start = INTVAL (base);
2584 base = tem;
2585 }
2586 }
f451db89 2587
c85663b1
RK
2588 /* Handle everything that we can find inside an address that has been
2589 viewed as constant. */
f451db89 2590
c85663b1 2591 while (1)
f451db89 2592 {
c85663b1
RK
2593 /* If no part of this switch does a "continue", the code outside
2594 will exit this loop. */
2595
2596 switch (GET_CODE (base))
2597 {
2598 case LO_SUM:
2599 /* By definition, operand1 of a LO_SUM is the associated constant
2600 address. Use the associated constant address as the base
2601 instead. */
2602 base = XEXP (base, 1);
2603 continue;
2604
2605 case CONST:
2606 /* Strip off CONST. */
2607 base = XEXP (base, 0);
2608 continue;
2609
2610 case PLUS:
2611 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2612 {
2613 start += INTVAL (XEXP (base, 1));
2614 base = XEXP (base, 0);
2615 continue;
2616 }
2617 break;
2618
2619 case AND:
2620 /* Handle the case of an AND which is the negative of a power of
2621 two. This is used to represent unaligned memory operations. */
2622 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2623 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2624 {
2625 set_nonvarying_address_components (XEXP (base, 0), size,
2626 pbase, pstart, pend);
2627
2628 /* Assume the worst misalignment. START is affected, but not
2629 END, so compensate but adjusting SIZE. Don't lose any
2630 constant we already had. */
2631
2632 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
89046535
RK
2633 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2634 end += *pend;
c85663b1
RK
2635 base = *pbase;
2636 }
2637 break;
e9a25f70
JL
2638
2639 default:
2640 break;
c85663b1
RK
2641 }
2642
2643 break;
f451db89
JL
2644 }
2645
336d6f0a
RK
2646 if (GET_CODE (base) == CONST_INT)
2647 {
2648 start += INTVAL (base);
2649 base = const0_rtx;
2650 }
2651
f451db89
JL
2652 end = start + size;
2653
2654 /* Set the return values. */
2655 *pbase = base;
2656 *pstart = start;
2657 *pend = end;
2658}
2659
9ae8ffe7
JL
2660/* Return 1 if X has a value that can vary even between two
2661 executions of the program. 0 means X can be compared reliably
2662 against certain constants or near-constants. */
7afe21cc
RK
2663
2664static int
9ae8ffe7
JL
2665cse_rtx_varies_p (x)
2666 register rtx x;
7afe21cc
RK
2667{
2668 /* We need not check for X and the equivalence class being of the same
2669 mode because if X is equivalent to a constant in some mode, it
2670 doesn't vary in any mode. */
2671
9ae8ffe7
JL
2672 if (GET_CODE (x) == REG
2673 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2674 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2675 && qty_const[REG_QTY (REGNO (x))] != 0)
7afe21cc
RK
2676 return 0;
2677
9ae8ffe7
JL
2678 if (GET_CODE (x) == PLUS
2679 && GET_CODE (XEXP (x, 1)) == CONST_INT
2680 && GET_CODE (XEXP (x, 0)) == REG
2681 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2682 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2683 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2684 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
7afe21cc
RK
2685 return 0;
2686
9c6b0bae
RK
2687 /* This can happen as the result of virtual register instantiation, if
2688 the initial constant is too large to be a valid address. This gives
2689 us a three instruction sequence, load large offset into a register,
2690 load fp minus a constant into a register, then a MEM which is the
2691 sum of the two `constant' registers. */
9ae8ffe7
JL
2692 if (GET_CODE (x) == PLUS
2693 && GET_CODE (XEXP (x, 0)) == REG
2694 && GET_CODE (XEXP (x, 1)) == REG
2695 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2696 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2697 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2698 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
9ae8ffe7
JL
2699 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2700 && (GET_MODE (XEXP (x, 1))
30f72379
MM
2701 == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2702 && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
9c6b0bae
RK
2703 return 0;
2704
9ae8ffe7 2705 return rtx_varies_p (x);
7afe21cc
RK
2706}
2707\f
2708/* Canonicalize an expression:
2709 replace each register reference inside it
2710 with the "oldest" equivalent register.
2711
2712 If INSN is non-zero and we are replacing a pseudo with a hard register
7722328e
RK
2713 or vice versa, validate_change is used to ensure that INSN remains valid
2714 after we make our substitution. The calls are made with IN_GROUP non-zero
2715 so apply_change_group must be called upon the outermost return from this
2716 function (unless INSN is zero). The result of apply_change_group can
2717 generally be discarded since the changes we are making are optional. */
7afe21cc
RK
2718
2719static rtx
2720canon_reg (x, insn)
2721 rtx x;
2722 rtx insn;
2723{
2724 register int i;
2725 register enum rtx_code code;
2726 register char *fmt;
2727
2728 if (x == 0)
2729 return x;
2730
2731 code = GET_CODE (x);
2732 switch (code)
2733 {
2734 case PC:
2735 case CC0:
2736 case CONST:
2737 case CONST_INT:
2738 case CONST_DOUBLE:
2739 case SYMBOL_REF:
2740 case LABEL_REF:
2741 case ADDR_VEC:
2742 case ADDR_DIFF_VEC:
2743 return x;
2744
2745 case REG:
2746 {
2747 register int first;
2748
2749 /* Never replace a hard reg, because hard regs can appear
2750 in more than one machine mode, and we must preserve the mode
2751 of each occurrence. Also, some hard regs appear in
2752 MEMs that are shared and mustn't be altered. Don't try to
2753 replace any reg that maps to a reg of class NO_REGS. */
2754 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2755 || ! REGNO_QTY_VALID_P (REGNO (x)))
2756 return x;
2757
30f72379 2758 first = qty_first_reg[REG_QTY (REGNO (x))];
7afe21cc
RK
2759 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2760 : REGNO_REG_CLASS (first) == NO_REGS ? x
30f72379 2761 : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
7afe21cc 2762 }
e9a25f70
JL
2763
2764 default:
2765 break;
7afe21cc
RK
2766 }
2767
2768 fmt = GET_RTX_FORMAT (code);
2769 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2770 {
2771 register int j;
2772
2773 if (fmt[i] == 'e')
2774 {
2775 rtx new = canon_reg (XEXP (x, i), insn);
58873255 2776 int insn_code;
7afe21cc
RK
2777
2778 /* If replacing pseudo with hard reg or vice versa, ensure the
178c39f6 2779 insn remains valid. Likewise if the insn has MATCH_DUPs. */
aee9dc31
RS
2780 if (insn != 0 && new != 0
2781 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
178c39f6
RK
2782 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2783 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
58873255
RK
2784 || (insn_code = recog_memoized (insn)) < 0
2785 || insn_n_dups[insn_code] > 0))
77fa0940 2786 validate_change (insn, &XEXP (x, i), new, 1);
7afe21cc
RK
2787 else
2788 XEXP (x, i) = new;
2789 }
2790 else if (fmt[i] == 'E')
2791 for (j = 0; j < XVECLEN (x, i); j++)
2792 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2793 }
2794
2795 return x;
2796}
2797\f
/* LOC is a location within INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
   costs of various addresses.  For two addresses of equal cost, choose the one
   with the highest `rtx_cost' value as that has the potential of eliminating
   the most insns.  For equal costs, we choose the first in the equivalence
   class.  Note that we ignore the fact that pseudo registers are cheaper
   than hard registers here because we would also prefer the pseudo registers.

   On success the replacement is installed through validate_change; on
   failure *LOC is left untouched.  NOTE(review): this routine reads and
   temporarily clobbers the CSE globals do_not_record, hash_arg_in_memory
   and hash_arg_in_struct, restoring the saved values afterward.  */

static void
find_best_addr (insn, loc)
     rtx insn;
     rtx *loc;
{
  struct table_elt *elt;
  rtx addr = *loc;
#ifdef ADDRESS_COST
  struct table_elt *p;
  int found_better = 1;
#endif
  /* Save the hashing-related globals; HASH below may set them as a side
     effect and we must not disturb our caller's view of them.  */
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  int addr_volatile;
  int regno;
  unsigned hash;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && GET_CODE (XEXP (addr, 0)) == REG
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
	   || regno == ARG_POINTER_REGNUM))
      || (GET_CODE (addr) == REG
	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
	      || regno == ARG_POINTER_REGNUM))
      || GET_CODE (addr) == ADDRESSOF
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (GET_CODE (addr) != REG)
    {
      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);

      /* Accept the folded form only if it is cheaper (by ADDRESS_COST when
	 available, otherwise by rtx_cost) AND the insn stays valid.  */
      if (1
#ifdef ADDRESS_COST
	  && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
	      || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
		  && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
#else
	  && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
#endif
	  && validate_change (insn, loc, folded, 0))
	addr = folded;
    }

  /* If this address is not in the hash table, we can't look for equivalences
     of the whole address.  Also, ignore if volatile.  */

  do_not_record = 0;
  hash = HASH (addr, Pmode);
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash, Pmode);

#ifndef ADDRESS_COST
  if (elt)
    {
      int our_cost = elt->cost;

      /* Find the lowest cost below ours that works.  */
      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->cost < our_cost
	    && (GET_CODE (elt->exp) == REG
		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
	    && validate_change (insn, loc,
				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
	  return;
    }
#else

  if (elt)
    {
      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
	  int best_rtx_cost = (elt->cost + 1) >> 1;
	  struct table_elt *best_elt = elt;

	  found_better = 0;
	  for (p = elt->first_same_value; p; p = p->next_same_value)
	    if (! p->flag)
	      {
		if ((GET_CODE (p->exp) == REG
		     || exp_equiv_p (p->exp, p->exp, 1, 0))
		    && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
			|| (CSE_ADDRESS_COST (p->exp) == best_addr_cost
			    && (p->cost + 1) >> 1 > best_rtx_cost)))
		  {
		    found_better = 1;
		    best_addr_cost = CSE_ADDRESS_COST (p->exp);
		    best_rtx_cost = (p->cost + 1) >> 1;
		    best_elt = p;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_elt->exp),
					      NULL_RTX), 0))
		return;
	      else
		/* Mark this candidate invalid so the next pass skips it.  */
		best_elt->flag = 1;
	    }
	}
    }

  /* If the address is a binary operation with the first operand a register
     and the second a constant, do the same as above, but looking for
     equivalences of the register.  Then try to simplify before checking for
     the best address to use.  This catches a few cases:  First is when we
     have REG+const and the register is another REG+const.  We can often merge
     the constants and eliminate one insn and one register.  It may also be
     that a machine has a cheap REG+REG+const.  Finally, this improves the
     code on the Alpha for unaligned byte stores.  */

  if (flag_expensive_optimizations
      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
      && GET_CODE (XEXP (*loc, 0)) == REG
      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
    {
      rtx c = XEXP (*loc, 1);

      do_not_record = 0;
      hash = HASH (XEXP (*loc, 0), Pmode);
      do_not_record = save_do_not_record;
      hash_arg_in_memory = save_hash_arg_in_memory;
      hash_arg_in_struct = save_hash_arg_in_struct;

      elt = lookup (XEXP (*loc, 0), hash, Pmode);
      if (elt == 0)
	return;

      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
	  struct table_elt *best_elt = elt;
	  rtx best_rtx = *loc;
	  int count;

	  /* This is at worst case an O(n^2) algorithm, so limit our search
	     to the first 32 elements on the list.  This avoids trouble
	     compiling code with very long basic blocks that can easily
	     call cse_gen_binary so many times that we run out of memory.  */

	  found_better = 0;
	  for (p = elt->first_same_value, count = 0;
	       p && count < 32;
	       p = p->next_same_value, count++)
	    if (! p->flag
		&& (GET_CODE (p->exp) == REG
		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
	      {
		rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);

		if ((CSE_ADDRESS_COST (new) < best_addr_cost
		    || (CSE_ADDRESS_COST (new) == best_addr_cost
			&& (COST (new) + 1) >> 1 > best_rtx_cost)))
		  {
		    found_better = 1;
		    best_addr_cost = CSE_ADDRESS_COST (new);
		    best_rtx_cost = (COST (new) + 1) >> 1;
		    best_elt = p;
		    best_rtx = new;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_rtx),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }
#endif
}
3034\f
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   *PMODE1 and *PMODE2 receive the modes of the operands as they were
   BEFORE the final fold_rtx calls (fold_rtx may turn an operand into a
   VOIDmode const_int, at which point the mode would be lost).

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (code, parg1, parg2, pmode1, pmode2)
     enum rtx_code code;
     rtx *parg1, *parg2;
     enum machine_mode *pmode1, *pmode2;
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  Each loop
     iteration replaces (ARG1, ARG2, CODE) by an equivalent comparison one
     step further back; we stop when no further equivalence is found.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set non-zero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
	{
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && FLOAT_STORE_FLAG_VALUE < 0)
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
		    GET_MODE (arg1));
      if (p) p = p->first_same_value;

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& FLOAT_STORE_FLAG_VALUE < 0)
#endif
		    )
		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is fp + constant, the equivalent is a better operand since
	     it may let us predict the value of the comparison.  */
	  else if (NONZERO_BASE_PLUS_P (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
	code = GET_CODE (x);

      /* An EQ-style equivalence means the sense of the test is inverted.  */
      if (reverse_code)
	code = reverse_condition (code);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
3197\f
/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.

   The cases handled, in order:
     1. FLOAT / UNSIGNED_FLOAT of an integer constant (host-FP dependent);
     2. any unary op on a CONST_INT narrow enough for a HOST_WIDE_INT;
     3. any unary op on an integer CONST_DOUBLE (double-word arithmetic);
     4. float-valued and float-to-int ops on a floating CONST_DOUBLE,
	guarded by a setjmp-based float-trap handler;
     5. non-constant identities such as (not (not X)) == X.
   The ordering matters: conversion ops like FIX must be matched against
   the correct (input vs. output) mode before the generic constant cases.  */

rtx
simplify_unary_operation (code, mode, op, op_mode)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op;
     enum machine_mode op_mode;
{
  register int width = GET_MODE_BITSIZE (mode);

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)

  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      /* Split the integer constant into low/high host words.  */
      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, lv, hv, mode);
#else
      /* Emulate the signed conversion with host doubles: scale the high
	 word by 2^HOST_BITS_PER_WIDE_INT in two half-word steps to avoid
	 overflowing the shift count.  */
      if (hv < 0)
	{
	  d = (double) (~ hv);
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) (~ lv);
	  d = (- d - 1.0);
	}
      else
	{
	  d = (double) hv;
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) lv;
	}
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
	{
	  /* We don't know how to interpret negative-looking numbers in
	     this case, so don't try to fold those.  */
	  if (hv < 0)
	    return 0;
	}
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
	;
      else
	/* Narrow operand: only the low OP_MODE bits are significant.  */
	hv = 0, lv &= GET_MODE_MASK (op_mode);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
#else
      d = (double) (unsigned HOST_WIDE_INT) hv;
      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
	    * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
      d += (double) (unsigned HOST_WIDE_INT) lv;
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
#endif

  /* Case 2: single-word integer constant folding.  */
  if (GET_CODE (op) == CONST_INT
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      register HOST_WIDE_INT arg0 = INTVAL (op);
      register HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      /* Mask to OP_MODE, then subtract 2^width if the sign bit of
		 the narrow value is set.  */
	      val
		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	      if (val
		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= ((HOST_WIDE_INT) 1 << width) - 1;

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will look
	 the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The later confuses the sparc backend.  */

      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	val |= ((HOST_WIDE_INT) (-1) << width);

      return GEN_INT (val);
    }

  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  */
  else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT l1, h1, lv, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;

      switch (code)
	{
	case NOT:
	  lv = ~ l1;
	  hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  hv = 0;
	  /* If the low word is zero, the first set bit is in the high
	     word; offset its position by a full host word.  */
	  if (l1 == 0)
	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
	  else
	    lv = exact_log2 (l1 & (-l1)) + 1;
	  break;

	case TRUNCATE:
	  /* This is just a change-of-mode, so do nothing.  */
	  lv = l1, hv = h1;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;

	  hv = 0;
	  lv = l1 & GET_MODE_MASK (op_mode);
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;
	  else
	    {
	      lv = l1 & GET_MODE_MASK (op_mode);
	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
		  && (lv & ((HOST_WIDE_INT) 1
			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
	    }
	  break;

	case SQRT:
	  return 0;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      rtx x;

      /* Guard the host floating-point arithmetic: a trap during folding
	 longjmps back here and we simply decline to fold.  */
      if (setjmp (handler))
	/* There used to be a warning here, but that is inadvisable.
	   People may want to cause traps, and the natural way
	   to do it should not get a warning.  */
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case NEG:
	  d = REAL_VALUE_NEGATE (d);
	  break;

	case ABS:
	  if (REAL_VALUE_NEGATIVE (d))
	    d = REAL_VALUE_NEGATE (d);
	  break;

	case FLOAT_TRUNCATE:
	  d = real_value_truncate (mode, d);
	  break;

	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;

	case FIX:
	  d = REAL_VALUE_RNDZINT (d);
	  break;

	case UNSIGNED_FIX:
	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
      set_float_handler (NULL_PTR);
      return x;
    }

  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	   && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      HOST_WIDE_INT val;

      if (setjmp (handler))
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case FIX:
	  val = REAL_VALUE_FIX (d);
	  break;

	case UNSIGNED_FIX:
	  val = REAL_VALUE_UNSIGNED_FIX (d);
	  break;

	default:
	  abort ();
	}

      set_float_handler (NULL_PTR);

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= ((HOST_WIDE_INT) 1 << width) - 1;

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will look
	 the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The later confuses the sparc backend.  */

      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	val |= ((HOST_WIDE_INT) (-1) << width);

      return GEN_INT (val);
    }
#endif
  /* This was formerly used only for non-IEEE float.
     eggert@twinsun.com says it is safe for IEEE also.  */
  else
    {
      /* There are some simplifications we can do even if the operands
	 aren't constant.  */
      switch (code)
	{
	case NEG:
	case NOT:
	  /* (not (not X)) == X, similarly for NEG.  */
	  if (GET_CODE (op) == code)
	    return XEXP (op, 0);
	  break;

	case SIGN_EXTEND:
	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
	     becomes just the MINUS if its mode is MODE.  This allows
	     folding switch statements on machines using casesi (such as
	     the Vax).  */
	  if (GET_CODE (op) == TRUNCATE
	      && GET_MODE (XEXP (op, 0)) == mode
	      && GET_CODE (XEXP (op, 0)) == MINUS
	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
	    return XEXP (op, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (! POINTERS_EXTEND_UNSIGNED
	      && mode == Pmode && GET_MODE (op) == ptr_mode
	      && CONSTANT_P (op))
	    return convert_memory_address (Pmode, op);
#endif
	  break;

#ifdef POINTERS_EXTEND_UNSIGNED
	case ZERO_EXTEND:
	  if (POINTERS_EXTEND_UNSIGNED
	      && mode == Pmode && GET_MODE (op) == ptr_mode
	      && CONSTANT_P (op))
	    return convert_memory_address (Pmode, op);
	  break;
#endif

	default:
	  break;
	}

      return 0;
    }
}
3635\f
3636/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3637 and OP1. Return 0 if no simplification is possible.
3638
3639 Don't use this for relational operations such as EQ or LT.
3640 Use simplify_relational_operation instead. */
3641
3642rtx
3643simplify_binary_operation (code, mode, op0, op1)
3644 enum rtx_code code;
3645 enum machine_mode mode;
3646 rtx op0, op1;
3647{
906c4e36
RK
3648 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3649 HOST_WIDE_INT val;
7afe21cc 3650 int width = GET_MODE_BITSIZE (mode);
96b0e481 3651 rtx tem;
7afe21cc
RK
3652
3653 /* Relational operations don't work here. We must know the mode
3654 of the operands in order to do the comparison correctly.
3655 Assuming a full word can give incorrect results.
3656 Consider comparing 128 with -128 in QImode. */
3657
3658 if (GET_RTX_CLASS (code) == '<')
3659 abort ();
3660
3661#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3662 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3663 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3664 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3665 {
3666 REAL_VALUE_TYPE f0, f1, value;
3667 jmp_buf handler;
3668
3669 if (setjmp (handler))
3670 return 0;
3671
3672 set_float_handler (handler);
3673
3674 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3675 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
5352b11a
RS
3676 f0 = real_value_truncate (mode, f0);
3677 f1 = real_value_truncate (mode, f1);
7afe21cc
RK
3678
3679#ifdef REAL_ARITHMETIC
956d6950
JL
3680#ifndef REAL_INFINITY
3681 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3682 return 0;
3683#endif
d3159aee 3684 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
7afe21cc
RK
3685#else
3686 switch (code)
3687 {
3688 case PLUS:
3689 value = f0 + f1;
3690 break;
3691 case MINUS:
3692 value = f0 - f1;
3693 break;
3694 case MULT:
3695 value = f0 * f1;
3696 break;
3697 case DIV:
3698#ifndef REAL_INFINITY
3699 if (f1 == 0)
21d12b80 3700 return 0;
7afe21cc
RK
3701#endif
3702 value = f0 / f1;
3703 break;
3704 case SMIN:
3705 value = MIN (f0, f1);
3706 break;
3707 case SMAX:
3708 value = MAX (f0, f1);
3709 break;
3710 default:
3711 abort ();
3712 }
3713#endif
3714
5352b11a 3715 value = real_value_truncate (mode, value);
831522a4 3716 set_float_handler (NULL_PTR);
560c94a2 3717 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
7afe21cc 3718 }
6076248a 3719#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc
RK
3720
3721 /* We can fold some multi-word operations. */
6076248a 3722 if (GET_MODE_CLASS (mode) == MODE_INT
33085906 3723 && width == HOST_BITS_PER_WIDE_INT * 2
fe873240 3724 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
6076248a 3725 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
7afe21cc 3726 {
906c4e36 3727 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
7afe21cc 3728
fe873240
RK
3729 if (GET_CODE (op0) == CONST_DOUBLE)
3730 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3731 else
3732 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
7afe21cc
RK
3733
3734 if (GET_CODE (op1) == CONST_DOUBLE)
3735 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3736 else
3737 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3738
3739 switch (code)
3740 {
3741 case MINUS:
3742 /* A - B == A + (-B). */
3743 neg_double (l2, h2, &lv, &hv);
3744 l2 = lv, h2 = hv;
3745
0f41302f 3746 /* .. fall through ... */
7afe21cc
RK
3747
3748 case PLUS:
3749 add_double (l1, h1, l2, h2, &lv, &hv);
3750 break;
3751
3752 case MULT:
3753 mul_double (l1, h1, l2, h2, &lv, &hv);
3754 break;
3755
3756 case DIV: case MOD: case UDIV: case UMOD:
3757 /* We'd need to include tree.h to do this and it doesn't seem worth
3758 it. */
3759 return 0;
3760
3761 case AND:
3762 lv = l1 & l2, hv = h1 & h2;
3763 break;
3764
3765 case IOR:
3766 lv = l1 | l2, hv = h1 | h2;
3767 break;
3768
3769 case XOR:
3770 lv = l1 ^ l2, hv = h1 ^ h2;
3771 break;
3772
3773 case SMIN:
906c4e36
RK
3774 if (h1 < h2
3775 || (h1 == h2
3776 && ((unsigned HOST_WIDE_INT) l1
3777 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3778 lv = l1, hv = h1;
3779 else
3780 lv = l2, hv = h2;
3781 break;
3782
3783 case SMAX:
906c4e36
RK
3784 if (h1 > h2
3785 || (h1 == h2
3786 && ((unsigned HOST_WIDE_INT) l1
3787 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3788 lv = l1, hv = h1;
3789 else
3790 lv = l2, hv = h2;
3791 break;
3792
3793 case UMIN:
906c4e36
RK
3794 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3795 || (h1 == h2
3796 && ((unsigned HOST_WIDE_INT) l1
3797 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3798 lv = l1, hv = h1;
3799 else
3800 lv = l2, hv = h2;
3801 break;
3802
3803 case UMAX:
906c4e36
RK
3804 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3805 || (h1 == h2
3806 && ((unsigned HOST_WIDE_INT) l1
3807 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3808 lv = l1, hv = h1;
3809 else
3810 lv = l2, hv = h2;
3811 break;
3812
3813 case LSHIFTRT: case ASHIFTRT:
45620ed4 3814 case ASHIFT:
7afe21cc
RK
3815 case ROTATE: case ROTATERT:
3816#ifdef SHIFT_COUNT_TRUNCATED
85c0a556
RK
3817 if (SHIFT_COUNT_TRUNCATED)
3818 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
7afe21cc
RK
3819#endif
3820
3821 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3822 return 0;
3823
3824 if (code == LSHIFTRT || code == ASHIFTRT)
3825 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3826 code == ASHIFTRT);
45620ed4
RK
3827 else if (code == ASHIFT)
3828 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
7afe21cc
RK
3829 else if (code == ROTATE)
3830 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3831 else /* code == ROTATERT */
3832 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3833 break;
3834
3835 default:
3836 return 0;
3837 }
3838
3839 return immed_double_const (lv, hv, mode);
3840 }
7afe21cc
RK
3841
3842 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
906c4e36 3843 || width > HOST_BITS_PER_WIDE_INT || width == 0)
7afe21cc
RK
3844 {
3845 /* Even if we can't compute a constant result,
3846 there are some cases worth simplifying. */
3847
3848 switch (code)
3849 {
3850 case PLUS:
3851 /* In IEEE floating point, x+0 is not the same as x. Similarly
3852 for the other optimizations below. */
3853 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3854 && FLOAT_MODE_P (mode) && ! flag_fast_math)
7afe21cc
RK
3855 break;
3856
3857 if (op1 == CONST0_RTX (mode))
3858 return op0;
3859
7afe21cc
RK
3860 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3861 if (GET_CODE (op0) == NEG)
96b0e481 3862 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
7afe21cc 3863 else if (GET_CODE (op1) == NEG)
96b0e481 3864 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
7afe21cc 3865
96b0e481
RK
3866 /* Handle both-operands-constant cases. We can only add
3867 CONST_INTs to constants since the sum of relocatable symbols
fe873240
RK
3868 can't be handled by most assemblers. Don't add CONST_INT
3869 to CONST_INT since overflow won't be computed properly if wider
3870 than HOST_BITS_PER_WIDE_INT. */
7afe21cc 3871
fe873240
RK
3872 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3873 && GET_CODE (op1) == CONST_INT)
96b0e481 3874 return plus_constant (op0, INTVAL (op1));
fe873240
RK
3875 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3876 && GET_CODE (op0) == CONST_INT)
96b0e481 3877 return plus_constant (op1, INTVAL (op0));
7afe21cc 3878
30d69925
RK
3879 /* See if this is something like X * C - X or vice versa or
3880 if the multiplication is written as a shift. If so, we can
3881 distribute and make a new multiply, shift, or maybe just
3882 have X (if C is 2 in the example above). But don't make
3883 real multiply if we didn't have one before. */
3884
3885 if (! FLOAT_MODE_P (mode))
3886 {
3887 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3888 rtx lhs = op0, rhs = op1;
3889 int had_mult = 0;
3890
3891 if (GET_CODE (lhs) == NEG)
3892 coeff0 = -1, lhs = XEXP (lhs, 0);
3893 else if (GET_CODE (lhs) == MULT
3894 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3895 {
3896 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3897 had_mult = 1;
3898 }
3899 else if (GET_CODE (lhs) == ASHIFT
3900 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3901 && INTVAL (XEXP (lhs, 1)) >= 0
3902 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3903 {
3904 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3905 lhs = XEXP (lhs, 0);
3906 }
3907
3908 if (GET_CODE (rhs) == NEG)
3909 coeff1 = -1, rhs = XEXP (rhs, 0);
3910 else if (GET_CODE (rhs) == MULT
3911 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3912 {
3913 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3914 had_mult = 1;
3915 }
3916 else if (GET_CODE (rhs) == ASHIFT
3917 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3918 && INTVAL (XEXP (rhs, 1)) >= 0
3919 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3920 {
3921 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3922 rhs = XEXP (rhs, 0);
3923 }
3924
3925 if (rtx_equal_p (lhs, rhs))
3926 {
3927 tem = cse_gen_binary (MULT, mode, lhs,
3928 GEN_INT (coeff0 + coeff1));
3929 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3930 }
3931 }
3932
96b0e481
RK
3933 /* If one of the operands is a PLUS or a MINUS, see if we can
3934 simplify this by the associative law.
3935 Don't use the associative law for floating point.
3936 The inaccuracy makes it nonassociative,
3937 and subtle programs can break if operations are associated. */
7afe21cc 3938
cbf6a543 3939 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
3940 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3941 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3942 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3943 return tem;
7afe21cc
RK
3944 break;
3945
3946 case COMPARE:
3947#ifdef HAVE_cc0
3948 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3949 using cc0, in which case we want to leave it as a COMPARE
3950 so we can distinguish it from a register-register-copy.
3951
3952 In IEEE floating point, x-0 is not the same as x. */
3953
3954 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3955 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
3956 && op1 == CONST0_RTX (mode))
3957 return op0;
3958#else
3959 /* Do nothing here. */
3960#endif
3961 break;
3962
3963 case MINUS:
21648b45
RK
3964 /* None of these optimizations can be done for IEEE
3965 floating point. */
3966 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3967 && FLOAT_MODE_P (mode) && ! flag_fast_math)
21648b45
RK
3968 break;
3969
a83afb65
RK
3970 /* We can't assume x-x is 0 even with non-IEEE floating point,
3971 but since it is zero except in very strange circumstances, we
3972 will treat it as zero with -ffast-math. */
7afe21cc
RK
3973 if (rtx_equal_p (op0, op1)
3974 && ! side_effects_p (op0)
a83afb65
RK
3975 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3976 return CONST0_RTX (mode);
7afe21cc
RK
3977
3978 /* Change subtraction from zero into negation. */
3979 if (op0 == CONST0_RTX (mode))
38a448ca 3980 return gen_rtx_NEG (mode, op1);
7afe21cc 3981
96b0e481
RK
3982 /* (-1 - a) is ~a. */
3983 if (op0 == constm1_rtx)
38a448ca 3984 return gen_rtx_NOT (mode, op1);
96b0e481 3985
7afe21cc
RK
3986 /* Subtracting 0 has no effect. */
3987 if (op1 == CONST0_RTX (mode))
3988 return op0;
3989
30d69925
RK
3990 /* See if this is something like X * C - X or vice versa or
3991 if the multiplication is written as a shift. If so, we can
3992 distribute and make a new multiply, shift, or maybe just
3993 have X (if C is 2 in the example above). But don't make
3994 real multiply if we didn't have one before. */
3995
3996 if (! FLOAT_MODE_P (mode))
3997 {
3998 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3999 rtx lhs = op0, rhs = op1;
4000 int had_mult = 0;
4001
4002 if (GET_CODE (lhs) == NEG)
4003 coeff0 = -1, lhs = XEXP (lhs, 0);
4004 else if (GET_CODE (lhs) == MULT
4005 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
4006 {
4007 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
4008 had_mult = 1;
4009 }
4010 else if (GET_CODE (lhs) == ASHIFT
4011 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
4012 && INTVAL (XEXP (lhs, 1)) >= 0
4013 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
4014 {
4015 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
4016 lhs = XEXP (lhs, 0);
4017 }
4018
4019 if (GET_CODE (rhs) == NEG)
4020 coeff1 = - 1, rhs = XEXP (rhs, 0);
4021 else if (GET_CODE (rhs) == MULT
4022 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
4023 {
4024 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
4025 had_mult = 1;
4026 }
4027 else if (GET_CODE (rhs) == ASHIFT
4028 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
4029 && INTVAL (XEXP (rhs, 1)) >= 0
4030 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
4031 {
4032 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
4033 rhs = XEXP (rhs, 0);
4034 }
4035
4036 if (rtx_equal_p (lhs, rhs))
4037 {
4038 tem = cse_gen_binary (MULT, mode, lhs,
4039 GEN_INT (coeff0 - coeff1));
4040 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4041 }
4042 }
4043
7afe21cc
RK
4044 /* (a - (-b)) -> (a + b). */
4045 if (GET_CODE (op1) == NEG)
96b0e481 4046 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
7afe21cc 4047
96b0e481
RK
4048 /* If one of the operands is a PLUS or a MINUS, see if we can
4049 simplify this by the associative law.
4050 Don't use the associative law for floating point.
7afe21cc
RK
4051 The inaccuracy makes it nonassociative,
4052 and subtle programs can break if operations are associated. */
7afe21cc 4053
cbf6a543 4054 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
4055 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4056 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4057 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4058 return tem;
7afe21cc
RK
4059
4060 /* Don't let a relocatable value get a negative coeff. */
b5a09c41 4061 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
7afe21cc 4062 return plus_constant (op0, - INTVAL (op1));
29d72c4b
TG
4063
4064 /* (x - (x & y)) -> (x & ~y) */
4065 if (GET_CODE (op1) == AND)
4066 {
4067 if (rtx_equal_p (op0, XEXP (op1, 0)))
38a448ca 4068 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
29d72c4b 4069 if (rtx_equal_p (op0, XEXP (op1, 1)))
38a448ca 4070 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
29d72c4b 4071 }
7afe21cc
RK
4072 break;
4073
4074 case MULT:
4075 if (op1 == constm1_rtx)
4076 {
96b0e481 4077 tem = simplify_unary_operation (NEG, mode, op0, mode);
7afe21cc 4078
38a448ca 4079 return tem ? tem : gen_rtx_NEG (mode, op0);
7afe21cc
RK
4080 }
4081
4082 /* In IEEE floating point, x*0 is not always 0. */
4083 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4084 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
4085 && op1 == CONST0_RTX (mode)
4086 && ! side_effects_p (op0))
4087 return op1;
4088
4089 /* In IEEE floating point, x*1 is not equivalent to x for nans.
4090 However, ANSI says we can drop signals,
4091 so we can do this anyway. */
4092 if (op1 == CONST1_RTX (mode))
4093 return op0;
4094
c407b802
RK
4095 /* Convert multiply by constant power of two into shift unless
4096 we are still generating RTL. This test is a kludge. */
7afe21cc 4097 if (GET_CODE (op1) == CONST_INT
c407b802 4098 && (val = exact_log2 (INTVAL (op1))) >= 0
2d917903
JW
4099 /* If the mode is larger than the host word size, and the
4100 uppermost bit is set, then this isn't a power of two due
4101 to implicit sign extension. */
4102 && (width <= HOST_BITS_PER_WIDE_INT
4103 || val != HOST_BITS_PER_WIDE_INT - 1)
c407b802 4104 && ! rtx_equal_function_value_matters)
38a448ca 4105 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
7afe21cc
RK
4106
4107 if (GET_CODE (op1) == CONST_DOUBLE
4108 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4109 {
4110 REAL_VALUE_TYPE d;
5a3d4bef
RK
4111 jmp_buf handler;
4112 int op1is2, op1ism1;
4113
4114 if (setjmp (handler))
4115 return 0;
4116
4117 set_float_handler (handler);
7afe21cc 4118 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
5a3d4bef
RK
4119 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4120 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4121 set_float_handler (NULL_PTR);
7afe21cc
RK
4122
4123 /* x*2 is x+x and x*(-1) is -x */
5a3d4bef 4124 if (op1is2 && GET_MODE (op0) == mode)
38a448ca 4125 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
7afe21cc 4126
5a3d4bef 4127 else if (op1ism1 && GET_MODE (op0) == mode)
38a448ca 4128 return gen_rtx_NEG (mode, op0);
7afe21cc
RK
4129 }
4130 break;
4131
4132 case IOR:
4133 if (op1 == const0_rtx)
4134 return op0;
4135 if (GET_CODE (op1) == CONST_INT
4136 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4137 return op1;
4138 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4139 return op0;
4140 /* A | (~A) -> -1 */
4141 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4142 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
31dcf83f 4143 && ! side_effects_p (op0)
8e7e5365 4144 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4145 return constm1_rtx;
4146 break;
4147
4148 case XOR:
4149 if (op1 == const0_rtx)
4150 return op0;
4151 if (GET_CODE (op1) == CONST_INT
4152 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
38a448ca 4153 return gen_rtx_NOT (mode, op0);
31dcf83f 4154 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4155 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4156 return const0_rtx;
4157 break;
4158
4159 case AND:
4160 if (op1 == const0_rtx && ! side_effects_p (op0))
4161 return const0_rtx;
4162 if (GET_CODE (op1) == CONST_INT
4163 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4164 return op0;
31dcf83f 4165 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4166 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4167 return op0;
4168 /* A & (~A) -> 0 */
4169 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4170 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
709ab4fc 4171 && ! side_effects_p (op0)
8e7e5365 4172 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4173 return const0_rtx;
4174 break;
4175
4176 case UDIV:
4177 /* Convert divide by power of two into shift (divide by 1 handled
4178 below). */
4179 if (GET_CODE (op1) == CONST_INT
4180 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
38a448ca 4181 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
7afe21cc 4182
0f41302f 4183 /* ... fall through ... */
7afe21cc
RK
4184
4185 case DIV:
4186 if (op1 == CONST1_RTX (mode))
4187 return op0;
e7a522ba
RS
4188
4189 /* In IEEE floating point, 0/x is not always 0. */
4190 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4191 || ! FLOAT_MODE_P (mode) || flag_fast_math)
e7a522ba
RS
4192 && op0 == CONST0_RTX (mode)
4193 && ! side_effects_p (op1))
7afe21cc 4194 return op0;
e7a522ba 4195
7afe21cc 4196#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a83afb65
RK
4197 /* Change division by a constant into multiplication. Only do
4198 this with -ffast-math until an expert says it is safe in
4199 general. */
7afe21cc
RK
4200 else if (GET_CODE (op1) == CONST_DOUBLE
4201 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
a83afb65
RK
4202 && op1 != CONST0_RTX (mode)
4203 && flag_fast_math)
7afe21cc
RK
4204 {
4205 REAL_VALUE_TYPE d;
4206 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
a83afb65
RK
4207
4208 if (! REAL_VALUES_EQUAL (d, dconst0))
4209 {
7afe21cc 4210#if defined (REAL_ARITHMETIC)
a83afb65 4211 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
38a448ca
RH
4212 return gen_rtx_MULT (mode, op0,
4213 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
7afe21cc 4214#else
38a448ca
RH
4215 return gen_rtx_MULT (mode, op0,
4216 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
7afe21cc 4217#endif
a83afb65
RK
4218 }
4219 }
7afe21cc
RK
4220#endif
4221 break;
4222
4223 case UMOD:
4224 /* Handle modulus by power of two (mod with 1 handled below). */
4225 if (GET_CODE (op1) == CONST_INT
4226 && exact_log2 (INTVAL (op1)) > 0)
38a448ca 4227 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
7afe21cc 4228
0f41302f 4229 /* ... fall through ... */
7afe21cc
RK
4230
4231 case MOD:
4232 if ((op0 == const0_rtx || op1 == const1_rtx)
4233 && ! side_effects_p (op0) && ! side_effects_p (op1))
4234 return const0_rtx;
4235 break;
4236
4237 case ROTATERT:
4238 case ROTATE:
4239 /* Rotating ~0 always results in ~0. */
906c4e36 4240 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
7afe21cc
RK
4241 && INTVAL (op0) == GET_MODE_MASK (mode)
4242 && ! side_effects_p (op1))
4243 return op0;
4244
0f41302f 4245 /* ... fall through ... */
7afe21cc 4246
7afe21cc
RK
4247 case ASHIFT:
4248 case ASHIFTRT:
4249 case LSHIFTRT:
4250 if (op1 == const0_rtx)
4251 return op0;
4252 if (op0 == const0_rtx && ! side_effects_p (op1))
4253 return op0;
4254 break;
4255
4256 case SMIN:
906c4e36
RK
4257 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4258 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
7afe21cc
RK
4259 && ! side_effects_p (op0))
4260 return op1;
4261 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4262 return op0;
4263 break;
4264
4265 case SMAX:
906c4e36 4266 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
dbbe6445
RK
4267 && (INTVAL (op1)
4268 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
7afe21cc
RK
4269 && ! side_effects_p (op0))
4270 return op1;
4271 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4272 return op0;
4273 break;
4274
4275 case UMIN:
4276 if (op1 == const0_rtx && ! side_effects_p (op0))
4277 return op1;
4278 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4279 return op0;
4280 break;
4281
4282 case UMAX:
4283 if (op1 == constm1_rtx && ! side_effects_p (op0))
4284 return op1;
4285 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4286 return op0;
4287 break;
4288
4289 default:
4290 abort ();
4291 }
4292
4293 return 0;
4294 }
4295
4296 /* Get the integer argument values in two forms:
4297 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4298
4299 arg0 = INTVAL (op0);
4300 arg1 = INTVAL (op1);
4301
906c4e36 4302 if (width < HOST_BITS_PER_WIDE_INT)
7afe21cc 4303 {
906c4e36
RK
4304 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4305 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc
RK
4306
4307 arg0s = arg0;
906c4e36
RK
4308 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4309 arg0s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4310
4311 arg1s = arg1;
906c4e36
RK
4312 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4313 arg1s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4314 }
4315 else
4316 {
4317 arg0s = arg0;
4318 arg1s = arg1;
4319 }
4320
4321 /* Compute the value of the arithmetic. */
4322
4323 switch (code)
4324 {
4325 case PLUS:
538b78e7 4326 val = arg0s + arg1s;
7afe21cc
RK
4327 break;
4328
4329 case MINUS:
538b78e7 4330 val = arg0s - arg1s;
7afe21cc
RK
4331 break;
4332
4333 case MULT:
4334 val = arg0s * arg1s;
4335 break;
4336
4337 case DIV:
4338 if (arg1s == 0)
4339 return 0;
4340 val = arg0s / arg1s;
4341 break;
4342
4343 case MOD:
4344 if (arg1s == 0)
4345 return 0;
4346 val = arg0s % arg1s;
4347 break;
4348
4349 case UDIV:
4350 if (arg1 == 0)
4351 return 0;
906c4e36 4352 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
7afe21cc
RK
4353 break;
4354
4355 case UMOD:
4356 if (arg1 == 0)
4357 return 0;
906c4e36 4358 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
7afe21cc
RK
4359 break;
4360
4361 case AND:
4362 val = arg0 & arg1;
4363 break;
4364
4365 case IOR:
4366 val = arg0 | arg1;
4367 break;
4368
4369 case XOR:
4370 val = arg0 ^ arg1;
4371 break;
4372
4373 case LSHIFTRT:
4374 /* If shift count is undefined, don't fold it; let the machine do
4375 what it wants. But truncate it if the machine will do that. */
4376 if (arg1 < 0)
4377 return 0;
4378
4379#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4380 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4381 arg1 %= width;
7afe21cc
RK
4382#endif
4383
906c4e36 4384 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
7afe21cc
RK
4385 break;
4386
4387 case ASHIFT:
7afe21cc
RK
4388 if (arg1 < 0)
4389 return 0;
4390
4391#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4392 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4393 arg1 %= width;
7afe21cc
RK
4394#endif
4395
906c4e36 4396 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
7afe21cc
RK
4397 break;
4398
4399 case ASHIFTRT:
4400 if (arg1 < 0)
4401 return 0;
4402
4403#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4404 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4405 arg1 %= width;
7afe21cc
RK
4406#endif
4407
7afe21cc 4408 val = arg0s >> arg1;
2166571b
RS
4409
4410 /* Bootstrap compiler may not have sign extended the right shift.
4411 Manually extend the sign to insure bootstrap cc matches gcc. */
4412 if (arg0s < 0 && arg1 > 0)
4413 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4414
7afe21cc
RK
4415 break;
4416
4417 case ROTATERT:
4418 if (arg1 < 0)
4419 return 0;
4420
4421 arg1 %= width;
906c4e36
RK
4422 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4423 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
7afe21cc
RK
4424 break;
4425
4426 case ROTATE:
4427 if (arg1 < 0)
4428 return 0;
4429
4430 arg1 %= width;
906c4e36
RK
4431 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4432 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
7afe21cc
RK
4433 break;
4434
4435 case COMPARE:
4436 /* Do nothing here. */
4437 return 0;
4438
830a38ee
RS
4439 case SMIN:
4440 val = arg0s <= arg1s ? arg0s : arg1s;
4441 break;
4442
4443 case UMIN:
906c4e36
RK
4444 val = ((unsigned HOST_WIDE_INT) arg0
4445 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4446 break;
4447
4448 case SMAX:
4449 val = arg0s > arg1s ? arg0s : arg1s;
4450 break;
4451
4452 case UMAX:
906c4e36
RK
4453 val = ((unsigned HOST_WIDE_INT) arg0
4454 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4455 break;
4456
7afe21cc
RK
4457 default:
4458 abort ();
4459 }
4460
4461 /* Clear the bits that don't belong in our mode, unless they and our sign
4462 bit are all one. So we get either a reasonable negative value or a
4463 reasonable unsigned value for this mode. */
906c4e36
RK
4464 if (width < HOST_BITS_PER_WIDE_INT
4465 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4466 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4467 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4468
ad89d6f6
TG
4469 /* If this would be an entire word for the target, but is not for
4470 the host, then sign-extend on the host so that the number will look
4471 the same way on the host that it would on the target.
4472
4473 For example, when building a 64 bit alpha hosted 32 bit sparc
4474 targeted compiler, then we want the 32 bit unsigned value -1 to be
4475 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4476 The later confuses the sparc backend. */
4477
4478 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4479 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4480 val |= ((HOST_WIDE_INT) (-1) << width);
4481
906c4e36 4482 return GEN_INT (val);
7afe21cc
RK
4483}
4484\f
96b0e481
RK
4485/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4486 PLUS or MINUS.
4487
4488 Rather than test for specific case, we do this by a brute-force method
4489 and do all possible simplifications until no more changes occur. Then
4490 we rebuild the operation. */
4491
4492static rtx
4493simplify_plus_minus (code, mode, op0, op1)
4494 enum rtx_code code;
4495 enum machine_mode mode;
4496 rtx op0, op1;
4497{
4498 rtx ops[8];
4499 int negs[8];
4500 rtx result, tem;
fb5c8ce6 4501 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
96b0e481 4502 int first = 1, negate = 0, changed;
fb5c8ce6 4503 int i, j;
96b0e481 4504
4c9a05bc 4505 bzero ((char *) ops, sizeof ops);
96b0e481
RK
4506
4507 /* Set up the two operands and then expand them until nothing has been
4508 changed. If we run out of room in our array, give up; this should
4509 almost never happen. */
4510
4511 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4512
4513 changed = 1;
4514 while (changed)
4515 {
4516 changed = 0;
4517
4518 for (i = 0; i < n_ops; i++)
4519 switch (GET_CODE (ops[i]))
4520 {
4521 case PLUS:
4522 case MINUS:
4523 if (n_ops == 7)
4524 return 0;
4525
4526 ops[n_ops] = XEXP (ops[i], 1);
4527 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4528 ops[i] = XEXP (ops[i], 0);
b7d9299b 4529 input_ops++;
96b0e481
RK
4530 changed = 1;
4531 break;
4532
4533 case NEG:
4534 ops[i] = XEXP (ops[i], 0);
4535 negs[i] = ! negs[i];
4536 changed = 1;
4537 break;
4538
4539 case CONST:
4540 ops[i] = XEXP (ops[i], 0);
fb5c8ce6 4541 input_consts++;
96b0e481
RK
4542 changed = 1;
4543 break;
4544
4545 case NOT:
4546 /* ~a -> (-a - 1) */
4547 if (n_ops != 7)
4548 {
4549 ops[n_ops] = constm1_rtx;
5931019b 4550 negs[n_ops++] = negs[i];
96b0e481
RK
4551 ops[i] = XEXP (ops[i], 0);
4552 negs[i] = ! negs[i];
4553 changed = 1;
4554 }
4555 break;
4556
4557 case CONST_INT:
4558 if (negs[i])
4559 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4560 break;
e9a25f70
JL
4561
4562 default:
4563 break;
96b0e481
RK
4564 }
4565 }
4566
4567 /* If we only have two operands, we can't do anything. */
4568 if (n_ops <= 2)
4569 return 0;
4570
4571 /* Now simplify each pair of operands until nothing changes. The first
4572 time through just simplify constants against each other. */
4573
4574 changed = 1;
4575 while (changed)
4576 {
4577 changed = first;
4578
4579 for (i = 0; i < n_ops - 1; i++)
4580 for (j = i + 1; j < n_ops; j++)
4581 if (ops[i] != 0 && ops[j] != 0
4582 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4583 {
4584 rtx lhs = ops[i], rhs = ops[j];
4585 enum rtx_code ncode = PLUS;
4586
4587 if (negs[i] && ! negs[j])
4588 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4589 else if (! negs[i] && negs[j])
4590 ncode = MINUS;
4591
4592 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
b7d9299b 4593 if (tem)
96b0e481
RK
4594 {
4595 ops[i] = tem, ops[j] = 0;
4596 negs[i] = negs[i] && negs[j];
4597 if (GET_CODE (tem) == NEG)
4598 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4599
4600 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4601 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4602 changed = 1;
4603 }
4604 }
4605
4606 first = 0;
4607 }
4608
4609 /* Pack all the operands to the lower-numbered entries and give up if
91a60f37 4610 we didn't reduce the number of operands we had. Make sure we
fb5c8ce6
RK
4611 count a CONST as two operands. If we have the same number of
4612 operands, but have made more CONSTs than we had, this is also
4613 an improvement, so accept it. */
91a60f37 4614
fb5c8ce6 4615 for (i = 0, j = 0; j < n_ops; j++)
96b0e481 4616 if (ops[j] != 0)
91a60f37
RK
4617 {
4618 ops[i] = ops[j], negs[i++] = negs[j];
4619 if (GET_CODE (ops[j]) == CONST)
fb5c8ce6 4620 n_consts++;
91a60f37 4621 }
96b0e481 4622
fb5c8ce6
RK
4623 if (i + n_consts > input_ops
4624 || (i + n_consts == input_ops && n_consts <= input_consts))
96b0e481
RK
4625 return 0;
4626
4627 n_ops = i;
4628
4629 /* If we have a CONST_INT, put it last. */
4630 for (i = 0; i < n_ops - 1; i++)
4631 if (GET_CODE (ops[i]) == CONST_INT)
4632 {
4633 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4634 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4635 }
4636
4637 /* Put a non-negated operand first. If there aren't any, make all
4638 operands positive and negate the whole thing later. */
4639 for (i = 0; i < n_ops && negs[i]; i++)
4640 ;
4641
4642 if (i == n_ops)
4643 {
4644 for (i = 0; i < n_ops; i++)
4645 negs[i] = 0;
4646 negate = 1;
4647 }
4648 else if (i != 0)
4649 {
4650 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4651 j = negs[0], negs[0] = negs[i], negs[i] = j;
4652 }
4653
4654 /* Now make the result by performing the requested operations. */
4655 result = ops[0];
4656 for (i = 1; i < n_ops; i++)
4657 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4658
38a448ca 4659 return negate ? gen_rtx_NEG (mode, result) : result;
96b0e481
RK
4660}
4661\f
4662/* Make a binary operation by properly ordering the operands and
4663 seeing if the expression folds. */
4664
4665static rtx
4666cse_gen_binary (code, mode, op0, op1)
4667 enum rtx_code code;
4668 enum machine_mode mode;
4669 rtx op0, op1;
4670{
4671 rtx tem;
4672
4673 /* Put complex operands first and constants second if commutative. */
4674 if (GET_RTX_CLASS (code) == 'c'
4675 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4676 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4677 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4678 || (GET_CODE (op0) == SUBREG
4679 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4680 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4681 tem = op0, op0 = op1, op1 = tem;
4682
4683 /* If this simplifies, do it. */
4684 tem = simplify_binary_operation (code, mode, op0, op1);
4685
4686 if (tem)
4687 return tem;
4688
4689 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4690 just form the operation. */
4691
4692 if (code == PLUS && GET_CODE (op1) == CONST_INT
4693 && GET_MODE (op0) != VOIDmode)
4694 return plus_constant (op0, INTVAL (op1));
4695 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4696 && GET_MODE (op0) != VOIDmode)
4697 return plus_constant (op0, - INTVAL (op1));
4698 else
38a448ca 4699 return gen_rtx_fmt_ee (code, mode, op0, op1);
96b0e481
RK
4700}
4701\f
1a87eea2
KG
4702struct cfc_args
4703{
4704 /* Input */
4705 rtx op0, op1;
4706 /* Output */
4707 int equal, op0lt, op1lt;
4708};
4709
4710static void
4711check_fold_consts (data)
4712 PTR data;
4713{
4714 struct cfc_args * args = (struct cfc_args *) data;
4715 REAL_VALUE_TYPE d0, d1;
4716
4717 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4718 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4719 args->equal = REAL_VALUES_EQUAL (d0, d1);
4720 args->op0lt = REAL_VALUES_LESS (d0, d1);
4721 args->op1lt = REAL_VALUES_LESS (d1, d0);
4722}
4723
7afe21cc 4724/* Like simplify_binary_operation except used for relational operators.
a432f20d
RK
4725 MODE is the mode of the operands, not that of the result. If MODE
4726 is VOIDmode, both operands must also be VOIDmode and we compare the
4727 operands in "infinite precision".
4728
4729 If no simplification is possible, this function returns zero. Otherwise,
4730 it returns either const_true_rtx or const0_rtx. */
7afe21cc
RK
4731
4732rtx
4733simplify_relational_operation (code, mode, op0, op1)
4734 enum rtx_code code;
4735 enum machine_mode mode;
4736 rtx op0, op1;
4737{
a432f20d
RK
4738 int equal, op0lt, op0ltu, op1lt, op1ltu;
4739 rtx tem;
7afe21cc
RK
4740
4741 /* If op0 is a compare, extract the comparison arguments from it. */
4742 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4743 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4744
28bad1cb
RK
4745 /* We can't simplify MODE_CC values since we don't know what the
4746 actual comparison is. */
4747 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4748#ifdef HAVE_cc0
4749 || op0 == cc0_rtx
4750#endif
4751 )
31dcf83f
RS
4752 return 0;
4753
a432f20d
RK
4754 /* For integer comparisons of A and B maybe we can simplify A - B and can
4755 then simplify a comparison of that with zero. If A and B are both either
4756 a register or a CONST_INT, this can't help; testing for these cases will
4757 prevent infinite recursion here and speed things up.
4758
c27b5c62
JW
4759 If CODE is an unsigned comparison, then we can never do this optimization,
4760 because it gives an incorrect result if the subtraction wraps around zero.
4761 ANSI C defines unsigned operations such that they never overflow, and
4762 thus such cases can not be ignored. */
a432f20d
RK
4763
4764 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4765 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4766 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4767 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
c27b5c62 4768 && code != GTU && code != GEU && code != LTU && code != LEU)
a432f20d
RK
4769 return simplify_relational_operation (signed_condition (code),
4770 mode, tem, const0_rtx);
4771
4772 /* For non-IEEE floating-point, if the two operands are equal, we know the
4773 result. */
4774 if (rtx_equal_p (op0, op1)
4775 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4776 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4777 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4778
4779 /* If the operands are floating-point constants, see if we can fold
4780 the result. */
6076248a 4781#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a432f20d
RK
4782 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4783 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4784 {
1a87eea2
KG
4785 struct cfc_args args;
4786
4787 /* Setup input for check_fold_consts() */
4788 args.op0 = op0;
4789 args.op1 = op1;
a432f20d 4790
1a87eea2
KG
4791 if (do_float_handler(check_fold_consts, (PTR) &args) == 0)
4792 /* We got an exception from check_fold_consts() */
a432f20d 4793 return 0;
7afe21cc 4794
1a87eea2
KG
4795 /* Receive output from check_fold_consts() */
4796 equal = args.equal;
4797 op0lt = op0ltu = args.op0lt;
4798 op1lt = op1ltu = args.op1lt;
a432f20d
RK
4799 }
4800#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc 4801
a432f20d
RK
4802 /* Otherwise, see if the operands are both integers. */
4803 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4804 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4805 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4806 {
4807 int width = GET_MODE_BITSIZE (mode);
64812ded
RK
4808 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4809 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
7afe21cc 4810
a432f20d
RK
4811 /* Get the two words comprising each integer constant. */
4812 if (GET_CODE (op0) == CONST_DOUBLE)
4813 {
4814 l0u = l0s = CONST_DOUBLE_LOW (op0);
4815 h0u = h0s = CONST_DOUBLE_HIGH (op0);
7afe21cc 4816 }
a432f20d 4817 else
6076248a 4818 {
a432f20d 4819 l0u = l0s = INTVAL (op0);
cb3bb2a7 4820 h0u = h0s = l0s < 0 ? -1 : 0;
a432f20d 4821 }
6076248a 4822
a432f20d
RK
4823 if (GET_CODE (op1) == CONST_DOUBLE)
4824 {
4825 l1u = l1s = CONST_DOUBLE_LOW (op1);
4826 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4827 }
4828 else
4829 {
4830 l1u = l1s = INTVAL (op1);
cb3bb2a7 4831 h1u = h1s = l1s < 0 ? -1 : 0;
a432f20d
RK
4832 }
4833
4834 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4835 we have to sign or zero-extend the values. */
4836 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4837 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
6076248a 4838
a432f20d
RK
4839 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4840 {
4841 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4842 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
6076248a 4843
a432f20d
RK
4844 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4845 l0s |= ((HOST_WIDE_INT) (-1) << width);
6076248a 4846
a432f20d
RK
4847 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4848 l1s |= ((HOST_WIDE_INT) (-1) << width);
6076248a
RK
4849 }
4850
a432f20d
RK
4851 equal = (h0u == h1u && l0u == l1u);
4852 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4853 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4854 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4855 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4856 }
4857
4858 /* Otherwise, there are some code-specific tests we can make. */
4859 else
4860 {
7afe21cc
RK
4861 switch (code)
4862 {
4863 case EQ:
a432f20d
RK
4864 /* References to the frame plus a constant or labels cannot
4865 be zero, but a SYMBOL_REF can due to #pragma weak. */
4866 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4867 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4868#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d
RK
4869 /* On some machines, the ap reg can be 0 sometimes. */
4870 && op0 != arg_pointer_rtx
7afe21cc 4871#endif
a432f20d
RK
4872 )
4873 return const0_rtx;
4874 break;
7afe21cc
RK
4875
4876 case NE:
a432f20d
RK
4877 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4878 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4879#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d 4880 && op0 != arg_pointer_rtx
7afe21cc 4881#endif
a432f20d 4882 )
7afe21cc
RK
4883 return const_true_rtx;
4884 break;
4885
4886 case GEU:
a432f20d
RK
4887 /* Unsigned values are never negative. */
4888 if (op1 == const0_rtx)
7afe21cc
RK
4889 return const_true_rtx;
4890 break;
4891
4892 case LTU:
a432f20d 4893 if (op1 == const0_rtx)
7afe21cc
RK
4894 return const0_rtx;
4895 break;
4896
4897 case LEU:
4898 /* Unsigned values are never greater than the largest
4899 unsigned value. */
4900 if (GET_CODE (op1) == CONST_INT
4901 && INTVAL (op1) == GET_MODE_MASK (mode)
a432f20d
RK
4902 && INTEGRAL_MODE_P (mode))
4903 return const_true_rtx;
7afe21cc
RK
4904 break;
4905
4906 case GTU:
4907 if (GET_CODE (op1) == CONST_INT
4908 && INTVAL (op1) == GET_MODE_MASK (mode)
cbf6a543 4909 && INTEGRAL_MODE_P (mode))
7afe21cc
RK
4910 return const0_rtx;
4911 break;
e9a25f70
JL
4912
4913 default:
4914 break;
7afe21cc
RK
4915 }
4916
4917 return 0;
4918 }
4919
a432f20d
RK
4920 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4921 as appropriate. */
7afe21cc
RK
4922 switch (code)
4923 {
7afe21cc 4924 case EQ:
a432f20d
RK
4925 return equal ? const_true_rtx : const0_rtx;
4926 case NE:
4927 return ! equal ? const_true_rtx : const0_rtx;
7afe21cc 4928 case LT:
a432f20d 4929 return op0lt ? const_true_rtx : const0_rtx;
7afe21cc 4930 case GT:
a432f20d 4931 return op1lt ? const_true_rtx : const0_rtx;
7afe21cc 4932 case LTU:
a432f20d 4933 return op0ltu ? const_true_rtx : const0_rtx;
7afe21cc 4934 case GTU:
a432f20d
RK
4935 return op1ltu ? const_true_rtx : const0_rtx;
4936 case LE:
4937 return equal || op0lt ? const_true_rtx : const0_rtx;
4938 case GE:
4939 return equal || op1lt ? const_true_rtx : const0_rtx;
4940 case LEU:
4941 return equal || op0ltu ? const_true_rtx : const0_rtx;
4942 case GEU:
4943 return equal || op1ltu ? const_true_rtx : const0_rtx;
e9a25f70
JL
4944 default:
4945 abort ();
7afe21cc 4946 }
7afe21cc
RK
4947}
4948\f
4949/* Simplify CODE, an operation with result mode MODE and three operands,
4950 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4951 a constant. Return 0 if no simplifications is possible. */
4952
4953rtx
4954simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4955 enum rtx_code code;
4956 enum machine_mode mode, op0_mode;
4957 rtx op0, op1, op2;
4958{
4959 int width = GET_MODE_BITSIZE (mode);
4960
4961 /* VOIDmode means "infinite" precision. */
4962 if (width == 0)
906c4e36 4963 width = HOST_BITS_PER_WIDE_INT;
7afe21cc
RK
4964
4965 switch (code)
4966 {
4967 case SIGN_EXTRACT:
4968 case ZERO_EXTRACT:
4969 if (GET_CODE (op0) == CONST_INT
4970 && GET_CODE (op1) == CONST_INT
4971 && GET_CODE (op2) == CONST_INT
4972 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
906c4e36 4973 && width <= HOST_BITS_PER_WIDE_INT)
7afe21cc
RK
4974 {
4975 /* Extracting a bit-field from a constant */
906c4e36 4976 HOST_WIDE_INT val = INTVAL (op0);
7afe21cc 4977
f76b9db2
ILT
4978 if (BITS_BIG_ENDIAN)
4979 val >>= (GET_MODE_BITSIZE (op0_mode)
4980 - INTVAL (op2) - INTVAL (op1));
4981 else
4982 val >>= INTVAL (op2);
4983
906c4e36 4984 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
7afe21cc
RK
4985 {
4986 /* First zero-extend. */
906c4e36 4987 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
7afe21cc 4988 /* If desired, propagate sign bit. */
906c4e36
RK
4989 if (code == SIGN_EXTRACT
4990 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4991 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
7afe21cc
RK
4992 }
4993
4994 /* Clear the bits that don't belong in our mode,
4995 unless they and our sign bit are all one.
4996 So we get either a reasonable negative value or a reasonable
4997 unsigned value for this mode. */
906c4e36
RK
4998 if (width < HOST_BITS_PER_WIDE_INT
4999 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
5000 != ((HOST_WIDE_INT) (-1) << (width - 1))))
5001 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 5002
906c4e36 5003 return GEN_INT (val);
7afe21cc
RK
5004 }
5005 break;
5006
5007 case IF_THEN_ELSE:
5008 if (GET_CODE (op0) == CONST_INT)
5009 return op0 != const0_rtx ? op1 : op2;
3bf1b082
JW
5010
5011 /* Convert a == b ? b : a to "a". */
5012 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
5013 && rtx_equal_p (XEXP (op0, 0), op1)
5014 && rtx_equal_p (XEXP (op0, 1), op2))
5015 return op1;
5016 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
5017 && rtx_equal_p (XEXP (op0, 1), op1)
5018 && rtx_equal_p (XEXP (op0, 0), op2))
5019 return op2;
e82ad93d 5020 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
ed1ecb19
JL
5021 {
5022 rtx temp;
5023 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
5024 XEXP (op0, 0), XEXP (op0, 1));
5025 /* See if any simplifications were possible. */
5026 if (temp == const0_rtx)
5027 return op2;
5028 else if (temp == const1_rtx)
5029 return op1;
5030 }
7afe21cc
RK
5031 break;
5032
5033 default:
5034 abort ();
5035 }
5036
5037 return 0;
5038}
5039\f
5040/* If X is a nontrivial arithmetic operation on an argument
5041 for which a constant value can be determined, return
5042 the result of operating on that value, as a constant.
5043 Otherwise, return X, possibly with one or more operands
5044 modified by recursive calls to this function.
5045
e7bb59fa
RK
5046 If X is a register whose contents are known, we do NOT
5047 return those contents here. equiv_constant is called to
5048 perform that task.
7afe21cc
RK
5049
5050 INSN is the insn that we may be modifying. If it is 0, make a copy
5051 of X before modifying it. */
5052
5053static rtx
5054fold_rtx (x, insn)
5055 rtx x;
5056 rtx insn;
5057{
5058 register enum rtx_code code;
5059 register enum machine_mode mode;
5060 register char *fmt;
906c4e36 5061 register int i;
7afe21cc
RK
5062 rtx new = 0;
5063 int copied = 0;
5064 int must_swap = 0;
5065
5066 /* Folded equivalents of first two operands of X. */
5067 rtx folded_arg0;
5068 rtx folded_arg1;
5069
5070 /* Constant equivalents of first three operands of X;
5071 0 when no such equivalent is known. */
5072 rtx const_arg0;
5073 rtx const_arg1;
5074 rtx const_arg2;
5075
5076 /* The mode of the first operand of X. We need this for sign and zero
5077 extends. */
5078 enum machine_mode mode_arg0;
5079
5080 if (x == 0)
5081 return x;
5082
5083 mode = GET_MODE (x);
5084 code = GET_CODE (x);
5085 switch (code)
5086 {
5087 case CONST:
5088 case CONST_INT:
5089 case CONST_DOUBLE:
5090 case SYMBOL_REF:
5091 case LABEL_REF:
5092 case REG:
5093 /* No use simplifying an EXPR_LIST
5094 since they are used only for lists of args
5095 in a function call's REG_EQUAL note. */
5096 case EXPR_LIST:
956d6950
JL
5097 /* Changing anything inside an ADDRESSOF is incorrect; we don't
5098 want to (e.g.,) make (addressof (const_int 0)) just because
5099 the location is known to be zero. */
5100 case ADDRESSOF:
7afe21cc
RK
5101 return x;
5102
5103#ifdef HAVE_cc0
5104 case CC0:
5105 return prev_insn_cc0;
5106#endif
5107
5108 case PC:
5109 /* If the next insn is a CODE_LABEL followed by a jump table,
5110 PC's value is a LABEL_REF pointing to that label. That
5111 lets us fold switch statements on the Vax. */
5112 if (insn && GET_CODE (insn) == JUMP_INSN)
5113 {
5114 rtx next = next_nonnote_insn (insn);
5115
5116 if (next && GET_CODE (next) == CODE_LABEL
5117 && NEXT_INSN (next) != 0
5118 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5119 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5120 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
38a448ca 5121 return gen_rtx_LABEL_REF (Pmode, next);
7afe21cc
RK
5122 }
5123 break;
5124
5125 case SUBREG:
c610adec
RK
5126 /* See if we previously assigned a constant value to this SUBREG. */
5127 if ((new = lookup_as_function (x, CONST_INT)) != 0
5128 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
5129 return new;
5130
4b980e20
RK
5131 /* If this is a paradoxical SUBREG, we have no idea what value the
5132 extra bits would have. However, if the operand is equivalent
5133 to a SUBREG whose operand is the same as our mode, and all the
5134 modes are within a word, we can just use the inner operand
31c85c78
RK
5135 because these SUBREGs just say how to treat the register.
5136
5137 Similarly if we find an integer constant. */
4b980e20 5138
e5f6a288 5139 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
5140 {
5141 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5142 struct table_elt *elt;
5143
5144 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5145 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5146 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5147 imode)) != 0)
31c85c78
RK
5148 for (elt = elt->first_same_value;
5149 elt; elt = elt->next_same_value)
5150 {
5151 if (CONSTANT_P (elt->exp)
5152 && GET_MODE (elt->exp) == VOIDmode)
5153 return elt->exp;
5154
4b980e20
RK
5155 if (GET_CODE (elt->exp) == SUBREG
5156 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 5157 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5158 return copy_rtx (SUBREG_REG (elt->exp));
5159 }
5160
5161 return x;
5162 }
e5f6a288 5163
7afe21cc
RK
5164 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5165 We might be able to if the SUBREG is extracting a single word in an
5166 integral mode or extracting the low part. */
5167
5168 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5169 const_arg0 = equiv_constant (folded_arg0);
5170 if (const_arg0)
5171 folded_arg0 = const_arg0;
5172
5173 if (folded_arg0 != SUBREG_REG (x))
5174 {
5175 new = 0;
5176
5177 if (GET_MODE_CLASS (mode) == MODE_INT
5178 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5179 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5180 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5181 GET_MODE (SUBREG_REG (x)));
5182 if (new == 0 && subreg_lowpart_p (x))
5183 new = gen_lowpart_if_possible (mode, folded_arg0);
5184 if (new)
5185 return new;
5186 }
e5f6a288
RK
5187
5188 /* If this is a narrowing SUBREG and our operand is a REG, see if
858a47b1 5189 we can find an equivalence for REG that is an arithmetic operation
e5f6a288
RK
5190 in a wider mode where both operands are paradoxical SUBREGs
5191 from objects of our result mode. In that case, we couldn't report
5192 an equivalent value for that operation, since we don't know what the
5193 extra bits will be. But we can find an equivalence for this SUBREG
 5194 by folding that operation in the narrow mode. This allows us to
5195 fold arithmetic in narrow modes when the machine only supports
4b980e20
RK
5196 word-sized arithmetic.
5197
5198 Also look for a case where we have a SUBREG whose operand is the
5199 same as our result. If both modes are smaller than a word, we
5200 are simply interpreting a register in different modes and we
5201 can use the inner value. */
e5f6a288
RK
5202
5203 if (GET_CODE (folded_arg0) == REG
e8d76a39
RS
5204 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5205 && subreg_lowpart_p (x))
e5f6a288
RK
5206 {
5207 struct table_elt *elt;
5208
5209 /* We can use HASH here since we know that canon_hash won't be
5210 called. */
5211 elt = lookup (folded_arg0,
5212 HASH (folded_arg0, GET_MODE (folded_arg0)),
5213 GET_MODE (folded_arg0));
5214
5215 if (elt)
5216 elt = elt->first_same_value;
5217
5218 for (; elt; elt = elt->next_same_value)
5219 {
e8d76a39
RS
5220 enum rtx_code eltcode = GET_CODE (elt->exp);
5221
e5f6a288
RK
5222 /* Just check for unary and binary operations. */
5223 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5224 && GET_CODE (elt->exp) != SIGN_EXTEND
5225 && GET_CODE (elt->exp) != ZERO_EXTEND
5226 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5227 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5228 {
5229 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5230
5231 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5232 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5233
5234 op0 = equiv_constant (op0);
5235 if (op0)
5236 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5237 op0, mode);
5238 }
5239 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5240 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
e8d76a39
RS
5241 && eltcode != DIV && eltcode != MOD
5242 && eltcode != UDIV && eltcode != UMOD
5243 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5244 && eltcode != ROTATE && eltcode != ROTATERT
e5f6a288
RK
5245 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5246 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5247 == mode))
5248 || CONSTANT_P (XEXP (elt->exp, 0)))
5249 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5250 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5251 == mode))
5252 || CONSTANT_P (XEXP (elt->exp, 1))))
5253 {
5254 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5255 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5256
5257 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5258 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5259
5260 if (op0)
5261 op0 = equiv_constant (op0);
5262
5263 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
906c4e36 5264 op1 = fold_rtx (op1, NULL_RTX);
e5f6a288
RK
5265
5266 if (op1)
5267 op1 = equiv_constant (op1);
5268
76fb0b60
RS
5269 /* If we are looking for the low SImode part of
5270 (ashift:DI c (const_int 32)), it doesn't work
5271 to compute that in SImode, because a 32-bit shift
5272 in SImode is unpredictable. We know the value is 0. */
5273 if (op0 && op1
45620ed4 5274 && GET_CODE (elt->exp) == ASHIFT
76fb0b60
RS
5275 && GET_CODE (op1) == CONST_INT
5276 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5277 {
5278 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5279
5280 /* If the count fits in the inner mode's width,
5281 but exceeds the outer mode's width,
5282 the value will get truncated to 0
5283 by the subreg. */
5284 new = const0_rtx;
5285 else
5286 /* If the count exceeds even the inner mode's width,
5287 don't fold this expression. */
5288 new = 0;
5289 }
5290 else if (op0 && op1)
e5f6a288
RK
5291 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5292 op0, op1);
5293 }
5294
4b980e20
RK
5295 else if (GET_CODE (elt->exp) == SUBREG
5296 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5297 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5298 <= UNITS_PER_WORD)
906c4e36 5299 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5300 new = copy_rtx (SUBREG_REG (elt->exp));
5301
e5f6a288
RK
5302 if (new)
5303 return new;
5304 }
5305 }
5306
7afe21cc
RK
5307 return x;
5308
5309 case NOT:
5310 case NEG:
5311 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5312 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5313 new = lookup_as_function (XEXP (x, 0), code);
5314 if (new)
5315 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5316 break;
13c9910f 5317
7afe21cc
RK
5318 case MEM:
5319 /* If we are not actually processing an insn, don't try to find the
5320 best address. Not only don't we care, but we could modify the
5321 MEM in an invalid way since we have no insn to validate against. */
5322 if (insn != 0)
5323 find_best_addr (insn, &XEXP (x, 0));
5324
5325 {
5326 /* Even if we don't fold in the insn itself,
5327 we can safely do so here, in hopes of getting a constant. */
906c4e36 5328 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 5329 rtx base = 0;
906c4e36 5330 HOST_WIDE_INT offset = 0;
7afe21cc
RK
5331
5332 if (GET_CODE (addr) == REG
5333 && REGNO_QTY_VALID_P (REGNO (addr))
30f72379
MM
5334 && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5335 && qty_const[REG_QTY (REGNO (addr))] != 0)
5336 addr = qty_const[REG_QTY (REGNO (addr))];
7afe21cc
RK
5337
5338 /* If address is constant, split it into a base and integer offset. */
5339 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5340 base = addr;
5341 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5342 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5343 {
5344 base = XEXP (XEXP (addr, 0), 0);
5345 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5346 }
5347 else if (GET_CODE (addr) == LO_SUM
5348 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5349 base = XEXP (addr, 1);
e9a25f70 5350 else if (GET_CODE (addr) == ADDRESSOF)
956d6950 5351 return change_address (x, VOIDmode, addr);
7afe21cc
RK
5352
5353 /* If this is a constant pool reference, we can fold it into its
5354 constant to allow better value tracking. */
5355 if (base && GET_CODE (base) == SYMBOL_REF
5356 && CONSTANT_POOL_ADDRESS_P (base))
5357 {
5358 rtx constant = get_pool_constant (base);
5359 enum machine_mode const_mode = get_pool_mode (base);
5360 rtx new;
5361
5362 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5363 constant_pool_entries_cost = COST (constant);
5364
5365 /* If we are loading the full constant, we have an equivalence. */
5366 if (offset == 0 && mode == const_mode)
5367 return constant;
5368
9faa82d8 5369 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
5370 anything. Otherwise, handle the two most common cases:
5371 extracting a word from a multi-word constant, and extracting
5372 the low-order bits. Other cases don't seem common enough to
5373 worry about. */
5374 if (! CONSTANT_P (constant))
5375 return x;
5376
5377 if (GET_MODE_CLASS (mode) == MODE_INT
5378 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5379 && offset % UNITS_PER_WORD == 0
5380 && (new = operand_subword (constant,
5381 offset / UNITS_PER_WORD,
5382 0, const_mode)) != 0)
5383 return new;
5384
5385 if (((BYTES_BIG_ENDIAN
5386 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5387 || (! BYTES_BIG_ENDIAN && offset == 0))
5388 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5389 return new;
5390 }
5391
5392 /* If this is a reference to a label at a known position in a jump
5393 table, we also know its value. */
5394 if (base && GET_CODE (base) == LABEL_REF)
5395 {
5396 rtx label = XEXP (base, 0);
5397 rtx table_insn = NEXT_INSN (label);
5398
5399 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5400 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5401 {
5402 rtx table = PATTERN (table_insn);
5403
5404 if (offset >= 0
5405 && (offset / GET_MODE_SIZE (GET_MODE (table))
5406 < XVECLEN (table, 0)))
5407 return XVECEXP (table, 0,
5408 offset / GET_MODE_SIZE (GET_MODE (table)));
5409 }
5410 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5411 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5412 {
5413 rtx table = PATTERN (table_insn);
5414
5415 if (offset >= 0
5416 && (offset / GET_MODE_SIZE (GET_MODE (table))
5417 < XVECLEN (table, 1)))
5418 {
5419 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
5420 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5421 XEXP (table, 0));
7afe21cc
RK
5422
5423 if (GET_MODE (table) != Pmode)
38a448ca 5424 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 5425
67a37737
RK
5426 /* Indicate this is a constant. This isn't a
5427 valid form of CONST, but it will only be used
5428 to fold the next insns and then discarded, so
ac7ef8d5
FS
5429 it should be safe.
5430
5431 Note this expression must be explicitly discarded,
5432 by cse_insn, else it may end up in a REG_EQUAL note
5433 and "escape" to cause problems elsewhere. */
38a448ca 5434 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
5435 }
5436 }
5437 }
5438
5439 return x;
5440 }
9255709c
RK
5441
5442 case ASM_OPERANDS:
5443 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5444 validate_change (insn, &XVECEXP (x, 3, i),
5445 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5446 break;
e9a25f70
JL
5447
5448 default:
5449 break;
7afe21cc
RK
5450 }
5451
5452 const_arg0 = 0;
5453 const_arg1 = 0;
5454 const_arg2 = 0;
5455 mode_arg0 = VOIDmode;
5456
5457 /* Try folding our operands.
5458 Then see which ones have constant values known. */
5459
5460 fmt = GET_RTX_FORMAT (code);
5461 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5462 if (fmt[i] == 'e')
5463 {
5464 rtx arg = XEXP (x, i);
5465 rtx folded_arg = arg, const_arg = 0;
5466 enum machine_mode mode_arg = GET_MODE (arg);
5467 rtx cheap_arg, expensive_arg;
5468 rtx replacements[2];
5469 int j;
5470
5471 /* Most arguments are cheap, so handle them specially. */
5472 switch (GET_CODE (arg))
5473 {
5474 case REG:
5475 /* This is the same as calling equiv_constant; it is duplicated
5476 here for speed. */
5477 if (REGNO_QTY_VALID_P (REGNO (arg))
30f72379
MM
5478 && qty_const[REG_QTY (REGNO (arg))] != 0
5479 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5480 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
7afe21cc
RK
5481 const_arg
5482 = gen_lowpart_if_possible (GET_MODE (arg),
30f72379 5483 qty_const[REG_QTY (REGNO (arg))]);
7afe21cc
RK
5484 break;
5485
5486 case CONST:
5487 case CONST_INT:
5488 case SYMBOL_REF:
5489 case LABEL_REF:
5490 case CONST_DOUBLE:
5491 const_arg = arg;
5492 break;
5493
5494#ifdef HAVE_cc0
5495 case CC0:
5496 folded_arg = prev_insn_cc0;
5497 mode_arg = prev_insn_cc0_mode;
5498 const_arg = equiv_constant (folded_arg);
5499 break;
5500#endif
5501
5502 default:
5503 folded_arg = fold_rtx (arg, insn);
5504 const_arg = equiv_constant (folded_arg);
5505 }
5506
5507 /* For the first three operands, see if the operand
5508 is constant or equivalent to a constant. */
5509 switch (i)
5510 {
5511 case 0:
5512 folded_arg0 = folded_arg;
5513 const_arg0 = const_arg;
5514 mode_arg0 = mode_arg;
5515 break;
5516 case 1:
5517 folded_arg1 = folded_arg;
5518 const_arg1 = const_arg;
5519 break;
5520 case 2:
5521 const_arg2 = const_arg;
5522 break;
5523 }
5524
5525 /* Pick the least expensive of the folded argument and an
5526 equivalent constant argument. */
5527 if (const_arg == 0 || const_arg == folded_arg
5528 || COST (const_arg) > COST (folded_arg))
5529 cheap_arg = folded_arg, expensive_arg = const_arg;
5530 else
5531 cheap_arg = const_arg, expensive_arg = folded_arg;
5532
5533 /* Try to replace the operand with the cheapest of the two
5534 possibilities. If it doesn't work and this is either of the first
5535 two operands of a commutative operation, try swapping them.
5536 If THAT fails, try the more expensive, provided it is cheaper
5537 than what is already there. */
5538
5539 if (cheap_arg == XEXP (x, i))
5540 continue;
5541
5542 if (insn == 0 && ! copied)
5543 {
5544 x = copy_rtx (x);
5545 copied = 1;
5546 }
5547
5548 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5549 for (j = 0;
5550 j < 2 && replacements[j]
5551 && COST (replacements[j]) < COST (XEXP (x, i));
5552 j++)
5553 {
5554 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5555 break;
5556
5557 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5558 {
5559 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5560 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5561
5562 if (apply_change_group ())
5563 {
5564 /* Swap them back to be invalid so that this loop can
5565 continue and flag them to be swapped back later. */
5566 rtx tem;
5567
5568 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5569 XEXP (x, 1) = tem;
5570 must_swap = 1;
5571 break;
5572 }
5573 }
5574 }
5575 }
5576
2d8b0f3a
JL
5577 else
5578 {
5579 if (fmt[i] == 'E')
5580 /* Don't try to fold inside of a vector of expressions.
5581 Doing nothing is harmless. */
5582 {;}
5583 }
7afe21cc
RK
5584
5585 /* If a commutative operation, place a constant integer as the second
5586 operand unless the first operand is also a constant integer. Otherwise,
5587 place any constant second unless the first operand is also a constant. */
5588
5589 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5590 {
5591 if (must_swap || (const_arg0
5592 && (const_arg1 == 0
5593 || (GET_CODE (const_arg0) == CONST_INT
5594 && GET_CODE (const_arg1) != CONST_INT))))
5595 {
5596 register rtx tem = XEXP (x, 0);
5597
5598 if (insn == 0 && ! copied)
5599 {
5600 x = copy_rtx (x);
5601 copied = 1;
5602 }
5603
5604 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5605 validate_change (insn, &XEXP (x, 1), tem, 1);
5606 if (apply_change_group ())
5607 {
5608 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5609 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5610 }
5611 }
5612 }
5613
5614 /* If X is an arithmetic operation, see if we can simplify it. */
5615
5616 switch (GET_RTX_CLASS (code))
5617 {
5618 case '1':
67a37737
RK
5619 {
5620 int is_const = 0;
5621
5622 /* We can't simplify extension ops unless we know the
5623 original mode. */
5624 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5625 && mode_arg0 == VOIDmode)
5626 break;
5627
5628 /* If we had a CONST, strip it off and put it back later if we
5629 fold. */
5630 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5631 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5632
5633 new = simplify_unary_operation (code, mode,
5634 const_arg0 ? const_arg0 : folded_arg0,
5635 mode_arg0);
5636 if (new != 0 && is_const)
38a448ca 5637 new = gen_rtx_CONST (mode, new);
67a37737 5638 }
7afe21cc
RK
5639 break;
5640
5641 case '<':
5642 /* See what items are actually being compared and set FOLDED_ARG[01]
5643 to those values and CODE to the actual comparison code. If any are
5644 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5645 do anything if both operands are already known to be constant. */
5646
5647 if (const_arg0 == 0 || const_arg1 == 0)
5648 {
5649 struct table_elt *p0, *p1;
c610adec 5650 rtx true = const_true_rtx, false = const0_rtx;
13c9910f 5651 enum machine_mode mode_arg1;
c610adec
RK
5652
5653#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5654 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5655 {
560c94a2
RK
5656 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5657 mode);
c610adec
RK
5658 false = CONST0_RTX (mode);
5659 }
5660#endif
7afe21cc 5661
13c9910f
RS
5662 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5663 &mode_arg0, &mode_arg1);
7afe21cc
RK
5664 const_arg0 = equiv_constant (folded_arg0);
5665 const_arg1 = equiv_constant (folded_arg1);
5666
13c9910f
RS
5667 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5668 what kinds of things are being compared, so we can't do
5669 anything with this comparison. */
7afe21cc
RK
5670
5671 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5672 break;
5673
0f41302f
MS
5674 /* If we do not now have two constants being compared, see
5675 if we can nevertheless deduce some things about the
5676 comparison. */
7afe21cc
RK
5677 if (const_arg0 == 0 || const_arg1 == 0)
5678 {
0f41302f
MS
5679 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5680 non-explicit constant? These aren't zero, but we
5681 don't know their sign. */
7afe21cc
RK
5682 if (const_arg1 == const0_rtx
5683 && (NONZERO_BASE_PLUS_P (folded_arg0)
5684#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5685 come out as 0. */
5686 || GET_CODE (folded_arg0) == SYMBOL_REF
5687#endif
5688 || GET_CODE (folded_arg0) == LABEL_REF
5689 || GET_CODE (folded_arg0) == CONST))
5690 {
5691 if (code == EQ)
c610adec 5692 return false;
7afe21cc 5693 else if (code == NE)
c610adec 5694 return true;
7afe21cc
RK
5695 }
5696
5697 /* See if the two operands are the same. We don't do this
5698 for IEEE floating-point since we can't assume x == x
5699 since x might be a NaN. */
5700
5701 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 5702 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
7afe21cc
RK
5703 && (folded_arg0 == folded_arg1
5704 || (GET_CODE (folded_arg0) == REG
5705 && GET_CODE (folded_arg1) == REG
30f72379
MM
5706 && (REG_QTY (REGNO (folded_arg0))
5707 == REG_QTY (REGNO (folded_arg1))))
7afe21cc
RK
5708 || ((p0 = lookup (folded_arg0,
5709 (safe_hash (folded_arg0, mode_arg0)
5710 % NBUCKETS), mode_arg0))
5711 && (p1 = lookup (folded_arg1,
5712 (safe_hash (folded_arg1, mode_arg0)
5713 % NBUCKETS), mode_arg0))
5714 && p0->first_same_value == p1->first_same_value)))
5715 return ((code == EQ || code == LE || code == GE
5716 || code == LEU || code == GEU)
c610adec 5717 ? true : false);
7afe21cc
RK
5718
5719 /* If FOLDED_ARG0 is a register, see if the comparison we are
5720 doing now is either the same as we did before or the reverse
5721 (we only check the reverse if not floating-point). */
5722 else if (GET_CODE (folded_arg0) == REG)
5723 {
30f72379 5724 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc
RK
5725
5726 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5727 && (comparison_dominates_p (qty_comparison_code[qty], code)
5728 || (comparison_dominates_p (qty_comparison_code[qty],
5729 reverse_condition (code))
cbf6a543 5730 && ! FLOAT_MODE_P (mode_arg0)))
7afe21cc
RK
5731 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5732 || (const_arg1
5733 && rtx_equal_p (qty_comparison_const[qty],
5734 const_arg1))
5735 || (GET_CODE (folded_arg1) == REG
30f72379 5736 && (REG_QTY (REGNO (folded_arg1))
7afe21cc
RK
5737 == qty_comparison_qty[qty]))))
5738 return (comparison_dominates_p (qty_comparison_code[qty],
5739 code)
c610adec 5740 ? true : false);
7afe21cc
RK
5741 }
5742 }
5743 }
5744
5745 /* If we are comparing against zero, see if the first operand is
5746 equivalent to an IOR with a constant. If so, we may be able to
5747 determine the result of this comparison. */
5748
5749 if (const_arg1 == const0_rtx)
5750 {
5751 rtx y = lookup_as_function (folded_arg0, IOR);
5752 rtx inner_const;
5753
5754 if (y != 0
5755 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5756 && GET_CODE (inner_const) == CONST_INT
5757 && INTVAL (inner_const) != 0)
5758 {
5759 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
5760 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5761 && (INTVAL (inner_const)
5762 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
c610adec
RK
5763 rtx true = const_true_rtx, false = const0_rtx;
5764
5765#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5766 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5767 {
560c94a2
RK
5768 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5769 mode);
c610adec
RK
5770 false = CONST0_RTX (mode);
5771 }
5772#endif
7afe21cc
RK
5773
5774 switch (code)
5775 {
5776 case EQ:
c610adec 5777 return false;
7afe21cc 5778 case NE:
c610adec 5779 return true;
7afe21cc
RK
5780 case LT: case LE:
5781 if (has_sign)
c610adec 5782 return true;
7afe21cc
RK
5783 break;
5784 case GT: case GE:
5785 if (has_sign)
c610adec 5786 return false;
7afe21cc 5787 break;
e9a25f70
JL
5788 default:
5789 break;
7afe21cc
RK
5790 }
5791 }
5792 }
5793
5794 new = simplify_relational_operation (code, mode_arg0,
5795 const_arg0 ? const_arg0 : folded_arg0,
5796 const_arg1 ? const_arg1 : folded_arg1);
c610adec
RK
5797#ifdef FLOAT_STORE_FLAG_VALUE
5798 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5799 new = ((new == const0_rtx) ? CONST0_RTX (mode)
560c94a2 5800 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
c610adec 5801#endif
7afe21cc
RK
5802 break;
5803
5804 case '2':
5805 case 'c':
5806 switch (code)
5807 {
5808 case PLUS:
5809 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5810 with that LABEL_REF as its second operand. If so, the result is
5811 the first operand of that MINUS. This handles switches with an
5812 ADDR_DIFF_VEC table. */
5813 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5814 {
e650cbda
RK
5815 rtx y
5816 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5817 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
5818
5819 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5820 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5821 return XEXP (y, 0);
67a37737
RK
5822
5823 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
5824 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5825 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
5826 && GET_CODE (XEXP (y, 0)) == MINUS
5827 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5828 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5829 return XEXP (XEXP (y, 0), 0);
7afe21cc 5830 }
c2cc0778 5831
e650cbda
RK
5832 /* Likewise if the operands are in the other order. */
5833 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5834 {
5835 rtx y
5836 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5837 : lookup_as_function (folded_arg1, MINUS);
5838
5839 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5840 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5841 return XEXP (y, 0);
5842
5843 /* Now try for a CONST of a MINUS like the above. */
5844 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5845 : lookup_as_function (folded_arg1, CONST))) != 0
5846 && GET_CODE (XEXP (y, 0)) == MINUS
5847 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5848 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5849 return XEXP (XEXP (y, 0), 0);
5850 }
5851
c2cc0778
RK
5852 /* If second operand is a register equivalent to a negative
5853 CONST_INT, see if we can find a register equivalent to the
5854 positive constant. Make a MINUS if so. Don't do this for
5d595063 5855 a non-negative constant since we might then alternate between
c2cc0778 5856 chosing positive and negative constants. Having the positive
5d595063
RK
5857 constant previously-used is the more common case. Be sure
5858 the resulting constant is non-negative; if const_arg1 were
5859 the smallest negative number this would overflow: depending
5860 on the mode, this would either just be the same value (and
5861 hence not save anything) or be incorrect. */
5862 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5863 && INTVAL (const_arg1) < 0
5864 && - INTVAL (const_arg1) >= 0
5865 && GET_CODE (folded_arg1) == REG)
c2cc0778
RK
5866 {
5867 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5868 struct table_elt *p
5869 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5870 mode);
5871
5872 if (p)
5873 for (p = p->first_same_value; p; p = p->next_same_value)
5874 if (GET_CODE (p->exp) == REG)
5875 return cse_gen_binary (MINUS, mode, folded_arg0,
5876 canon_reg (p->exp, NULL_RTX));
5877 }
13c9910f
RS
5878 goto from_plus;
5879
5880 case MINUS:
5881 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5882 If so, produce (PLUS Z C2-C). */
5883 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5884 {
5885 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5886 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
5887 return fold_rtx (plus_constant (copy_rtx (y),
5888 -INTVAL (const_arg1)),
a3b5c94a 5889 NULL_RTX);
13c9910f 5890 }
7afe21cc 5891
0f41302f 5892 /* ... fall through ... */
7afe21cc 5893
13c9910f 5894 from_plus:
7afe21cc
RK
5895 case SMIN: case SMAX: case UMIN: case UMAX:
5896 case IOR: case AND: case XOR:
5897 case MULT: case DIV: case UDIV:
5898 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5899 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5900 is known to be of similar form, we may be able to replace the
5901 operation with a combined operation. This may eliminate the
5902 intermediate operation if every use is simplified in this way.
5903 Note that the similar optimization done by combine.c only works
5904 if the intermediate operation's result has only one reference. */
5905
5906 if (GET_CODE (folded_arg0) == REG
5907 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5908 {
5909 int is_shift
5910 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5911 rtx y = lookup_as_function (folded_arg0, code);
5912 rtx inner_const;
5913 enum rtx_code associate_code;
5914 rtx new_const;
5915
5916 if (y == 0
5917 || 0 == (inner_const
5918 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5919 || GET_CODE (inner_const) != CONST_INT
5920 /* If we have compiled a statement like
5921 "if (x == (x & mask1))", and now are looking at
5922 "x & mask2", we will have a case where the first operand
5923 of Y is the same as our first operand. Unless we detect
5924 this case, an infinite loop will result. */
5925 || XEXP (y, 0) == folded_arg0)
5926 break;
5927
5928 /* Don't associate these operations if they are a PLUS with the
5929 same constant and it is a power of two. These might be doable
5930 with a pre- or post-increment. Similarly for two subtracts of
5931 identical powers of two with post decrement. */
5932
5933 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
940da324
JL
5934 && ((HAVE_PRE_INCREMENT
5935 && exact_log2 (INTVAL (const_arg1)) >= 0)
5936 || (HAVE_POST_INCREMENT
5937 && exact_log2 (INTVAL (const_arg1)) >= 0)
5938 || (HAVE_PRE_DECREMENT
5939 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5940 || (HAVE_POST_DECREMENT
5941 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
5942 break;
5943
5944 /* Compute the code used to compose the constants. For example,
5945 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5946
5947 associate_code
5948 = (code == MULT || code == DIV || code == UDIV ? MULT
5949 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5950
5951 new_const = simplify_binary_operation (associate_code, mode,
5952 const_arg1, inner_const);
5953
5954 if (new_const == 0)
5955 break;
5956
5957 /* If we are associating shift operations, don't let this
4908e508
RS
5958 produce a shift of the size of the object or larger.
5959 This could occur when we follow a sign-extend by a right
5960 shift on a machine that does a sign-extend as a pair
5961 of shifts. */
7afe21cc
RK
5962
5963 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
5964 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5965 {
5966 /* As an exception, we can turn an ASHIFTRT of this
5967 form into a shift of the number of bits - 1. */
5968 if (code == ASHIFTRT)
5969 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5970 else
5971 break;
5972 }
7afe21cc
RK
5973
5974 y = copy_rtx (XEXP (y, 0));
5975
5976 /* If Y contains our first operand (the most common way this
5977 can happen is if Y is a MEM), we would do into an infinite
5978 loop if we tried to fold it. So don't in that case. */
5979
5980 if (! reg_mentioned_p (folded_arg0, y))
5981 y = fold_rtx (y, insn);
5982
96b0e481 5983 return cse_gen_binary (code, mode, y, new_const);
7afe21cc 5984 }
e9a25f70
JL
5985 break;
5986
5987 default:
5988 break;
7afe21cc
RK
5989 }
5990
5991 new = simplify_binary_operation (code, mode,
5992 const_arg0 ? const_arg0 : folded_arg0,
5993 const_arg1 ? const_arg1 : folded_arg1);
5994 break;
5995
5996 case 'o':
5997 /* (lo_sum (high X) X) is simply X. */
5998 if (code == LO_SUM && const_arg0 != 0
5999 && GET_CODE (const_arg0) == HIGH
6000 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
6001 return const_arg1;
6002 break;
6003
6004 case '3':
6005 case 'b':
6006 new = simplify_ternary_operation (code, mode, mode_arg0,
6007 const_arg0 ? const_arg0 : folded_arg0,
6008 const_arg1 ? const_arg1 : folded_arg1,
6009 const_arg2 ? const_arg2 : XEXP (x, 2));
6010 break;
ee5332b8
RH
6011
6012 case 'x':
6013 /* Always eliminate CONSTANT_P_RTX at this stage. */
6014 if (code == CONSTANT_P_RTX)
6015 return (const_arg0 ? const1_rtx : const0_rtx);
6016 break;
7afe21cc
RK
6017 }
6018
6019 return new ? new : x;
6020}
6021\f
6022/* Return a constant value currently equivalent to X.
6023 Return 0 if we don't know one. */
6024
6025static rtx
6026equiv_constant (x)
6027 rtx x;
6028{
6029 if (GET_CODE (x) == REG
6030 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
6031 && qty_const[REG_QTY (REGNO (x))])
6032 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
7afe21cc 6033
2ce5e1b4 6034 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
6035 return x;
6036
fc3ffe83
RK
6037 /* If X is a MEM, try to fold it outside the context of any insn to see if
6038 it might be equivalent to a constant. That handles the case where it
6039 is a constant-pool reference. Then try to look it up in the hash table
6040 in case it is something whose value we have seen before. */
6041
6042 if (GET_CODE (x) == MEM)
6043 {
6044 struct table_elt *elt;
6045
906c4e36 6046 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
6047 if (CONSTANT_P (x))
6048 return x;
6049
6050 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6051 if (elt == 0)
6052 return 0;
6053
6054 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6055 if (elt->is_const && CONSTANT_P (elt->exp))
6056 return elt->exp;
6057 }
6058
7afe21cc
RK
6059 return 0;
6060}
6061\f
6062/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6063 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6064 least-significant part of X.
6065 MODE specifies how big a part of X to return.
6066
6067 If the requested operation cannot be done, 0 is returned.
6068
6069 This is similar to gen_lowpart in emit-rtl.c. */
6070
6071rtx
6072gen_lowpart_if_possible (mode, x)
6073 enum machine_mode mode;
6074 register rtx x;
6075{
6076 rtx result = gen_lowpart_common (mode, x);
6077
6078 if (result)
6079 return result;
6080 else if (GET_CODE (x) == MEM)
6081 {
6082 /* This is the only other case we handle. */
6083 register int offset = 0;
6084 rtx new;
6085
f76b9db2
ILT
6086 if (WORDS_BIG_ENDIAN)
6087 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6088 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6089 if (BYTES_BIG_ENDIAN)
6090 /* Adjust the address so that the address-after-the-data is
6091 unchanged. */
6092 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6093 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
38a448ca 6094 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
7afe21cc
RK
6095 if (! memory_address_p (mode, XEXP (new, 0)))
6096 return 0;
7afe21cc 6097 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 6098 MEM_COPY_ATTRIBUTES (new, x);
7afe21cc
RK
6099 return new;
6100 }
6101 else
6102 return 0;
6103}
6104\f
6105/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6106 branch. It will be zero if not.
6107
6108 In certain cases, this can cause us to add an equivalence. For example,
6109 if we are following the taken case of
6110 if (i == 2)
6111 we can add the fact that `i' and '2' are now equivalent.
6112
6113 In any case, we can record that this comparison was passed. If the same
6114 comparison is seen later, we will know its value. */
6115
6116static void
6117record_jump_equiv (insn, taken)
6118 rtx insn;
6119 int taken;
6120{
6121 int cond_known_true;
6122 rtx op0, op1;
13c9910f 6123 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
6124 int reversed_nonequality = 0;
6125 enum rtx_code code;
6126
6127 /* Ensure this is the right kind of insn. */
6128 if (! condjump_p (insn) || simplejump_p (insn))
6129 return;
6130
6131 /* See if this jump condition is known true or false. */
6132 if (taken)
6133 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6134 else
6135 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6136
6137 /* Get the type of comparison being done and the operands being compared.
6138 If we had to reverse a non-equality condition, record that fact so we
6139 know that it isn't valid for floating-point. */
6140 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6141 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6142 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6143
13c9910f 6144 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
6145 if (! cond_known_true)
6146 {
6147 reversed_nonequality = (code != EQ && code != NE);
6148 code = reverse_condition (code);
6149 }
6150
6151 /* The mode is the mode of the non-constant. */
13c9910f
RS
6152 mode = mode0;
6153 if (mode1 != VOIDmode)
6154 mode = mode1;
7afe21cc
RK
6155
6156 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6157}
6158
6159/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6160 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6161 Make any useful entries we can with that information. Called from
6162 above function and called recursively. */
6163
6164static void
6165record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6166 enum rtx_code code;
6167 enum machine_mode mode;
6168 rtx op0, op1;
6169 int reversed_nonequality;
6170{
2197a88a 6171 unsigned op0_hash, op1_hash;
7afe21cc
RK
6172 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6173 struct table_elt *op0_elt, *op1_elt;
6174
6175 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6176 we know that they are also equal in the smaller mode (this is also
6177 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 6178 is not worth testing for with no SUBREG). */
7afe21cc 6179
2e794ee8 6180 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 6181 if (code == EQ && GET_CODE (op0) == SUBREG
2e794ee8
RS
6182 && (GET_MODE_SIZE (GET_MODE (op0))
6183 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
6184 {
6185 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6186 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6187
6188 record_jump_cond (code, mode, SUBREG_REG (op0),
38a448ca 6189 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
7afe21cc
RK
6190 reversed_nonequality);
6191 }
6192
6193 if (code == EQ && GET_CODE (op1) == SUBREG
2e794ee8
RS
6194 && (GET_MODE_SIZE (GET_MODE (op1))
6195 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
6196 {
6197 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6198 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6199
6200 record_jump_cond (code, mode, SUBREG_REG (op1),
38a448ca 6201 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
7afe21cc
RK
6202 reversed_nonequality);
6203 }
6204
6205 /* Similarly, if this is an NE comparison, and either is a SUBREG
6206 making a smaller mode, we know the whole thing is also NE. */
6207
2e794ee8
RS
6208 /* Note that GET_MODE (op0) may not equal MODE;
6209 if we test MODE instead, we can get an infinite recursion
6210 alternating between two modes each wider than MODE. */
6211
7afe21cc
RK
6212 if (code == NE && GET_CODE (op0) == SUBREG
6213 && subreg_lowpart_p (op0)
2e794ee8
RS
6214 && (GET_MODE_SIZE (GET_MODE (op0))
6215 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
6216 {
6217 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6218 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6219
6220 record_jump_cond (code, mode, SUBREG_REG (op0),
38a448ca 6221 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
7afe21cc
RK
6222 reversed_nonequality);
6223 }
6224
6225 if (code == NE && GET_CODE (op1) == SUBREG
6226 && subreg_lowpart_p (op1)
2e794ee8
RS
6227 && (GET_MODE_SIZE (GET_MODE (op1))
6228 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
6229 {
6230 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6231 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6232
6233 record_jump_cond (code, mode, SUBREG_REG (op1),
38a448ca 6234 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
7afe21cc
RK
6235 reversed_nonequality);
6236 }
6237
6238 /* Hash both operands. */
6239
6240 do_not_record = 0;
6241 hash_arg_in_memory = 0;
6242 hash_arg_in_struct = 0;
2197a88a 6243 op0_hash = HASH (op0, mode);
7afe21cc
RK
6244 op0_in_memory = hash_arg_in_memory;
6245 op0_in_struct = hash_arg_in_struct;
6246
6247 if (do_not_record)
6248 return;
6249
6250 do_not_record = 0;
6251 hash_arg_in_memory = 0;
6252 hash_arg_in_struct = 0;
2197a88a 6253 op1_hash = HASH (op1, mode);
7afe21cc
RK
6254 op1_in_memory = hash_arg_in_memory;
6255 op1_in_struct = hash_arg_in_struct;
6256
6257 if (do_not_record)
6258 return;
6259
6260 /* Look up both operands. */
2197a88a
RK
6261 op0_elt = lookup (op0, op0_hash, mode);
6262 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 6263
af3869c1
RK
6264 /* If both operands are already equivalent or if they are not in the
6265 table but are identical, do nothing. */
6266 if ((op0_elt != 0 && op1_elt != 0
6267 && op0_elt->first_same_value == op1_elt->first_same_value)
6268 || op0 == op1 || rtx_equal_p (op0, op1))
6269 return;
6270
7afe21cc 6271 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
6272 comparison. Similarly if this is floating-point. In the latter
6273 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6274 If we record the equality, we might inadvertently delete code
6275 whose intent was to change -0 to +0. */
6276
cbf6a543 6277 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc
RK
6278 {
6279 /* If we reversed a floating-point comparison, if OP0 is not a
6280 register, or if OP1 is neither a register or constant, we can't
6281 do anything. */
6282
6283 if (GET_CODE (op1) != REG)
6284 op1 = equiv_constant (op1);
6285
cbf6a543 6286 if ((reversed_nonequality && FLOAT_MODE_P (mode))
7afe21cc
RK
6287 || GET_CODE (op0) != REG || op1 == 0)
6288 return;
6289
6290 /* Put OP0 in the hash table if it isn't already. This gives it a
6291 new quantity number. */
6292 if (op0_elt == 0)
6293 {
906c4e36 6294 if (insert_regs (op0, NULL_PTR, 0))
7afe21cc
RK
6295 {
6296 rehash_using_reg (op0);
2197a88a 6297 op0_hash = HASH (op0, mode);
2bb81c86
RK
6298
6299 /* If OP0 is contained in OP1, this changes its hash code
6300 as well. Faster to rehash than to check, except
6301 for the simple case of a constant. */
6302 if (! CONSTANT_P (op1))
2197a88a 6303 op1_hash = HASH (op1,mode);
7afe21cc
RK
6304 }
6305
2197a88a 6306 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
7afe21cc
RK
6307 op0_elt->in_memory = op0_in_memory;
6308 op0_elt->in_struct = op0_in_struct;
6309 }
6310
30f72379 6311 qty_comparison_code[REG_QTY (REGNO (op0))] = code;
7afe21cc
RK
6312 if (GET_CODE (op1) == REG)
6313 {
5d5ea909 6314 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 6315 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 6316
7afe21cc
RK
6317 /* Put OP1 in the hash table so it gets a new quantity number. */
6318 if (op1_elt == 0)
6319 {
906c4e36 6320 if (insert_regs (op1, NULL_PTR, 0))
7afe21cc
RK
6321 {
6322 rehash_using_reg (op1);
2197a88a 6323 op1_hash = HASH (op1, mode);
7afe21cc
RK
6324 }
6325
2197a88a 6326 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
7afe21cc
RK
6327 op1_elt->in_memory = op1_in_memory;
6328 op1_elt->in_struct = op1_in_struct;
6329 }
6330
30f72379
MM
6331 qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6332 qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
7afe21cc
RK
6333 }
6334 else
6335 {
30f72379
MM
6336 qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6337 qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
7afe21cc
RK
6338 }
6339
6340 return;
6341 }
6342
eb5ad42a
RS
6343 /* If either side is still missing an equivalence, make it now,
6344 then merge the equivalences. */
7afe21cc 6345
7afe21cc
RK
6346 if (op0_elt == 0)
6347 {
eb5ad42a 6348 if (insert_regs (op0, NULL_PTR, 0))
7afe21cc
RK
6349 {
6350 rehash_using_reg (op0);
2197a88a 6351 op0_hash = HASH (op0, mode);
7afe21cc
RK
6352 }
6353
2197a88a 6354 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
7afe21cc
RK
6355 op0_elt->in_memory = op0_in_memory;
6356 op0_elt->in_struct = op0_in_struct;
7afe21cc
RK
6357 }
6358
6359 if (op1_elt == 0)
6360 {
eb5ad42a 6361 if (insert_regs (op1, NULL_PTR, 0))
7afe21cc
RK
6362 {
6363 rehash_using_reg (op1);
2197a88a 6364 op1_hash = HASH (op1, mode);
7afe21cc
RK
6365 }
6366
2197a88a 6367 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
7afe21cc
RK
6368 op1_elt->in_memory = op1_in_memory;
6369 op1_elt->in_struct = op1_in_struct;
7afe21cc 6370 }
eb5ad42a
RS
6371
6372 merge_equiv_classes (op0_elt, op1_elt);
6373 last_jump_equiv_class = op0_elt;
7afe21cc
RK
6374}
6375\f
6376/* CSE processing for one instruction.
6377 First simplify sources and addresses of all assignments
6378 in the instruction, using previously-computed equivalents values.
6379 Then install the new sources and destinations in the table
6380 of available values.
6381
1ed0205e
VM
6382 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6383 the insn. It means that INSN is inside libcall block. In this
6384 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
6385
6386/* Data on one SET contained in the instruction. */
6387
6388struct set
6389{
6390 /* The SET rtx itself. */
6391 rtx rtl;
6392 /* The SET_SRC of the rtx (the original value, if it is changing). */
6393 rtx src;
6394 /* The hash-table element for the SET_SRC of the SET. */
6395 struct table_elt *src_elt;
2197a88a
RK
6396 /* Hash value for the SET_SRC. */
6397 unsigned src_hash;
6398 /* Hash value for the SET_DEST. */
6399 unsigned dest_hash;
7afe21cc
RK
6400 /* The SET_DEST, with SUBREG, etc., stripped. */
6401 rtx inner_dest;
6402 /* Place where the pointer to the INNER_DEST was found. */
6403 rtx *inner_dest_loc;
6404 /* Nonzero if the SET_SRC is in memory. */
6405 char src_in_memory;
6406 /* Nonzero if the SET_SRC is in a structure. */
6407 char src_in_struct;
6408 /* Nonzero if the SET_SRC contains something
6409 whose value cannot be predicted and understood. */
6410 char src_volatile;
6411 /* Original machine mode, in case it becomes a CONST_INT. */
6412 enum machine_mode mode;
6413 /* A constant equivalent for SET_SRC, if any. */
6414 rtx src_const;
2197a88a
RK
6415 /* Hash value of constant equivalent for SET_SRC. */
6416 unsigned src_const_hash;
7afe21cc
RK
6417 /* Table entry for constant equivalent for SET_SRC, if any. */
6418 struct table_elt *src_const_elt;
6419};
6420
6421static void
7bd8b2a8 6422cse_insn (insn, libcall_insn)
7afe21cc 6423 rtx insn;
7bd8b2a8 6424 rtx libcall_insn;
7afe21cc
RK
6425{
6426 register rtx x = PATTERN (insn);
7afe21cc 6427 register int i;
92f9aa51 6428 rtx tem;
7afe21cc
RK
6429 register int n_sets = 0;
6430
2d8b0f3a 6431#ifdef HAVE_cc0
7afe21cc
RK
6432 /* Records what this insn does to set CC0. */
6433 rtx this_insn_cc0 = 0;
135d84b8 6434 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 6435#endif
7afe21cc
RK
6436
6437 rtx src_eqv = 0;
6438 struct table_elt *src_eqv_elt = 0;
6439 int src_eqv_volatile;
6440 int src_eqv_in_memory;
6441 int src_eqv_in_struct;
2197a88a 6442 unsigned src_eqv_hash;
7afe21cc
RK
6443
6444 struct set *sets;
6445
6446 this_insn = insn;
7afe21cc
RK
6447
6448 /* Find all the SETs and CLOBBERs in this instruction.
6449 Record all the SETs in the array `set' and count them.
6450 Also determine whether there is a CLOBBER that invalidates
6451 all memory references, or all references at varying addresses. */
6452
f1e7c95f
RK
6453 if (GET_CODE (insn) == CALL_INSN)
6454 {
6455 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6456 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
bb4034b3 6457 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
f1e7c95f
RK
6458 }
6459
7afe21cc
RK
6460 if (GET_CODE (x) == SET)
6461 {
6462 sets = (struct set *) alloca (sizeof (struct set));
6463 sets[0].rtl = x;
6464
6465 /* Ignore SETs that are unconditional jumps.
6466 They never need cse processing, so this does not hurt.
6467 The reason is not efficiency but rather
6468 so that we can test at the end for instructions
6469 that have been simplified to unconditional jumps
6470 and not be misled by unchanged instructions
6471 that were unconditional jumps to begin with. */
6472 if (SET_DEST (x) == pc_rtx
6473 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6474 ;
6475
6476 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6477 The hard function value register is used only once, to copy to
6478 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6479 Ensure we invalidate the destination register. On the 80386 no
7722328e 6480 other code would invalidate it since it is a fixed_reg.
0f41302f 6481 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
6482
6483 else if (GET_CODE (SET_SRC (x)) == CALL)
6484 {
6485 canon_reg (SET_SRC (x), insn);
77fa0940 6486 apply_change_group ();
7afe21cc 6487 fold_rtx (SET_SRC (x), insn);
bb4034b3 6488 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
6489 }
6490 else
6491 n_sets = 1;
6492 }
6493 else if (GET_CODE (x) == PARALLEL)
6494 {
6495 register int lim = XVECLEN (x, 0);
6496
6497 sets = (struct set *) alloca (lim * sizeof (struct set));
6498
6499 /* Find all regs explicitly clobbered in this insn,
6500 and ensure they are not replaced with any other regs
6501 elsewhere in this insn.
6502 When a reg that is clobbered is also used for input,
6503 we should presume that that is for a reason,
6504 and we should not substitute some other register
6505 which is not supposed to be clobbered.
6506 Therefore, this loop cannot be merged into the one below
830a38ee 6507 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
6508 value clobbered. We must not let a canonicalization do
6509 anything in that case. */
6510 for (i = 0; i < lim; i++)
6511 {
6512 register rtx y = XVECEXP (x, 0, i);
2708da92
RS
6513 if (GET_CODE (y) == CLOBBER)
6514 {
6515 rtx clobbered = XEXP (y, 0);
6516
6517 if (GET_CODE (clobbered) == REG
6518 || GET_CODE (clobbered) == SUBREG)
bb4034b3 6519 invalidate (clobbered, VOIDmode);
2708da92
RS
6520 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6521 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 6522 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 6523 }
7afe21cc
RK
6524 }
6525
6526 for (i = 0; i < lim; i++)
6527 {
6528 register rtx y = XVECEXP (x, 0, i);
6529 if (GET_CODE (y) == SET)
6530 {
7722328e
RK
6531 /* As above, we ignore unconditional jumps and call-insns and
6532 ignore the result of apply_change_group. */
7afe21cc
RK
6533 if (GET_CODE (SET_SRC (y)) == CALL)
6534 {
6535 canon_reg (SET_SRC (y), insn);
77fa0940 6536 apply_change_group ();
7afe21cc 6537 fold_rtx (SET_SRC (y), insn);
bb4034b3 6538 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
6539 }
6540 else if (SET_DEST (y) == pc_rtx
6541 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6542 ;
6543 else
6544 sets[n_sets++].rtl = y;
6545 }
6546 else if (GET_CODE (y) == CLOBBER)
6547 {
9ae8ffe7 6548 /* If we clobber memory, canon the address.
7afe21cc
RK
6549 This does nothing when a register is clobbered
6550 because we have already invalidated the reg. */
6551 if (GET_CODE (XEXP (y, 0)) == MEM)
9ae8ffe7 6552 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
6553 }
6554 else if (GET_CODE (y) == USE
6555 && ! (GET_CODE (XEXP (y, 0)) == REG
6556 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6557 canon_reg (y, NULL_RTX);
7afe21cc
RK
6558 else if (GET_CODE (y) == CALL)
6559 {
7722328e
RK
6560 /* The result of apply_change_group can be ignored; see
6561 canon_reg. */
7afe21cc 6562 canon_reg (y, insn);
77fa0940 6563 apply_change_group ();
7afe21cc
RK
6564 fold_rtx (y, insn);
6565 }
6566 }
6567 }
6568 else if (GET_CODE (x) == CLOBBER)
6569 {
6570 if (GET_CODE (XEXP (x, 0)) == MEM)
9ae8ffe7 6571 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6572 }
6573
6574 /* Canonicalize a USE of a pseudo register or memory location. */
6575 else if (GET_CODE (x) == USE
6576 && ! (GET_CODE (XEXP (x, 0)) == REG
6577 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6578 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6579 else if (GET_CODE (x) == CALL)
6580 {
7722328e 6581 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 6582 canon_reg (x, insn);
77fa0940 6583 apply_change_group ();
7afe21cc
RK
6584 fold_rtx (x, insn);
6585 }
6586
7b3ab05e
JW
6587 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6588 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6589 is handled specially for this case, and if it isn't set, then there will
9faa82d8 6590 be no equivalence for the destination. */
92f9aa51
RK
6591 if (n_sets == 1 && REG_NOTES (insn) != 0
6592 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
6593 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6594 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
92f9aa51 6595 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
7afe21cc
RK
6596
6597 /* Canonicalize sources and addresses of destinations.
6598 We do this in a separate pass to avoid problems when a MATCH_DUP is
6599 present in the insn pattern. In that case, we want to ensure that
6600 we don't break the duplicate nature of the pattern. So we will replace
6601 both operands at the same time. Otherwise, we would fail to find an
6602 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
6603
6604 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 6605 but we don't do this any more. */
7afe21cc
RK
6606
6607 for (i = 0; i < n_sets; i++)
6608 {
6609 rtx dest = SET_DEST (sets[i].rtl);
6610 rtx src = SET_SRC (sets[i].rtl);
6611 rtx new = canon_reg (src, insn);
58873255 6612 int insn_code;
7afe21cc 6613
77fa0940
RK
6614 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6615 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6616 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255
RK
6617 || (insn_code = recog_memoized (insn)) < 0
6618 || insn_n_dups[insn_code] > 0)
77fa0940 6619 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
6620 else
6621 SET_SRC (sets[i].rtl) = new;
6622
6623 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6624 {
6625 validate_change (insn, &XEXP (dest, 1),
77fa0940 6626 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 6627 validate_change (insn, &XEXP (dest, 2),
77fa0940 6628 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
6629 }
6630
6631 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6632 || GET_CODE (dest) == ZERO_EXTRACT
6633 || GET_CODE (dest) == SIGN_EXTRACT)
6634 dest = XEXP (dest, 0);
6635
6636 if (GET_CODE (dest) == MEM)
6637 canon_reg (dest, insn);
6638 }
6639
77fa0940
RK
6640 /* Now that we have done all the replacements, we can apply the change
6641 group and see if they all work. Note that this will cause some
6642 canonicalizations that would have worked individually not to be applied
6643 because some other canonicalization didn't work, but this should not
7722328e
RK
6644 occur often.
6645
6646 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
6647
6648 apply_change_group ();
6649
7afe21cc
RK
6650 /* Set sets[i].src_elt to the class each source belongs to.
6651 Detect assignments from or to volatile things
6652 and set set[i] to zero so they will be ignored
6653 in the rest of this function.
6654
6655 Nothing in this loop changes the hash table or the register chains. */
6656
6657 for (i = 0; i < n_sets; i++)
6658 {
6659 register rtx src, dest;
6660 register rtx src_folded;
6661 register struct table_elt *elt = 0, *p;
6662 enum machine_mode mode;
6663 rtx src_eqv_here;
6664 rtx src_const = 0;
6665 rtx src_related = 0;
6666 struct table_elt *src_const_elt = 0;
6667 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6668 int src_related_cost = 10000, src_elt_cost = 10000;
6669 /* Set non-zero if we need to call force_const_mem on with the
6670 contents of src_folded before using it. */
6671 int src_folded_force_flag = 0;
6672
6673 dest = SET_DEST (sets[i].rtl);
6674 src = SET_SRC (sets[i].rtl);
6675
6676 /* If SRC is a constant that has no machine mode,
6677 hash it with the destination's machine mode.
6678 This way we can keep different modes separate. */
6679
6680 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6681 sets[i].mode = mode;
6682
6683 if (src_eqv)
6684 {
6685 enum machine_mode eqvmode = mode;
6686 if (GET_CODE (dest) == STRICT_LOW_PART)
6687 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6688 do_not_record = 0;
6689 hash_arg_in_memory = 0;
6690 hash_arg_in_struct = 0;
6691 src_eqv = fold_rtx (src_eqv, insn);
2197a88a 6692 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
6693
6694 /* Find the equivalence class for the equivalent expression. */
6695
6696 if (!do_not_record)
2197a88a 6697 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
6698
6699 src_eqv_volatile = do_not_record;
6700 src_eqv_in_memory = hash_arg_in_memory;
6701 src_eqv_in_struct = hash_arg_in_struct;
6702 }
6703
6704 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6705 value of the INNER register, not the destination. So it is not
3826a3da 6706 a valid substitution for the source. But save it for later. */
7afe21cc
RK
6707 if (GET_CODE (dest) == STRICT_LOW_PART)
6708 src_eqv_here = 0;
6709 else
6710 src_eqv_here = src_eqv;
6711
6712 /* Simplify and foldable subexpressions in SRC. Then get the fully-
6713 simplified result, which may not necessarily be valid. */
6714 src_folded = fold_rtx (src, insn);
6715
e6a125a0
RK
6716#if 0
6717 /* ??? This caused bad code to be generated for the m68k port with -O2.
6718 Suppose src is (CONST_INT -1), and that after truncation src_folded
6719 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6720 At the end we will add src and src_const to the same equivalence
6721 class. We now have 3 and -1 on the same equivalence class. This
6722 causes later instructions to be mis-optimized. */
7afe21cc
RK
6723 /* If storing a constant in a bitfield, pre-truncate the constant
6724 so we will be able to record it later. */
6725 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6726 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6727 {
6728 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6729
6730 if (GET_CODE (src) == CONST_INT
6731 && GET_CODE (width) == CONST_INT
906c4e36
RK
6732 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6733 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6734 src_folded
6735 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6736 << INTVAL (width)) - 1));
7afe21cc 6737 }
e6a125a0 6738#endif
7afe21cc
RK
6739
6740 /* Compute SRC's hash code, and also notice if it
6741 should not be recorded at all. In that case,
6742 prevent any further processing of this assignment. */
6743 do_not_record = 0;
6744 hash_arg_in_memory = 0;
6745 hash_arg_in_struct = 0;
6746
6747 sets[i].src = src;
2197a88a 6748 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
6749 sets[i].src_volatile = do_not_record;
6750 sets[i].src_in_memory = hash_arg_in_memory;
6751 sets[i].src_in_struct = hash_arg_in_struct;
6752
50196afa
RK
6753 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6754 a pseudo that is set more than once, do not record SRC. Using
6755 SRC as a replacement for anything else will be incorrect in that
6756 situation. Note that this usually occurs only for stack slots,
956d6950 6757 in which case all the RTL would be referring to SRC, so we don't
50196afa
RK
6758 lose any optimization opportunities by not having SRC in the
6759 hash table. */
6760
6761 if (GET_CODE (src) == MEM
6762 && find_reg_note (insn, REG_EQUIV, src) != 0
6763 && GET_CODE (dest) == REG
6764 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
b1f21e0a 6765 && REG_N_SETS (REGNO (dest)) != 1)
50196afa
RK
6766 sets[i].src_volatile = 1;
6767
0dadecf6
RK
6768#if 0
6769 /* It is no longer clear why we used to do this, but it doesn't
6770 appear to still be needed. So let's try without it since this
6771 code hurts cse'ing widened ops. */
7afe21cc
RK
6772 /* If source is a perverse subreg (such as QI treated as an SI),
6773 treat it as volatile. It may do the work of an SI in one context
6774 where the extra bits are not being used, but cannot replace an SI
6775 in general. */
6776 if (GET_CODE (src) == SUBREG
6777 && (GET_MODE_SIZE (GET_MODE (src))
6778 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6779 sets[i].src_volatile = 1;
0dadecf6 6780#endif
7afe21cc
RK
6781
6782 /* Locate all possible equivalent forms for SRC. Try to replace
6783 SRC in the insn with each cheaper equivalent.
6784
6785 We have the following types of equivalents: SRC itself, a folded
6786 version, a value given in a REG_EQUAL note, or a value related
6787 to a constant.
6788
6789 Each of these equivalents may be part of an additional class
6790 of equivalents (if more than one is in the table, they must be in
6791 the same class; we check for this).
6792
6793 If the source is volatile, we don't do any table lookups.
6794
6795 We note any constant equivalent for possible later use in a
6796 REG_NOTE. */
6797
6798 if (!sets[i].src_volatile)
2197a88a 6799 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
6800
6801 sets[i].src_elt = elt;
6802
6803 if (elt && src_eqv_here && src_eqv_elt)
6804 {
6805 if (elt->first_same_value != src_eqv_elt->first_same_value)
6806 {
6807 /* The REG_EQUAL is indicating that two formerly distinct
6808 classes are now equivalent. So merge them. */
6809 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
6810 src_eqv_hash = HASH (src_eqv, elt->mode);
6811 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
6812 }
6813
6814 src_eqv_here = 0;
6815 }
6816
6817 else if (src_eqv_elt)
6818 elt = src_eqv_elt;
6819
6820 /* Try to find a constant somewhere and record it in `src_const'.
6821 Record its table element, if any, in `src_const_elt'. Look in
6822 any known equivalences first. (If the constant is not in the
2197a88a 6823 table, also set `sets[i].src_const_hash'). */
7afe21cc
RK
6824 if (elt)
6825 for (p = elt->first_same_value; p; p = p->next_same_value)
6826 if (p->is_const)
6827 {
6828 src_const = p->exp;
6829 src_const_elt = elt;
6830 break;
6831 }
6832
6833 if (src_const == 0
6834 && (CONSTANT_P (src_folded)
6835 /* Consider (minus (label_ref L1) (label_ref L2)) as
6836 "constant" here so we will record it. This allows us
6837 to fold switch statements when an ADDR_DIFF_VEC is used. */
6838 || (GET_CODE (src_folded) == MINUS
6839 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6840 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6841 src_const = src_folded, src_const_elt = elt;
6842 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6843 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6844
6845 /* If we don't know if the constant is in the table, get its
6846 hash code and look it up. */
6847 if (src_const && src_const_elt == 0)
6848 {
2197a88a
RK
6849 sets[i].src_const_hash = HASH (src_const, mode);
6850 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
6851 }
6852
6853 sets[i].src_const = src_const;
6854 sets[i].src_const_elt = src_const_elt;
6855
6856 /* If the constant and our source are both in the table, mark them as
6857 equivalent. Otherwise, if a constant is in the table but the source
6858 isn't, set ELT to it. */
6859 if (src_const_elt && elt
6860 && src_const_elt->first_same_value != elt->first_same_value)
6861 merge_equiv_classes (elt, src_const_elt);
6862 else if (src_const_elt && elt == 0)
6863 elt = src_const_elt;
6864
6865 /* See if there is a register linearly related to a constant
6866 equivalent of SRC. */
6867 if (src_const
6868 && (GET_CODE (src_const) == CONST
6869 || (src_const_elt && src_const_elt->related_value != 0)))
6870 {
6871 src_related = use_related_value (src_const, src_const_elt);
6872 if (src_related)
6873 {
6874 struct table_elt *src_related_elt
6875 = lookup (src_related, HASH (src_related, mode), mode);
6876 if (src_related_elt && elt)
6877 {
6878 if (elt->first_same_value
6879 != src_related_elt->first_same_value)
6880 /* This can occur when we previously saw a CONST
6881 involving a SYMBOL_REF and then see the SYMBOL_REF
6882 twice. Merge the involved classes. */
6883 merge_equiv_classes (elt, src_related_elt);
6884
6885 src_related = 0;
6886 src_related_elt = 0;
6887 }
6888 else if (src_related_elt && elt == 0)
6889 elt = src_related_elt;
6890 }
6891 }
6892
e4600702
RK
6893 /* See if we have a CONST_INT that is already in a register in a
6894 wider mode. */
6895
6896 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6897 && GET_MODE_CLASS (mode) == MODE_INT
6898 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6899 {
6900 enum machine_mode wider_mode;
6901
6902 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6903 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6904 && src_related == 0;
6905 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6906 {
6907 struct table_elt *const_elt
6908 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6909
6910 if (const_elt == 0)
6911 continue;
6912
6913 for (const_elt = const_elt->first_same_value;
6914 const_elt; const_elt = const_elt->next_same_value)
6915 if (GET_CODE (const_elt->exp) == REG)
6916 {
6917 src_related = gen_lowpart_if_possible (mode,
6918 const_elt->exp);
6919 break;
6920 }
6921 }
6922 }
6923
d45cf215
RS
6924 /* Another possibility is that we have an AND with a constant in
6925 a mode narrower than a word. If so, it might have been generated
6926 as part of an "if" which would narrow the AND. If we already
6927 have done the AND in a wider mode, we can use a SUBREG of that
6928 value. */
6929
6930 if (flag_expensive_optimizations && ! src_related
6931 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6932 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6933 {
6934 enum machine_mode tmode;
38a448ca 6935 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
6936
6937 for (tmode = GET_MODE_WIDER_MODE (mode);
6938 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6939 tmode = GET_MODE_WIDER_MODE (tmode))
6940 {
6941 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6942 struct table_elt *larger_elt;
6943
6944 if (inner)
6945 {
6946 PUT_MODE (new_and, tmode);
6947 XEXP (new_and, 0) = inner;
6948 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6949 if (larger_elt == 0)
6950 continue;
6951
6952 for (larger_elt = larger_elt->first_same_value;
6953 larger_elt; larger_elt = larger_elt->next_same_value)
6954 if (GET_CODE (larger_elt->exp) == REG)
6955 {
6956 src_related
6957 = gen_lowpart_if_possible (mode, larger_elt->exp);
6958 break;
6959 }
6960
6961 if (src_related)
6962 break;
6963 }
6964 }
6965 }
7bac1be0
RK
6966
6967#ifdef LOAD_EXTEND_OP
6968 /* See if a MEM has already been loaded with a widening operation;
6969 if it has, we can use a subreg of that. Many CISC machines
6970 also have such operations, but this is only likely to be
6971 beneficial these machines. */
6972
6973 if (flag_expensive_optimizations && src_related == 0
6974 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6975 && GET_MODE_CLASS (mode) == MODE_INT
6976 && GET_CODE (src) == MEM && ! do_not_record
6977 && LOAD_EXTEND_OP (mode) != NIL)
6978 {
6979 enum machine_mode tmode;
6980
6981 /* Set what we are trying to extend and the operation it might
6982 have been extended with. */
6983 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6984 XEXP (memory_extend_rtx, 0) = src;
6985
6986 for (tmode = GET_MODE_WIDER_MODE (mode);
6987 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6988 tmode = GET_MODE_WIDER_MODE (tmode))
6989 {
6990 struct table_elt *larger_elt;
6991
6992 PUT_MODE (memory_extend_rtx, tmode);
6993 larger_elt = lookup (memory_extend_rtx,
6994 HASH (memory_extend_rtx, tmode), tmode);
6995 if (larger_elt == 0)
6996 continue;
6997
6998 for (larger_elt = larger_elt->first_same_value;
6999 larger_elt; larger_elt = larger_elt->next_same_value)
7000 if (GET_CODE (larger_elt->exp) == REG)
7001 {
7002 src_related = gen_lowpart_if_possible (mode,
7003 larger_elt->exp);
7004 break;
7005 }
7006
7007 if (src_related)
7008 break;
7009 }
7010 }
7011#endif /* LOAD_EXTEND_OP */
7012
7afe21cc
RK
7013 if (src == src_folded)
7014 src_folded = 0;
7015
7016 /* At this point, ELT, if non-zero, points to a class of expressions
7017 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
7018 and SRC_RELATED, if non-zero, each contain additional equivalent
7019 expressions. Prune these latter expressions by deleting expressions
7020 already in the equivalence class.
7021
7022 Check for an equivalent identical to the destination. If found,
7023 this is the preferred equivalent since it will likely lead to
7024 elimination of the insn. Indicate this by placing it in
7025 `src_related'. */
7026
7027 if (elt) elt = elt->first_same_value;
7028 for (p = elt; p; p = p->next_same_value)
7029 {
7030 enum rtx_code code = GET_CODE (p->exp);
7031
7032 /* If the expression is not valid, ignore it. Then we do not
7033 have to check for validity below. In most cases, we can use
7034 `rtx_equal_p', since canonicalization has already been done. */
7035 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
7036 continue;
7037
5a03c8c4
RK
7038 /* Also skip paradoxical subregs, unless that's what we're
7039 looking for. */
7040 if (code == SUBREG
7041 && (GET_MODE_SIZE (GET_MODE (p->exp))
7042 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
7043 && ! (src != 0
7044 && GET_CODE (src) == SUBREG
7045 && GET_MODE (src) == GET_MODE (p->exp)
7046 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7047 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7048 continue;
7049
7afe21cc
RK
7050 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7051 src = 0;
7052 else if (src_folded && GET_CODE (src_folded) == code
7053 && rtx_equal_p (src_folded, p->exp))
7054 src_folded = 0;
7055 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7056 && rtx_equal_p (src_eqv_here, p->exp))
7057 src_eqv_here = 0;
7058 else if (src_related && GET_CODE (src_related) == code
7059 && rtx_equal_p (src_related, p->exp))
7060 src_related = 0;
7061
7062 /* This is the same as the destination of the insns, we want
7063 to prefer it. Copy it to src_related. The code below will
7064 then give it a negative cost. */
7065 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7066 src_related = dest;
7067
7068 }
7069
7070 /* Find the cheapest valid equivalent, trying all the available
7071 possibilities. Prefer items not in the hash table to ones
7072 that are when they are equal cost. Note that we can never
7073 worsen an insn as the current contents will also succeed.
05c33dd8 7074 If we find an equivalent identical to the destination, use it as best,
0f41302f 7075 since this insn will probably be eliminated in that case. */
7afe21cc
RK
7076 if (src)
7077 {
7078 if (rtx_equal_p (src, dest))
7079 src_cost = -1;
7080 else
7081 src_cost = COST (src);
7082 }
7083
7084 if (src_eqv_here)
7085 {
7086 if (rtx_equal_p (src_eqv_here, dest))
7087 src_eqv_cost = -1;
7088 else
7089 src_eqv_cost = COST (src_eqv_here);
7090 }
7091
7092 if (src_folded)
7093 {
7094 if (rtx_equal_p (src_folded, dest))
7095 src_folded_cost = -1;
7096 else
7097 src_folded_cost = COST (src_folded);
7098 }
7099
7100 if (src_related)
7101 {
7102 if (rtx_equal_p (src_related, dest))
7103 src_related_cost = -1;
7104 else
7105 src_related_cost = COST (src_related);
7106 }
7107
7108 /* If this was an indirect jump insn, a known label will really be
7109 cheaper even though it looks more expensive. */
7110 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7111 src_folded = src_const, src_folded_cost = -1;
7112
7113 /* Terminate loop when replacement made. This must terminate since
7114 the current contents will be tested and will always be valid. */
7115 while (1)
7116 {
7bd8b2a8 7117 rtx trial, old_src;
7afe21cc
RK
7118
7119 /* Skip invalid entries. */
7120 while (elt && GET_CODE (elt->exp) != REG
7121 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7122 elt = elt->next_same_value;
5a03c8c4
RK
7123
7124 /* A paradoxical subreg would be bad here: it'll be the right
7125 size, but later may be adjusted so that the upper bits aren't
7126 what we want. So reject it. */
7127 if (elt != 0
7128 && GET_CODE (elt->exp) == SUBREG
7129 && (GET_MODE_SIZE (GET_MODE (elt->exp))
7130 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7131 /* It is okay, though, if the rtx we're trying to match
7132 will ignore any of the bits we can't predict. */
7133 && ! (src != 0
7134 && GET_CODE (src) == SUBREG
7135 && GET_MODE (src) == GET_MODE (elt->exp)
7136 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7137 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7138 {
7139 elt = elt->next_same_value;
7140 continue;
7141 }
7afe21cc
RK
7142
7143 if (elt) src_elt_cost = elt->cost;
7144
7145 /* Find cheapest and skip it for the next time. For items
7146 of equal cost, use this order:
7147 src_folded, src, src_eqv, src_related and hash table entry. */
7148 if (src_folded_cost <= src_cost
7149 && src_folded_cost <= src_eqv_cost
7150 && src_folded_cost <= src_related_cost
7151 && src_folded_cost <= src_elt_cost)
7152 {
7153 trial = src_folded, src_folded_cost = 10000;
7154 if (src_folded_force_flag)
7155 trial = force_const_mem (mode, trial);
7156 }
7157 else if (src_cost <= src_eqv_cost
7158 && src_cost <= src_related_cost
7159 && src_cost <= src_elt_cost)
7160 trial = src, src_cost = 10000;
7161 else if (src_eqv_cost <= src_related_cost
7162 && src_eqv_cost <= src_elt_cost)
0af62b41 7163 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7afe21cc 7164 else if (src_related_cost <= src_elt_cost)
0af62b41 7165 trial = copy_rtx (src_related), src_related_cost = 10000;
7afe21cc
RK
7166 else
7167 {
05c33dd8 7168 trial = copy_rtx (elt->exp);
7afe21cc
RK
7169 elt = elt->next_same_value;
7170 src_elt_cost = 10000;
7171 }
7172
7173 /* We don't normally have an insn matching (set (pc) (pc)), so
7174 check for this separately here. We will delete such an
7175 insn below.
7176
7177 Tablejump insns contain a USE of the table, so simply replacing
7178 the operand with the constant won't match. This is simply an
7179 unconditional branch, however, and is therefore valid. Just
7180 insert the substitution here and we will delete and re-emit
7181 the insn later. */
7182
7bd8b2a8
JL
7183 /* Keep track of the original SET_SRC so that we can fix notes
7184 on libcall instructions. */
7185 old_src = SET_SRC (sets[i].rtl);
7186
7afe21cc
RK
7187 if (n_sets == 1 && dest == pc_rtx
7188 && (trial == pc_rtx
7189 || (GET_CODE (trial) == LABEL_REF
7190 && ! condjump_p (insn))))
7191 {
7192 /* If TRIAL is a label in front of a jump table, we are
7193 really falling through the switch (this is how casesi
7194 insns work), so we must branch around the table. */
7195 if (GET_CODE (trial) == CODE_LABEL
7196 && NEXT_INSN (trial) != 0
7197 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7198 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7199 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7200
38a448ca 7201 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7afe21cc
RK
7202
7203 SET_SRC (sets[i].rtl) = trial;
44333223 7204 cse_jumps_altered = 1;
7afe21cc
RK
7205 break;
7206 }
7207
7208 /* Look for a substitution that makes a valid insn. */
7209 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 7210 {
7bd8b2a8
JL
7211 /* If we just made a substitution inside a libcall, then we
7212 need to make the same substitution in any notes attached
7213 to the RETVAL insn. */
1ed0205e
VM
7214 if (libcall_insn
7215 && (GET_CODE (old_src) == REG
7216 || GET_CODE (old_src) == SUBREG
7217 || GET_CODE (old_src) == MEM))
7bd8b2a8
JL
7218 replace_rtx (REG_NOTES (libcall_insn), old_src,
7219 canon_reg (SET_SRC (sets[i].rtl), insn));
7220
7722328e
RK
7221 /* The result of apply_change_group can be ignored; see
7222 canon_reg. */
7223
7224 validate_change (insn, &SET_SRC (sets[i].rtl),
7225 canon_reg (SET_SRC (sets[i].rtl), insn),
7226 1);
6702af89 7227 apply_change_group ();
05c33dd8
RK
7228 break;
7229 }
7afe21cc
RK
7230
7231 /* If we previously found constant pool entries for
7232 constants and this is a constant, try making a
7233 pool entry. Put it in src_folded unless we already have done
7234 this since that is where it likely came from. */
7235
7236 else if (constant_pool_entries_cost
7237 && CONSTANT_P (trial)
1bbd065b
RK
7238 && ! (GET_CODE (trial) == CONST
7239 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7240 && (src_folded == 0
7241 || (GET_CODE (src_folded) != MEM
7242 && ! src_folded_force_flag))
9ae8ffe7
JL
7243 && GET_MODE_CLASS (mode) != MODE_CC
7244 && mode != VOIDmode)
7afe21cc
RK
7245 {
7246 src_folded_force_flag = 1;
7247 src_folded = trial;
7248 src_folded_cost = constant_pool_entries_cost;
7249 }
7250 }
7251
7252 src = SET_SRC (sets[i].rtl);
7253
7254 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7255 However, there is an important exception: If both are registers
7256 that are not the head of their equivalence class, replace SET_SRC
7257 with the head of the class. If we do not do this, we will have
7258 both registers live over a portion of the basic block. This way,
7259 their lifetimes will likely abut instead of overlapping. */
7260 if (GET_CODE (dest) == REG
7261 && REGNO_QTY_VALID_P (REGNO (dest))
30f72379
MM
7262 && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7263 && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7afe21cc
RK
7264 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7265 /* Don't do this if the original insn had a hard reg as
7266 SET_SRC. */
7267 && (GET_CODE (sets[i].src) != REG
7268 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7269 /* We can't call canon_reg here because it won't do anything if
7270 SRC is a hard register. */
7271 {
30f72379 7272 int first = qty_first_reg[REG_QTY (REGNO (src))];
759bd8b7
R
7273 rtx new_src
7274 = (first >= FIRST_PSEUDO_REGISTER
7275 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7276
7277 /* We must use validate-change even for this, because this
7278 might be a special no-op instruction, suitable only to
7279 tag notes onto. */
7280 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7281 {
7282 src = new_src;
7283 /* If we had a constant that is cheaper than what we are now
7284 setting SRC to, use that constant. We ignored it when we
7285 thought we could make this into a no-op. */
7286 if (src_const && COST (src_const) < COST (src)
7287 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7288 0))
7289 src = src_const;
7290 }
7afe21cc
RK
7291 }
7292
7293 /* If we made a change, recompute SRC values. */
7294 if (src != sets[i].src)
7295 {
7296 do_not_record = 0;
7297 hash_arg_in_memory = 0;
7298 hash_arg_in_struct = 0;
7299 sets[i].src = src;
2197a88a 7300 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
7301 sets[i].src_volatile = do_not_record;
7302 sets[i].src_in_memory = hash_arg_in_memory;
7303 sets[i].src_in_struct = hash_arg_in_struct;
2197a88a 7304 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
7305 }
7306
7307 /* If this is a single SET, we are setting a register, and we have an
7308 equivalent constant, we want to add a REG_NOTE. We don't want
7309 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 7310 that pseudo hasn't been eliminated is a pain. Such a note also
ac7ef8d5
FS
7311 won't help anything.
7312
7313 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7314 which can be created for a reference to a compile time computable
7315 entry in a jump table. */
7316
7afe21cc 7317 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
ac7ef8d5
FS
7318 && GET_CODE (src_const) != REG
7319 && ! (GET_CODE (src_const) == CONST
7320 && GET_CODE (XEXP (src_const, 0)) == MINUS
7321 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7322 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 7323 {
92f9aa51 7324 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7afe21cc
RK
7325
7326 /* Record the actual constant value in a REG_EQUAL note, making
7327 a new one if one does not already exist. */
7328 if (tem)
7329 XEXP (tem, 0) = src_const;
7330 else
38a448ca
RH
7331 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7332 src_const, REG_NOTES (insn));
7afe21cc
RK
7333
7334 /* If storing a constant value in a register that
7335 previously held the constant value 0,
7336 record this fact with a REG_WAS_0 note on this insn.
7337
7338 Note that the *register* is required to have previously held 0,
7339 not just any register in the quantity and we must point to the
7340 insn that set that register to zero.
7341
7342 Rather than track each register individually, we just see if
7343 the last set for this quantity was for this register. */
7344
7345 if (REGNO_QTY_VALID_P (REGNO (dest))
30f72379 7346 && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7afe21cc
RK
7347 {
7348 /* See if we previously had a REG_WAS_0 note. */
906c4e36 7349 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
30f72379 7350 rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7afe21cc
RK
7351
7352 if ((tem = single_set (const_insn)) != 0
7353 && rtx_equal_p (SET_DEST (tem), dest))
7354 {
7355 if (note)
7356 XEXP (note, 0) = const_insn;
7357 else
38a448ca
RH
7358 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7359 const_insn,
7360 REG_NOTES (insn));
7afe21cc
RK
7361 }
7362 }
7363 }
7364
7365 /* Now deal with the destination. */
7366 do_not_record = 0;
7367 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7368
7369 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7370 to the MEM or REG within it. */
7371 while (GET_CODE (dest) == SIGN_EXTRACT
7372 || GET_CODE (dest) == ZERO_EXTRACT
7373 || GET_CODE (dest) == SUBREG
7374 || GET_CODE (dest) == STRICT_LOW_PART)
7375 {
7376 sets[i].inner_dest_loc = &XEXP (dest, 0);
7377 dest = XEXP (dest, 0);
7378 }
7379
7380 sets[i].inner_dest = dest;
7381
7382 if (GET_CODE (dest) == MEM)
7383 {
9ae8ffe7
JL
7384#ifdef PUSH_ROUNDING
7385 /* Stack pushes invalidate the stack pointer. */
7386 rtx addr = XEXP (dest, 0);
7387 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7388 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7389 && XEXP (addr, 0) == stack_pointer_rtx)
7390 invalidate (stack_pointer_rtx, Pmode);
7391#endif
7afe21cc 7392 dest = fold_rtx (dest, insn);
7afe21cc
RK
7393 }
7394
7395 /* Compute the hash code of the destination now,
7396 before the effects of this instruction are recorded,
7397 since the register values used in the address computation
7398 are those before this instruction. */
2197a88a 7399 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
7400
7401 /* Don't enter a bit-field in the hash table
7402 because the value in it after the store
7403 may not equal what was stored, due to truncation. */
7404
7405 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7406 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7407 {
7408 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7409
7410 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7411 && GET_CODE (width) == CONST_INT
906c4e36
RK
7412 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7413 && ! (INTVAL (src_const)
7414 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
7415 /* Exception: if the value is constant,
7416 and it won't be truncated, record it. */
7417 ;
7418 else
7419 {
7420 /* This is chosen so that the destination will be invalidated
7421 but no new value will be recorded.
7422 We must invalidate because sometimes constant
7423 values can be recorded for bitfields. */
7424 sets[i].src_elt = 0;
7425 sets[i].src_volatile = 1;
7426 src_eqv = 0;
7427 src_eqv_elt = 0;
7428 }
7429 }
7430
7431 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7432 the insn. */
7433 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7434 {
7435 PUT_CODE (insn, NOTE);
7436 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7437 NOTE_SOURCE_FILE (insn) = 0;
7438 cse_jumps_altered = 1;
7439 /* One less use of the label this insn used to jump to. */
85c3ba60
JL
7440 if (JUMP_LABEL (insn) != 0)
7441 --LABEL_NUSES (JUMP_LABEL (insn));
7afe21cc
RK
7442 /* No more processing for this set. */
7443 sets[i].rtl = 0;
7444 }
7445
7446 /* If this SET is now setting PC to a label, we know it used to
7447 be a conditional or computed branch. So we see if we can follow
7448 it. If it was a computed branch, delete it and re-emit. */
7449 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7450 {
7451 rtx p;
7452
7453 /* If this is not in the format for a simple branch and
7454 we are the only SET in it, re-emit it. */
7455 if (! simplejump_p (insn) && n_sets == 1)
7456 {
7457 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7458 JUMP_LABEL (new) = XEXP (src, 0);
7459 LABEL_NUSES (XEXP (src, 0))++;
7460 delete_insn (insn);
7461 insn = new;
7462 }
31dcf83f
RS
7463 else
7464 /* Otherwise, force rerecognition, since it probably had
7465 a different pattern before.
7466 This shouldn't really be necessary, since whatever
7467 changed the source value above should have done this.
7468 Until the right place is found, might as well do this here. */
7469 INSN_CODE (insn) = -1;
7afe21cc
RK
7470
7471 /* Now that we've converted this jump to an unconditional jump,
7472 there is dead code after it. Delete the dead code until we
7473 reach a BARRIER, the end of the function, or a label. Do
7474 not delete NOTEs except for NOTE_INSN_DELETED since later
7475 phases assume these notes are retained. */
7476
7477 p = insn;
7478
7479 while (NEXT_INSN (p) != 0
7480 && GET_CODE (NEXT_INSN (p)) != BARRIER
7481 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7482 {
7483 if (GET_CODE (NEXT_INSN (p)) != NOTE
7484 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7485 delete_insn (NEXT_INSN (p));
7486 else
7487 p = NEXT_INSN (p);
7488 }
7489
7490 /* If we don't have a BARRIER immediately after INSN, put one there.
7491 Much code assumes that there are no NOTEs between a JUMP_INSN and
7492 BARRIER. */
7493
7494 if (NEXT_INSN (insn) == 0
7495 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
783e5bca 7496 emit_barrier_before (NEXT_INSN (insn));
7afe21cc
RK
7497
7498 /* We might have two BARRIERs separated by notes. Delete the second
7499 one if so. */
7500
538b78e7
RS
7501 if (p != insn && NEXT_INSN (p) != 0
7502 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7afe21cc
RK
7503 delete_insn (NEXT_INSN (p));
7504
7505 cse_jumps_altered = 1;
7506 sets[i].rtl = 0;
7507 }
7508
c2a47e48
RK
7509 /* If destination is volatile, invalidate it and then do no further
7510 processing for this assignment. */
7afe21cc
RK
7511
7512 else if (do_not_record)
c2a47e48
RK
7513 {
7514 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7515 || GET_CODE (dest) == MEM)
bb4034b3 7516 invalidate (dest, VOIDmode);
2708da92
RS
7517 else if (GET_CODE (dest) == STRICT_LOW_PART
7518 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7519 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
7520 sets[i].rtl = 0;
7521 }
7afe21cc
RK
7522
7523 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 7524 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
7525
7526#ifdef HAVE_cc0
7527 /* If setting CC0, record what it was set to, or a constant, if it
7528 is equivalent to a constant. If it is being set to a floating-point
7529 value, make a COMPARE with the appropriate constant of 0. If we
7530 don't do this, later code can interpret this as a test against
7531 const0_rtx, which can cause problems if we try to put it into an
7532 insn as a floating-point operand. */
7533 if (dest == cc0_rtx)
7534 {
7535 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7536 this_insn_cc0_mode = mode;
cbf6a543 7537 if (FLOAT_MODE_P (mode))
38a448ca
RH
7538 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7539 CONST0_RTX (mode));
7afe21cc
RK
7540 }
7541#endif
7542 }
7543
7544 /* Now enter all non-volatile source expressions in the hash table
7545 if they are not already present.
7546 Record their equivalence classes in src_elt.
7547 This way we can insert the corresponding destinations into
7548 the same classes even if the actual sources are no longer in them
7549 (having been invalidated). */
7550
7551 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7552 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7553 {
7554 register struct table_elt *elt;
7555 register struct table_elt *classp = sets[0].src_elt;
7556 rtx dest = SET_DEST (sets[0].rtl);
7557 enum machine_mode eqvmode = GET_MODE (dest);
7558
7559 if (GET_CODE (dest) == STRICT_LOW_PART)
7560 {
7561 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7562 classp = 0;
7563 }
7564 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
7565 {
7566 rehash_using_reg (src_eqv);
7567 src_eqv_hash = HASH (src_eqv, eqvmode);
7568 }
2197a88a 7569 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc
RK
7570 elt->in_memory = src_eqv_in_memory;
7571 elt->in_struct = src_eqv_in_struct;
7572 src_eqv_elt = elt;
f7911249
JW
7573
7574 /* Check to see if src_eqv_elt is the same as a set source which
7575 does not yet have an elt, and if so set the elt of the set source
7576 to src_eqv_elt. */
7577 for (i = 0; i < n_sets; i++)
7578 if (sets[i].rtl && sets[i].src_elt == 0
7579 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7580 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
7581 }
7582
7583 for (i = 0; i < n_sets; i++)
7584 if (sets[i].rtl && ! sets[i].src_volatile
7585 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7586 {
7587 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7588 {
7589 /* REG_EQUAL in setting a STRICT_LOW_PART
7590 gives an equivalent for the entire destination register,
7591 not just for the subreg being stored in now.
7592 This is a more interesting equivalence, so we arrange later
7593 to treat the entire reg as the destination. */
7594 sets[i].src_elt = src_eqv_elt;
2197a88a 7595 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
7596 }
7597 else
7598 {
7599 /* Insert source and constant equivalent into hash table, if not
7600 already present. */
7601 register struct table_elt *classp = src_eqv_elt;
7602 register rtx src = sets[i].src;
7603 register rtx dest = SET_DEST (sets[i].rtl);
7604 enum machine_mode mode
7605 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7606
7607 if (sets[i].src_elt == 0)
7608 {
7609 register struct table_elt *elt;
7610
7611 /* Note that these insert_regs calls cannot remove
7612 any of the src_elt's, because they would have failed to
7613 match if not still valid. */
7614 if (insert_regs (src, classp, 0))
8ae2b8f6
JW
7615 {
7616 rehash_using_reg (src);
7617 sets[i].src_hash = HASH (src, mode);
7618 }
2197a88a 7619 elt = insert (src, classp, sets[i].src_hash, mode);
7afe21cc
RK
7620 elt->in_memory = sets[i].src_in_memory;
7621 elt->in_struct = sets[i].src_in_struct;
7622 sets[i].src_elt = classp = elt;
7623 }
7624
7625 if (sets[i].src_const && sets[i].src_const_elt == 0
7626 && src != sets[i].src_const
7627 && ! rtx_equal_p (sets[i].src_const, src))
7628 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 7629 sets[i].src_const_hash, mode);
7afe21cc
RK
7630 }
7631 }
7632 else if (sets[i].src_elt == 0)
7633 /* If we did not insert the source into the hash table (e.g., it was
7634 volatile), note the equivalence class for the REG_EQUAL value, if any,
7635 so that the destination goes into that class. */
7636 sets[i].src_elt = src_eqv_elt;
7637
9ae8ffe7 7638 invalidate_from_clobbers (x);
77fa0940
RK
7639
7640 /* Some registers are invalidated by subroutine calls. Memory is
7641 invalidated by non-constant calls. */
7642
7afe21cc
RK
7643 if (GET_CODE (insn) == CALL_INSN)
7644 {
77fa0940 7645 if (! CONST_CALL_P (insn))
9ae8ffe7 7646 invalidate_memory ();
7afe21cc
RK
7647 invalidate_for_call ();
7648 }
7649
7650 /* Now invalidate everything set by this instruction.
7651 If a SUBREG or other funny destination is being set,
7652 sets[i].rtl is still nonzero, so here we invalidate the reg
7653 a part of which is being set. */
7654
7655 for (i = 0; i < n_sets; i++)
7656 if (sets[i].rtl)
7657 {
bb4034b3
JW
7658 /* We can't use the inner dest, because the mode associated with
7659 a ZERO_EXTRACT is significant. */
7660 register rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
7661
7662 /* Needed for registers to remove the register from its
7663 previous quantity's chain.
7664 Needed for memory if this is a nonvarying address, unless
7665 we have just done an invalidate_memory that covers even those. */
7666 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
9ae8ffe7 7667 || GET_CODE (dest) == MEM)
bb4034b3 7668 invalidate (dest, VOIDmode);
2708da92
RS
7669 else if (GET_CODE (dest) == STRICT_LOW_PART
7670 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7671 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
7672 }
7673
01e752d3
JL
7674 /* A volatile ASM invalidates everything. */
7675 if (GET_CODE (insn) == INSN
7676 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7677 && MEM_VOLATILE_P (PATTERN (insn)))
7678 flush_hash_table ();
7679
7afe21cc
RK
7680 /* Make sure registers mentioned in destinations
7681 are safe for use in an expression to be inserted.
7682 This removes from the hash table
7683 any invalid entry that refers to one of these registers.
7684
7685 We don't care about the return value from mention_regs because
7686 we are going to hash the SET_DEST values unconditionally. */
7687
7688 for (i = 0; i < n_sets; i++)
34c73909
R
7689 {
7690 if (sets[i].rtl)
7691 {
7692 rtx x = SET_DEST (sets[i].rtl);
7693
7694 if (GET_CODE (x) != REG)
7695 mention_regs (x);
7696 else
7697 {
7698 /* We used to rely on all references to a register becoming
7699 inaccessible when a register changes to a new quantity,
7700 since that changes the hash code. However, that is not
7701 safe, since after NBUCKETS new quantities we get a
7702 hash 'collision' of a register with its own invalid
7703 entries. And since SUBREGs have been changed not to
7704 change their hash code with the hash code of the register,
7705 it wouldn't work any longer at all. So we have to check
7706 for any invalid references lying around now.
7707 This code is similar to the REG case in mention_regs,
7708 but it knows that reg_tick has been incremented, and
7709 it leaves reg_in_table as -1 . */
7710 register int regno = REGNO (x);
7711 register int endregno
7712 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7713 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7714 int i;
7715
7716 for (i = regno; i < endregno; i++)
7717 {
30f72379 7718 if (REG_IN_TABLE (i) >= 0)
34c73909
R
7719 {
7720 remove_invalid_refs (i);
30f72379 7721 REG_IN_TABLE (i) = -1;
34c73909
R
7722 }
7723 }
7724 }
7725 }
7726 }
7afe21cc
RK
7727
7728 /* We may have just removed some of the src_elt's from the hash table.
7729 So replace each one with the current head of the same class. */
7730
7731 for (i = 0; i < n_sets; i++)
7732 if (sets[i].rtl)
7733 {
7734 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7735 /* If elt was removed, find current head of same class,
7736 or 0 if nothing remains of that class. */
7737 {
7738 register struct table_elt *elt = sets[i].src_elt;
7739
7740 while (elt && elt->prev_same_value)
7741 elt = elt->prev_same_value;
7742
7743 while (elt && elt->first_same_value == 0)
7744 elt = elt->next_same_value;
7745 sets[i].src_elt = elt ? elt->first_same_value : 0;
7746 }
7747 }
7748
7749 /* Now insert the destinations into their equivalence classes. */
7750
7751 for (i = 0; i < n_sets; i++)
7752 if (sets[i].rtl)
7753 {
7754 register rtx dest = SET_DEST (sets[i].rtl);
9de2c71a 7755 rtx inner_dest = sets[i].inner_dest;
7afe21cc
RK
7756 register struct table_elt *elt;
7757
7758 /* Don't record value if we are not supposed to risk allocating
7759 floating-point values in registers that might be wider than
7760 memory. */
7761 if ((flag_float_store
7762 && GET_CODE (dest) == MEM
cbf6a543 7763 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
7764 /* Don't record BLKmode values, because we don't know the
7765 size of it, and can't be sure that other BLKmode values
7766 have the same or smaller size. */
7767 || GET_MODE (dest) == BLKmode
7afe21cc
RK
7768 /* Don't record values of destinations set inside a libcall block
7769 since we might delete the libcall. Things should have been set
7770 up so we won't want to reuse such a value, but we play it safe
7771 here. */
7bd8b2a8 7772 || libcall_insn
7afe21cc
RK
7773 /* If we didn't put a REG_EQUAL value or a source into the hash
7774 table, there is no point is recording DEST. */
1a8e9a8e
RK
7775 || sets[i].src_elt == 0
7776 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7777 or SIGN_EXTEND, don't record DEST since it can cause
7778 some tracking to be wrong.
7779
7780 ??? Think about this more later. */
7781 || (GET_CODE (dest) == SUBREG
7782 && (GET_MODE_SIZE (GET_MODE (dest))
7783 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7784 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7785 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
7786 continue;
7787
7788 /* STRICT_LOW_PART isn't part of the value BEING set,
7789 and neither is the SUBREG inside it.
7790 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7791 if (GET_CODE (dest) == STRICT_LOW_PART)
7792 dest = SUBREG_REG (XEXP (dest, 0));
7793
c610adec 7794 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7afe21cc
RK
7795 /* Registers must also be inserted into chains for quantities. */
7796 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
7797 {
7798 /* If `insert_regs' changes something, the hash code must be
7799 recalculated. */
7800 rehash_using_reg (dest);
7801 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7802 }
7afe21cc 7803
9de2c71a
MM
7804 if (GET_CODE (inner_dest) == MEM
7805 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7806 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7807 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7808 Consider the case in which the address of the MEM is
7809 passed to a function, which alters the MEM. Then, if we
7810 later use Y instead of the MEM we'll miss the update. */
7811 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7812 else
7813 elt = insert (dest, sets[i].src_elt,
7814 sets[i].dest_hash, GET_MODE (dest));
7815
c256df0b 7816 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
9ad91d71
RK
7817 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7818 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7819 0))));
c256df0b 7820
7afe21cc
RK
7821 if (elt->in_memory)
7822 {
7823 /* This implicitly assumes a whole struct
7824 need not have MEM_IN_STRUCT_P.
7825 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7826 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7827 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7828 }
7829
fc3ffe83
RK
7830 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7831 narrower than M2, and both M1 and M2 are the same number of words,
7832 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7833 make that equivalence as well.
7afe21cc
RK
7834
7835 However, BAR may have equivalences for which gen_lowpart_if_possible
7836 will produce a simpler value than gen_lowpart_if_possible applied to
7837 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7838 BAR's equivalences. If we don't get a simplified form, make
7839 the SUBREG. It will not be used in an equivalence, but will
7840 cause two similar assignments to be detected.
7841
7842 Note the loop below will find SUBREG_REG (DEST) since we have
7843 already entered SRC and DEST of the SET in the table. */
7844
7845 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
7846 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7847 / UNITS_PER_WORD)
7848 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7afe21cc
RK
7849 && (GET_MODE_SIZE (GET_MODE (dest))
7850 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7851 && sets[i].src_elt != 0)
7852 {
7853 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7854 struct table_elt *elt, *classp = 0;
7855
7856 for (elt = sets[i].src_elt->first_same_value; elt;
7857 elt = elt->next_same_value)
7858 {
7859 rtx new_src = 0;
2197a88a 7860 unsigned src_hash;
7afe21cc
RK
7861 struct table_elt *src_elt;
7862
7863 /* Ignore invalid entries. */
7864 if (GET_CODE (elt->exp) != REG
7865 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7866 continue;
7867
7868 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7869 if (new_src == 0)
38a448ca 7870 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7afe21cc
RK
7871
7872 src_hash = HASH (new_src, new_mode);
7873 src_elt = lookup (new_src, src_hash, new_mode);
7874
7875 /* Put the new source in the hash table is if isn't
7876 already. */
7877 if (src_elt == 0)
7878 {
7879 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
7880 {
7881 rehash_using_reg (new_src);
7882 src_hash = HASH (new_src, new_mode);
7883 }
7afe21cc
RK
7884 src_elt = insert (new_src, classp, src_hash, new_mode);
7885 src_elt->in_memory = elt->in_memory;
7886 src_elt->in_struct = elt->in_struct;
7887 }
7888 else if (classp && classp != src_elt->first_same_value)
7889 /* Show that two things that we've seen before are
7890 actually the same. */
7891 merge_equiv_classes (src_elt, classp);
7892
7893 classp = src_elt->first_same_value;
da932f04
JL
7894 /* Ignore invalid entries. */
7895 while (classp
7896 && GET_CODE (classp->exp) != REG
7897 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7898 classp = classp->next_same_value;
7afe21cc
RK
7899 }
7900 }
7901 }
7902
7903 /* Special handling for (set REG0 REG1)
7904 where REG0 is the "cheapest", cheaper than REG1.
7905 After cse, REG1 will probably not be used in the sequel,
7906 so (if easily done) change this insn to (set REG1 REG0) and
7907 replace REG1 with REG0 in the previous insn that computed their value.
7908 Then REG1 will become a dead store and won't cloud the situation
7909 for later optimizations.
7910
7911 Do not make this change if REG1 is a hard register, because it will
7912 then be used in the sequel and we may be changing a two-operand insn
7913 into a three-operand insn.
7914
50270076
R
7915 Also do not do this if we are operating on a copy of INSN.
7916
7917 Also don't do this if INSN ends a libcall; this would cause an unrelated
7918 register to be set in the middle of a libcall, and we then get bad code
7919 if the libcall is deleted. */
7afe21cc
RK
7920
7921 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7922 && NEXT_INSN (PREV_INSN (insn)) == insn
7923 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7924 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7925 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
30f72379 7926 && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
50270076
R
7927 == REGNO (SET_DEST (sets[0].rtl)))
7928 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc
RK
7929 {
7930 rtx prev = PREV_INSN (insn);
7931 while (prev && GET_CODE (prev) == NOTE)
7932 prev = PREV_INSN (prev);
7933
7934 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7935 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7936 {
7937 rtx dest = SET_DEST (sets[0].rtl);
906c4e36 7938 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7afe21cc
RK
7939
7940 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7941 validate_change (insn, & SET_DEST (sets[0].rtl),
7942 SET_SRC (sets[0].rtl), 1);
7943 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7944 apply_change_group ();
7945
7946 /* If REG1 was equivalent to a constant, REG0 is not. */
7947 if (note)
7948 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7949
7950 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7951 any REG_WAS_0 note on INSN to PREV. */
906c4e36 7952 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7953 if (note)
7954 remove_note (prev, note);
7955
906c4e36 7956 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7957 if (note)
7958 {
7959 remove_note (insn, note);
7960 XEXP (note, 1) = REG_NOTES (prev);
7961 REG_NOTES (prev) = note;
7962 }
98369a0f
RK
7963
7964 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7965 then we must delete it, because the value in REG0 has changed. */
7966 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7967 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7968 remove_note (insn, note);
7afe21cc
RK
7969 }
7970 }
7971
7972 /* If this is a conditional jump insn, record any known equivalences due to
7973 the condition being tested. */
7974
7975 last_jump_equiv_class = 0;
7976 if (GET_CODE (insn) == JUMP_INSN
7977 && n_sets == 1 && GET_CODE (x) == SET
7978 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7979 record_jump_equiv (insn, 0);
7980
7981#ifdef HAVE_cc0
7982 /* If the previous insn set CC0 and this insn no longer references CC0,
7983 delete the previous insn. Here we use the fact that nothing expects CC0
7984 to be valid over an insn, which is true until the final pass. */
7985 if (prev_insn && GET_CODE (prev_insn) == INSN
7986 && (tem = single_set (prev_insn)) != 0
7987 && SET_DEST (tem) == cc0_rtx
7988 && ! reg_mentioned_p (cc0_rtx, x))
7989 {
7990 PUT_CODE (prev_insn, NOTE);
7991 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7992 NOTE_SOURCE_FILE (prev_insn) = 0;
7993 }
7994
7995 prev_insn_cc0 = this_insn_cc0;
7996 prev_insn_cc0_mode = this_insn_cc0_mode;
7997#endif
7998
7999 prev_insn = insn;
8000}
8001\f
a4c6502a 8002/* Remove from the hash table all expressions that reference memory. */
7afe21cc 8003static void
9ae8ffe7 8004invalidate_memory ()
7afe21cc 8005{
9ae8ffe7
JL
8006 register int i;
8007 register struct table_elt *p, *next;
7afe21cc 8008
9ae8ffe7
JL
8009 for (i = 0; i < NBUCKETS; i++)
8010 for (p = table[i]; p; p = next)
8011 {
8012 next = p->next_same_hash;
8013 if (p->in_memory)
8014 remove_from_table (p, i);
8015 }
8016}
8017
8018/* XXX ??? The name of this function bears little resemblance to
8019 what this function actually does. FIXME. */
8020static int
8021note_mem_written (addr)
8022 register rtx addr;
8023{
8024 /* Pushing or popping the stack invalidates just the stack pointer. */
8025 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
8026 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
8027 && GET_CODE (XEXP (addr, 0)) == REG
8028 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 8029 {
30f72379
MM
8030 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
8031 REG_TICK (STACK_POINTER_REGNUM)++;
9ae8ffe7
JL
8032
8033 /* This should be *very* rare. */
8034 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
8035 invalidate (stack_pointer_rtx, VOIDmode);
8036 return 1;
7afe21cc 8037 }
9ae8ffe7 8038 return 0;
7afe21cc
RK
8039}
8040
8041/* Perform invalidation on the basis of everything about an insn
8042 except for invalidating the actual places that are SET in it.
8043 This includes the places CLOBBERed, and anything that might
8044 alias with something that is SET or CLOBBERed.
8045
7afe21cc
RK
8046 X is the pattern of the insn. */
8047
8048static void
9ae8ffe7 8049invalidate_from_clobbers (x)
7afe21cc
RK
8050 rtx x;
8051{
7afe21cc
RK
8052 if (GET_CODE (x) == CLOBBER)
8053 {
8054 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
8055 if (ref)
8056 {
8057 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8058 || GET_CODE (ref) == MEM)
8059 invalidate (ref, VOIDmode);
8060 else if (GET_CODE (ref) == STRICT_LOW_PART
8061 || GET_CODE (ref) == ZERO_EXTRACT)
8062 invalidate (XEXP (ref, 0), GET_MODE (ref));
8063 }
7afe21cc
RK
8064 }
8065 else if (GET_CODE (x) == PARALLEL)
8066 {
8067 register int i;
8068 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8069 {
8070 register rtx y = XVECEXP (x, 0, i);
8071 if (GET_CODE (y) == CLOBBER)
8072 {
8073 rtx ref = XEXP (y, 0);
9ae8ffe7
JL
8074 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8075 || GET_CODE (ref) == MEM)
8076 invalidate (ref, VOIDmode);
8077 else if (GET_CODE (ref) == STRICT_LOW_PART
8078 || GET_CODE (ref) == ZERO_EXTRACT)
8079 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
8080 }
8081 }
8082 }
8083}
8084\f
8085/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
8086 and replace any registers in them with either an equivalent constant
8087 or the canonical form of the register. If we are inside an address,
8088 only do this if the address remains valid.
8089
8090 OBJECT is 0 except when within a MEM in which case it is the MEM.
8091
8092 Return the replacement for X. */
8093
8094static rtx
8095cse_process_notes (x, object)
8096 rtx x;
8097 rtx object;
8098{
8099 enum rtx_code code = GET_CODE (x);
8100 char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
8101 int i;
8102
8103 switch (code)
8104 {
8105 case CONST_INT:
8106 case CONST:
8107 case SYMBOL_REF:
8108 case LABEL_REF:
8109 case CONST_DOUBLE:
8110 case PC:
8111 case CC0:
8112 case LO_SUM:
8113 return x;
8114
8115 case MEM:
8116 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8117 return x;
8118
8119 case EXPR_LIST:
8120 case INSN_LIST:
8121 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 8122 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 8123 if (XEXP (x, 1))
906c4e36 8124 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
8125 return x;
8126
e4890d45
RS
8127 case SIGN_EXTEND:
8128 case ZERO_EXTEND:
0b0ee36c 8129 case SUBREG:
e4890d45
RS
8130 {
8131 rtx new = cse_process_notes (XEXP (x, 0), object);
8132 /* We don't substitute VOIDmode constants into these rtx,
8133 since they would impede folding. */
8134 if (GET_MODE (new) != VOIDmode)
8135 validate_change (object, &XEXP (x, 0), new, 0);
8136 return x;
8137 }
8138
7afe21cc 8139 case REG:
30f72379 8140 i = REG_QTY (REGNO (x));
7afe21cc
RK
8141
8142 /* Return a constant or a constant register. */
8143 if (REGNO_QTY_VALID_P (REGNO (x))
8144 && qty_const[i] != 0
8145 && (CONSTANT_P (qty_const[i])
8146 || GET_CODE (qty_const[i]) == REG))
8147 {
8148 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8149 if (new)
8150 return new;
8151 }
8152
8153 /* Otherwise, canonicalize this register. */
906c4e36 8154 return canon_reg (x, NULL_RTX);
e9a25f70
JL
8155
8156 default:
8157 break;
7afe21cc
RK
8158 }
8159
8160 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8161 if (fmt[i] == 'e')
8162 validate_change (object, &XEXP (x, i),
7fe34fdf 8163 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
8164
8165 return x;
8166}
8167\f
8168/* Find common subexpressions between the end test of a loop and the beginning
8169 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8170
8171 Often we have a loop where an expression in the exit test is used
8172 in the body of the loop. For example "while (*p) *q++ = *p++;".
8173 Because of the way we duplicate the loop exit test in front of the loop,
8174 however, we don't detect that common subexpression. This will be caught
8175 when global cse is implemented, but this is a quite common case.
8176
8177 This function handles the most common cases of these common expressions.
8178 It is called after we have processed the basic block ending with the
8179 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8180 jumps to a label used only once. */
8181
8182static void
8183cse_around_loop (loop_start)
8184 rtx loop_start;
8185{
8186 rtx insn;
8187 int i;
8188 struct table_elt *p;
8189
8190 /* If the jump at the end of the loop doesn't go to the start, we don't
8191 do anything. */
8192 for (insn = PREV_INSN (loop_start);
8193 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8194 insn = PREV_INSN (insn))
8195 ;
8196
8197 if (insn == 0
8198 || GET_CODE (insn) != NOTE
8199 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8200 return;
8201
8202 /* If the last insn of the loop (the end test) was an NE comparison,
8203 we will interpret it as an EQ comparison, since we fell through
f72aed24 8204 the loop. Any equivalences resulting from that comparison are
7afe21cc
RK
8205 therefore not valid and must be invalidated. */
8206 if (last_jump_equiv_class)
8207 for (p = last_jump_equiv_class->first_same_value; p;
8208 p = p->next_same_value)
51723711
KG
8209 {
8210 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8211 || (GET_CODE (p->exp) == SUBREG
8212 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8213 invalidate (p->exp, VOIDmode);
8214 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8215 || GET_CODE (p->exp) == ZERO_EXTRACT)
8216 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8217 }
7afe21cc
RK
8218
8219 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8220 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8221
8222 The only thing we do with SET_DEST is invalidate entries, so we
8223 can safely process each SET in order. It is slightly less efficient
556c714b
JW
8224 to do so, but we only want to handle the most common cases.
8225
8226 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8227 These pseudos won't have valid entries in any of the tables indexed
8228 by register number, such as reg_qty. We avoid out-of-range array
8229 accesses by not processing any instructions created after cse started. */
7afe21cc
RK
8230
8231 for (insn = NEXT_INSN (loop_start);
8232 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
556c714b 8233 && INSN_UID (insn) < max_insn_uid
7afe21cc
RK
8234 && ! (GET_CODE (insn) == NOTE
8235 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8236 insn = NEXT_INSN (insn))
8237 {
8238 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8239 && (GET_CODE (PATTERN (insn)) == SET
8240 || GET_CODE (PATTERN (insn)) == CLOBBER))
8241 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8242 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8243 && GET_CODE (PATTERN (insn)) == PARALLEL)
8244 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8245 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8246 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8247 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8248 loop_start);
8249 }
8250}
8251\f
8b3686ed
RK
8252/* Process one SET of an insn that was skipped. We ignore CLOBBERs
8253 since they are done elsewhere. This function is called via note_stores. */
8254
8255static void
8256invalidate_skipped_set (dest, set)
8257 rtx set;
8258 rtx dest;
8259{
9ae8ffe7
JL
8260 enum rtx_code code = GET_CODE (dest);
8261
8262 if (code == MEM
8263 && ! note_mem_written (dest) /* If this is not a stack push ... */
8264 /* There are times when an address can appear varying and be a PLUS
8265 during this scan when it would be a fixed address were we to know
8266 the proper equivalences. So invalidate all memory if there is
8267 a BLKmode or nonscalar memory reference or a reference to a
8268 variable address. */
8269 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8270 || cse_rtx_varies_p (XEXP (dest, 0))))
8271 {
8272 invalidate_memory ();
8273 return;
8274 }
ffcf6393 8275
f47c02fa
RK
8276 if (GET_CODE (set) == CLOBBER
8277#ifdef HAVE_cc0
8278 || dest == cc0_rtx
8279#endif
8280 || dest == pc_rtx)
8281 return;
8282
9ae8ffe7 8283 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 8284 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
8285 else if (code == REG || code == SUBREG || code == MEM)
8286 invalidate (dest, VOIDmode);
8b3686ed
RK
8287}
8288
8289/* Invalidate all insns from START up to the end of the function or the
8290 next label. This called when we wish to CSE around a block that is
8291 conditionally executed. */
8292
8293static void
8294invalidate_skipped_block (start)
8295 rtx start;
8296{
8297 rtx insn;
8b3686ed
RK
8298
8299 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8300 insn = NEXT_INSN (insn))
8301 {
8302 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8303 continue;
8304
8b3686ed
RK
8305 if (GET_CODE (insn) == CALL_INSN)
8306 {
9ae8ffe7
JL
8307 if (! CONST_CALL_P (insn))
8308 invalidate_memory ();
8b3686ed 8309 invalidate_for_call ();
8b3686ed
RK
8310 }
8311
97577254 8312 invalidate_from_clobbers (PATTERN (insn));
8b3686ed 8313 note_stores (PATTERN (insn), invalidate_skipped_set);
8b3686ed
RK
8314 }
8315}
8316\f
7afe21cc
RK
8317/* Used for communication between the following two routines; contains a
8318 value to be checked for modification. */
8319
8320static rtx cse_check_loop_start_value;
8321
8322/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8323 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8324
8325static void
8326cse_check_loop_start (x, set)
8327 rtx x;
d6f4ec51 8328 rtx set ATTRIBUTE_UNUSED;
7afe21cc
RK
8329{
8330 if (cse_check_loop_start_value == 0
8331 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8332 return;
8333
8334 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8335 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8336 cse_check_loop_start_value = 0;
8337}
8338
8339/* X is a SET or CLOBBER contained in INSN that was found near the start of
8340   a loop that starts with the label at LOOP_START.
8341
8342   If X is a SET, we see if its SET_SRC is currently in our hash table.
8343   If so, we see if it has a value equal to some register used only in the
8344   loop exit code (as marked by jump.c).
8345
8346   If those two conditions are true, we search backwards from the start of
8347   the loop to see if that same value was loaded into a register that still
8348   retains its value at the start of the loop.
8349
8350   If so, we insert an insn after the load to copy the destination of that
8351   load into the equivalent register and (try to) replace our SET_SRC with that
8352   register.
8353
8354   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
8355
8356static void
8357cse_set_around_loop (x, insn, loop_start)
8358     rtx x;
8359     rtx insn;
8360     rtx loop_start;
8361{
7afe21cc 8362  struct table_elt *src_elt;
7afe21cc
RK
8363
8364  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8365     are setting PC or CC0 or whose SET_SRC is already a register.  */
8366  if (GET_CODE (x) == SET
8367      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8368      && GET_CODE (SET_SRC (x)) != REG)
8369    {
8370      src_elt = lookup (SET_SRC (x),
8371 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8372 GET_MODE (SET_DEST (x)));
8373
      /* Walk the whole equivalence class of SET_SRC, looking for a
         loop-exit-test register (REG_LOOP_TEST_P) that is cheaper than
         the expression we would otherwise compute.  */
8374      if (src_elt)
8375 for (src_elt = src_elt->first_same_value; src_elt;
8376      src_elt = src_elt->next_same_value)
8377   if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8378       && COST (src_elt->exp) < COST (SET_SRC (x)))
8379     {
8380       rtx p, set;
8381
8382       /* Look for an insn in front of LOOP_START that sets
8383 something in the desired mode to SET_SRC (x) before we hit
8384 a label or CALL_INSN.  */
8385
8386       for (p = prev_nonnote_insn (loop_start);
8387    p && GET_CODE (p) != CALL_INSN
8388    && GET_CODE (p) != CODE_LABEL;
8389    p = prev_nonnote_insn (p))
8390 if ((set = single_set (p)) != 0
8391     && GET_CODE (SET_DEST (set)) == REG
8392     && GET_MODE (SET_DEST (set)) == src_elt->mode
8393     && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8394   {
8395     /* We now have to ensure that nothing between P
8396        and LOOP_START modified anything referenced in
8397        SET_SRC (x).  We know that nothing within the loop
8398        can modify it, or we would have invalidated it in
8399        the hash table.  */
8400     rtx q;
8401
8402     cse_check_loop_start_value = SET_SRC (x);
8403     for (q = p; q != loop_start; q = NEXT_INSN (q))
8404       if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8405 note_stores (PATTERN (q), cse_check_loop_start);
8406
8407     /* If nothing was changed and we can replace our
8408        SET_SRC, add an insn after P to copy its destination
8409        to what we will be replacing SET_SRC with.  */
8410     if (cse_check_loop_start_value
8411 && validate_change (insn, &SET_SRC (x),
8412     src_elt->exp, 0))
e89d3e6f
R
8413       {
8414 /* If this creates new pseudos, this is unsafe,
8415    because the regno of new pseudo is unsuitable
8416    to index into reg_qty when cse_insn processes
8417    the new insn.  Therefore, if a new pseudo was
8418    created, discard this optimization.  */
8419 int nregs = max_reg_num ();
8420 rtx move
8421   = gen_move_insn (src_elt->exp, SET_DEST (set));
8422 if (nregs != max_reg_num ())
8423   {
8424     if (! validate_change (insn, &SET_SRC (x),
8425    SET_SRC (set), 0))
8426       abort ();
8427   }
8428 else
8429   emit_insn_after (move, p);
8430       }
7afe21cc
RK
             /* Whether or not the replacement succeeded, only the first
                matching load before the loop is considered.  */
8431     break;
8432   }
8433     }
8434    }
8435
8436  /* Now invalidate anything modified by X.  */
9ae8ffe7 8437  note_mem_written (SET_DEST (x));
7afe21cc 8438
9ae8ffe7 8439  /* See comment on similar code in cse_insn for explanation of these tests.  */
7afe21cc 8440  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
9ae8ffe7 8441      || GET_CODE (SET_DEST (x)) == MEM)
bb4034b3 8442    invalidate (SET_DEST (x), VOIDmode);
2708da92
RS
8443  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8444    || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
bb4034b3 8445    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7afe21cc
RK
8446}
8447\f
8448/* Find the end of INSN's basic block and return its range,
8449   the total number of SETs in all the insns of the block, the last insn of the
8450   block, and the branch path.
8451
8452   The branch path indicates which branches should be followed.  If a non-zero
8453   path size is specified, the block should be rescanned and a different set
8454   of branches will be taken.  The branch path is only used if
8b3686ed 8455   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7afe21cc
RK
8456
8457   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8458   used to describe the block.  It is filled in with the information about
8459   the current block.  The incoming structure's branch path, if any, is used
8460   to construct the output branch path.  */
8461
7afe21cc 8462void
8b3686ed 8463cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7afe21cc
RK
8464     rtx insn;
8465     struct cse_basic_block_data *data;
8466     int follow_jumps;
8467     int after_loop;
8b3686ed 8468     int skip_blocks;
7afe21cc
RK
8469{
8470  rtx p = insn, q;
8471  int nsets = 0;
8472  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
fc3ffe83 8473  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7afe21cc
RK
8474  int path_size = data->path_size;
8475  int path_entry = 0;
8476  int i;
8477
8478  /* Update the previous branch path, if any.  If the last branch was
8479     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
8480     shorten the path by one and look at the previous branch.  We know that
8481     at least one branch must have been taken if PATH_SIZE is non-zero.  */
8482  while (path_size > 0)
8483    {
8b3686ed 8484      if (data->path[path_size - 1].status != NOT_TAKEN)
7afe21cc
RK
8485 {
8486   data->path[path_size - 1].status = NOT_TAKEN;
8487   break;
8488 }
8489      else
8490 path_size--;
8491    }
8492
8493  /* Scan to end of this basic block.  */
8494  while (p && GET_CODE (p) != CODE_LABEL)
8495    {
8496      /* Don't cse out the end of a loop.  This makes a difference
8497 only for the unusual loops that always execute at least once;
8498 all other loops have labels there so we will stop in any case.
8499 Cse'ing out the end of the loop is dangerous because it
8500 might cause an invariant expression inside the loop
8501 to be reused after the end of the loop.  This would make it
8502 hard to move the expression out of the loop in loop.c,
8503 especially if it is one of several equivalent expressions
8504 and loop.c would like to eliminate it.
8505
8506 If we are running after loop.c has finished, we can ignore
8507 the NOTE_INSN_LOOP_END.  */
8508
8509      if (! after_loop && GET_CODE (p) == NOTE
8510   && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8511 break;
8512
8513      /* Don't cse over a call to setjmp; on some machines (eg vax)
8514 the regs restored by the longjmp come from
8515 a later time than the setjmp.  */
8516      if (GET_CODE (p) == NOTE
8517   && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8518 break;
8519
8520      /* A PARALLEL can have lots of SETs in it,
8521 especially if it is really an ASM_OPERANDS.  */
8522      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8523   && GET_CODE (PATTERN (p)) == PARALLEL)
8524 nsets += XVECLEN (PATTERN (p), 0);
8525      else if (GET_CODE (p) != NOTE)
8526 nsets += 1;
8527
164c8956
RK
8528      /* Ignore insns made by CSE; they cannot affect the boundaries of
8529 the basic block.  */
8530
8531      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 8532 high_cuid = INSN_CUID (p);
164c8956
RK
8533      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8534 low_cuid = INSN_CUID (p);
7afe21cc
RK
8535
8536      /* See if this insn is in our branch path.  If it is and we are to
8537 take it, do so.  */
8538      if (path_entry < path_size && data->path[path_entry].branch == p)
8539 {
8b3686ed 8540   if (data->path[path_entry].status != NOT_TAKEN)
7afe21cc
RK
8541     p = JUMP_LABEL (p);
8542
8543   /* Point to next entry in path, if any.  */
8544   path_entry++;
8545 }
8546
8547      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8548 was specified, we haven't reached our maximum path length, there are
8549 insns following the target of the jump, this is the only use of the
8b3686ed
RK
8550 jump label, and the target label is preceded by a BARRIER.
8551
8552 Alternatively, we can follow the jump if it branches around a
8553 block of code and there are no other branches into the block.
8554 In this case invalidate_skipped_block will be called to invalidate any
8555 registers set in the block when following the jump.  */
8556
8557      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7afe21cc
RK
8558        && GET_CODE (p) == JUMP_INSN
8559        && GET_CODE (PATTERN (p)) == SET
8560        && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
85c3ba60 8561        && JUMP_LABEL (p) != 0
7afe21cc
RK
8562        && LABEL_NUSES (JUMP_LABEL (p)) == 1
8563        && NEXT_INSN (JUMP_LABEL (p)) != 0)
8564 {
8565   for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8566     if ((GET_CODE (q) != NOTE
8567          || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8568          || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8569         && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8570       break;
8571
8572   /* If we ran into a BARRIER, this code is an extension of the
8573      basic block when the branch is taken.  */
8b3686ed 8574   if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7afe21cc
RK
8575     {
8576       /* Don't allow ourself to keep walking around an
8577 always-executed loop.  */
fc3ffe83
RK
8578       if (next_real_insn (q) == next)
8579 {
8580   p = NEXT_INSN (p);
8581   continue;
8582 }
7afe21cc
RK
8583
8584       /* Similarly, don't put a branch in our path more than once.  */
8585       for (i = 0; i < path_entry; i++)
8586 if (data->path[i].branch == p)
8587   break;
8588
8589       if (i != path_entry)
8590 break;
8591
8592       data->path[path_entry].branch = p;
8593       data->path[path_entry++].status = TAKEN;
8594
8595       /* This branch now ends our path.  It was possible that we
8596 didn't see this branch the last time around (when the
8597 insn in front of the target was a JUMP_INSN that was
8598 turned into a no-op).  */
8599       path_size = path_entry;
8600
8601       p = JUMP_LABEL (p);
8602       /* Mark block so we won't scan it again later.
          (An insn with QImode acts as the "already scanned" flag;
          cse_main resets it to VOIDmode when it skips the block.)  */
8603       PUT_MODE (NEXT_INSN (p), QImode);
8604     }
8b3686ed
RK
8605   /* Detect a branch around a block of code.  */
8606   else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8607     {
8608       register rtx tmp;
8609
fc3ffe83
RK
8610       if (next_real_insn (q) == next)
8611 {
8612   p = NEXT_INSN (p);
8613   continue;
8614 }
8b3686ed
RK
8615
8616       for (i = 0; i < path_entry; i++)
8617 if (data->path[i].branch == p)
8618   break;
8619
8620       if (i != path_entry)
8621 break;
8622
8623       /* This is no_labels_between_p (p, q) with an added check for
8624 reaching the end of a function (in case Q precedes P).  */
8625       for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8626 if (GET_CODE (tmp) == CODE_LABEL)
8627   break;
8628
8629       if (tmp == q)
8630 {
8631   data->path[path_entry].branch = p;
8632   data->path[path_entry++].status = AROUND;
8633
8634   path_size = path_entry;
8635
8636   p = JUMP_LABEL (p);
8637   /* Mark block so we won't scan it again later.  */
8638   PUT_MODE (NEXT_INSN (p), QImode);
8639 }
8640     }
7afe21cc 8641 }
7afe21cc
RK
8642      p = NEXT_INSN (p);
8643    }
8644
8645  data->low_cuid = low_cuid;
8646  data->high_cuid = high_cuid;
8647  data->nsets = nsets;
8648  data->last = p;
8649
8650  /* If all jumps in the path are not taken, set our path length to zero
8651     so a rescan won't be done.  */
8652  for (i = path_size - 1; i >= 0; i--)
8b3686ed 8653    if (data->path[i].status != NOT_TAKEN)
7afe21cc
RK
8654      break;
8655
8656  if (i == -1)
8657    data->path_size = 0;
8658  else
8659    data->path_size = path_size;
8660
8661  /* End the current branch path.  */
8662  data->path[path_size].branch = 0;
8663}
8664\f
7afe21cc
RK
8665/* Perform cse on the instructions of a function.
8666   F is the first instruction.
8667   NREGS is one plus the highest pseudo-reg number used in the instruction.
8668
8669   AFTER_LOOP is 1 if this is the cse call done after loop optimization
8670   (only if -frerun-cse-after-loop).
8671
8672   Returns 1 if jump_optimize should be redone due to simplifications
8673   in conditional jump instructions.  */
8674
8675int
8676cse_main (f, nregs, after_loop, file)
8677     rtx f;
8678     int nregs;
8679     int after_loop;
8680     FILE *file;
8681{
8682  struct cse_basic_block_data val;
8683  register rtx insn = f;
8684  register int i;
8685
8686  cse_jumps_altered = 0;
a5dfb4ee 8687  recorded_label_ref = 0;
7afe21cc
RK
8688  constant_pool_entries_cost = 0;
8689  val.path_size = 0;
8690
8691  init_recog ();
9ae8ffe7 8692  init_alias_analysis ();
7afe21cc
RK
8693
8694  max_reg = nregs;
8695
556c714b
JW
8696  max_insn_uid = get_max_uid ();
8697
7afe21cc
RK
8698  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8699  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7afe21cc 8700
7bac1be0
RK
8701#ifdef LOAD_EXTEND_OP
8702
8703  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
8704     and change the code and mode as appropriate.  */
38a448ca 8705  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7bac1be0
RK
8706#endif
8707
7afe21cc
RK
8708  /* Discard all the free elements of the previous function
8709     since they are allocated in the temporary obstack.  */
4c9a05bc 8710  bzero ((char *) table, sizeof table);
7afe21cc
RK
8711  free_element_chain = 0;
8712  n_elements_made = 0;
8713
8714  /* Find the largest uid.  */
8715
164c8956
RK
8716  max_uid = get_max_uid ();
8717  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
4c9a05bc 8718  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
7afe21cc
RK
8719
8720  /* Compute the mapping from uids to cuids.
8721     CUIDs are numbers assigned to insns, like uids,
8722     except that cuids increase monotonically through the code.
8723     Don't assign cuids to line-number NOTEs, so that the distance in cuids
8724     between two insns is not affected by -g.  */
8725
8726  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8727    {
8728      if (GET_CODE (insn) != NOTE
8729   || NOTE_LINE_NUMBER (insn) < 0)
8730 INSN_CUID (insn) = ++i;
8731      else
8732 /* Give a line number note the same cuid as preceding insn.  */
8733 INSN_CUID (insn) = i;
8734    }
8735
8736  /* Initialize which registers are clobbered by calls.  */
8737
8738  CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8739
8740  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8741    if ((call_used_regs[i]
8742  /* Used to check !fixed_regs[i] here, but that isn't safe;
8743     fixed regs are still call-clobbered, and sched can get
8744     confused if they can "live across calls".
8745
8746     The frame pointer is always preserved across calls.  The arg
8747     pointer is if it is fixed.  The stack pointer usually is, unless
8748     RETURN_POPS_ARGS, in which case an explicit CLOBBER
8749     will be present.  If we are generating PIC code, the PIC offset
8750     table register is preserved across calls.  */
8751
8752  && i != STACK_POINTER_REGNUM
8753  && i != FRAME_POINTER_REGNUM
8bc169f2
DE
8754#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8755  && i != HARD_FRAME_POINTER_REGNUM
8756#endif
7afe21cc
RK
8757#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8758  && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8759#endif
be8fe470 8760#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
7afe21cc
RK
8761  && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8762#endif
8763  )
8764 || global_regs[i])
8765      SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8766
8767  /* Loop over basic blocks.
8768     Compute the maximum number of qty's needed for each basic block
8769     (which is 2 for each SET).  */
8770  insn = f;
8771  while (insn)
8772    {
8b3686ed
RK
8773      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8774       flag_cse_skip_blocks);
7afe21cc
RK
8775
8776      /* If this basic block was already processed or has no sets, skip it.
         (QImode on the first insn is the "already scanned" mark set by
         cse_end_of_basic_block; clear it here.)  */
8777      if (val.nsets == 0 || GET_MODE (insn) == QImode)
8778 {
8779   PUT_MODE (insn, VOIDmode);
8780   insn = (val.last ? NEXT_INSN (val.last) : 0);
8781   val.path_size = 0;
8782   continue;
8783 }
8784
8785      cse_basic_block_start = val.low_cuid;
8786      cse_basic_block_end = val.high_cuid;
8787      max_qty = val.nsets * 2;
8788
8789      if (file)
ab87f8c8 8790 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7afe21cc
RK
8791   INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8792   val.nsets);
8793
8794      /* Make MAX_QTY bigger to give us room to optimize
8795 past the end of this basic block, if that should prove useful.  */
8796      if (max_qty < 500)
8797 max_qty = 500;
8798
8799      max_qty += max_reg;
8800
8801      /* If this basic block is being extended by following certain jumps,
8802 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8803 Otherwise, we start after this basic block.  */
8804      if (val.path_size > 0)
8805 cse_basic_block (insn, val.last, val.path, 0);
8806      else
8807 {
8808   int old_cse_jumps_altered = cse_jumps_altered;
8809   rtx temp;
8810
8811   /* When cse changes a conditional jump to an unconditional
8812      jump, we want to reprocess the block, since it will give
8813      us a new branch path to investigate.  */
8814   cse_jumps_altered = 0;
8815   temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8b3686ed
RK
8816   if (cse_jumps_altered == 0
8817       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
8818     insn = temp;
8819
8820   cse_jumps_altered |= old_cse_jumps_altered;
8821 }
8822
8823#ifdef USE_C_ALLOCA
      /* With the C alloca emulation, this reclaims the block-local
         allocations made above.  */
8824      alloca (0);
8825#endif
8826    }
8827
8828  /* Tell refers_to_mem_p that qty_const info is not available.  */
8829  qty_const = 0;
8830
8831  if (max_elements_made < n_elements_made)
8832    max_elements_made = n_elements_made;
8833
a5dfb4ee 8834  return cse_jumps_altered || recorded_label_ref;
7afe21cc
RK
8835}
8836
8837/* Process a single basic block.  FROM and TO are the limits of the basic
8838   block.  NEXT_BRANCH points to the branch path when following jumps or
8839   a null path when not following jumps.
8840
8841   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8842   loop.  This is true when we are being called for the last time on a
8843   block and this CSE pass is before loop.c.  */
8844
8845static rtx
8846cse_basic_block (from, to, next_branch, around_loop)
8847     register rtx from, to;
8848     struct branch_path *next_branch;
8849     int around_loop;
8850{
8851  register rtx insn;
8852  int to_usage = 0;
7bd8b2a8 8853  rtx libcall_insn = NULL_RTX;
e9a25f70 8854  int num_insns = 0;
7afe21cc
RK
8855
8856  /* Each of these arrays is undefined before max_reg, so only allocate
8857     the space actually needed and adjust the start below.  */
8858
8859  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8860  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8861  qty_mode= (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8862  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8863  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8864  qty_comparison_code
8865    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8866  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8867  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8868
  /* Bias the array pointers so they may be indexed directly by quantity
     number (which is always >= max_reg).  */
8869  qty_first_reg -= max_reg;
8870  qty_last_reg -= max_reg;
8871  qty_mode -= max_reg;
8872  qty_const -= max_reg;
8873  qty_const_insn -= max_reg;
8874  qty_comparison_code -= max_reg;
8875  qty_comparison_qty -= max_reg;
8876  qty_comparison_const -= max_reg;
8877
8878  new_basic_block ();
8879
8880  /* TO might be a label.  If so, protect it from being deleted.  */
8881  if (to != 0 && GET_CODE (to) == CODE_LABEL)
8882    ++LABEL_NUSES (to);
8883
8884  for (insn = from; insn != to; insn = NEXT_INSN (insn))
8885    {
1d22a2c1 8886      register enum rtx_code code = GET_CODE (insn);
e9a25f70
1d22a2c1
MM
8888      /* If we have processed 1,000 insns, flush the hash table to
8889 avoid extreme quadratic behavior.  We must not include NOTEs
8890 in the count since there may be more of them when generating
8891 debugging information.  If we clear the table at different
8892 times, code generated with -g -O might be different than code
8893 generated with -O but not -g.
e9a25f70
JL
8894
8895 ??? This is a real kludge and needs to be done some other way.
8896 Perhaps for 2.9.  */
1d22a2c1 8897      if (code != NOTE && num_insns++ > 1000)
e9a25f70 8898 {
01e752d3 8899   flush_hash_table ();
e9a25f70
JL
8900   num_insns = 0;
8901 }
7afe21cc
RK
8902
8903      /* See if this is a branch that is part of the path.  If so, and it is
8904 to be taken, do so.  */
8905      if (next_branch->branch == insn)
8906 {
8b3686ed
RK
8907   enum taken status = next_branch++->status;
8908   if (status != NOT_TAKEN)
7afe21cc 8909     {
8b3686ed
RK
8910       if (status == TAKEN)
8911 record_jump_equiv (insn, 1);
8912       else
8913 invalidate_skipped_block (NEXT_INSN (insn));
8914
7afe21cc
RK
8915       /* Set the last insn as the jump insn; it doesn't affect cc0.
8916 Then follow this branch.  */
8917#ifdef HAVE_cc0
8918       prev_insn_cc0 = 0;
8919#endif
8920       prev_insn = insn;
8921       insn = JUMP_LABEL (insn);
8922       continue;
8923     }
8924 }
8925
7afe21cc
RK
8926      if (GET_MODE (insn) == QImode)
8927 PUT_MODE (insn, VOIDmode);
8928
8929      if (GET_RTX_CLASS (code) == 'i')
8930 {
7bd8b2a8
JL
8931   rtx p;
8932
7afe21cc
RK
8933   /* Process notes first so we have all notes in canonical forms when
8934      looking for duplicate operations.  */
8935
8936   if (REG_NOTES (insn))
906c4e36 8937     REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7afe21cc
RK
8938
8939   /* Track when we are inside in LIBCALL block.  Inside such a block,
8940      we do not want to record destinations.  The last insn of a
8941      LIBCALL block is not considered to be part of the block, since
830a38ee 8942      its destination is the result of the block and hence should be
7afe21cc
RK
8943      recorded.  */
8944
63be02db 8945   if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7bd8b2a8 8946     libcall_insn = XEXP (p, 0);
906c4e36 8947   else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7bd8b2a8 8948     libcall_insn = NULL_RTX;
7afe21cc 8949
7bd8b2a8 8950   cse_insn (insn, libcall_insn);
7afe21cc
RK
8951 }
8952
8953      /* If INSN is now an unconditional jump, skip to the end of our
8954 basic block by pretending that we just did the last insn in the
8955 basic block.  If we are jumping to the end of our block, show
8956 that we can have one usage of TO.  */
8957
8958      if (simplejump_p (insn))
8959 {
8960   if (to == 0)
8961     return 0;
8962
8963   if (JUMP_LABEL (insn) == to)
8964     to_usage = 1;
8965
6a5293dc
RS
8966   /* Maybe TO was deleted because the jump is unconditional.
8967      If so, there is nothing left in this basic block.  */
8968   /* ??? Perhaps it would be smarter to set TO
8969      to whatever follows this insn,
8970      and pretend the basic block had always ended here.  */
8971   if (INSN_DELETED_P (to))
8972     break;
8973
7afe21cc
RK
8974   insn = PREV_INSN (to);
8975 }
8976
8977      /* See if it is ok to keep on going past the label
8978 which used to end our basic block.  Remember that we incremented
d45cf215 8979 the count of that label, so we decrement it here.  If we made
7afe21cc
RK
8980 a jump unconditional, TO_USAGE will be one; in that case, we don't
8981 want to count the use in that jump.  */
8982
8983      if (to != 0 && NEXT_INSN (insn) == to
8984   && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8985 {
8986   struct cse_basic_block_data val;
146135d6 8987   rtx prev;
7afe21cc
RK
8988
8989   insn = NEXT_INSN (to);
8990
8991   if (LABEL_NUSES (to) == 0)
146135d6 8992     insn = delete_insn (to);
7afe21cc 8993
146135d6
RK
8994   /* If TO was the last insn in the function, we are done.  */
8995   if (insn == 0)
7afe21cc
RK
8996     return 0;
8997
146135d6
RK
8998   /* If TO was preceded by a BARRIER we are done with this block
8999      because it has no continuation.  */
9000   prev = prev_nonnote_insn (to);
9001   if (prev && GET_CODE (prev) == BARRIER)
9002     return insn;
9003
9004   /* Find the end of the following block.  Note that we won't be
9005      following branches in this case.  */
7afe21cc
RK
9006   to_usage = 0;
9007   val.path_size = 0;
8b3686ed 9008   cse_end_of_basic_block (insn, &val, 0, 0, 0);
7afe21cc
RK
9009
9010   /* If the tables we allocated have enough space left
9011      to handle all the SETs in the next basic block,
9012      continue through it.  Otherwise, return,
9013      and that block will be scanned individually.  */
9014   if (val.nsets * 2 + next_qty > max_qty)
9015     break;
9016
9017   cse_basic_block_start = val.low_cuid;
9018   cse_basic_block_end = val.high_cuid;
9019   to = val.last;
9020
9021   /* Prevent TO from being deleted if it is a label.  */
9022   if (to != 0 && GET_CODE (to) == CODE_LABEL)
9023     ++LABEL_NUSES (to);
9024
9025   /* Back up so we process the first insn in the extension.  */
9026   insn = PREV_INSN (insn);
9027 }
9028    }
9029
9030  if (next_qty > max_qty)
9031    abort ();
9032
9033  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
9034     the previous insn is the only insn that branches to the head of a loop,
9035     we can cse into the loop.  Don't do this if we changed the jump
9036     structure of a loop unless we aren't going to be following jumps.  */
9037
8b3686ed
RK
9038  if ((cse_jumps_altered == 0
9039       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
9040      && around_loop && to != 0
9041      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
9042      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
9043      && JUMP_LABEL (PREV_INSN (to)) != 0
9044      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
9045    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
9046
9047  return to ? NEXT_INSN (to) : 0;
9048}
9049\f
9050/* Count the number of times registers are used (not set) in X.
9051   COUNTS is an array in which we accumulate the count, INCR is how much
79644f06
RK
9052   we count each register usage.
9053
9054   Don't count a usage of DEST, which is the SET_DEST of a SET which
9055   contains X in its SET_SRC.  This is because such a SET does not
9056   modify the liveness of DEST.  */
7afe21cc
RK
9057
9058static void
79644f06 9059count_reg_usage (x, counts, dest, incr)
7afe21cc
RK
9060     rtx x;
9061     int *counts;
79644f06 9062     rtx dest;
7afe21cc
RK
9063     int incr;
9064{
f1e7c95f 9065  enum rtx_code code;
7afe21cc
RK
9066  char *fmt;
9067  int i, j;
9068
f1e7c95f
RK
9069  if (x == 0)
9070    return;
9071
9072  switch (code = GET_CODE (x))
7afe21cc
RK
9073    {
9074    case REG:
79644f06
RK
9075      if (x != dest)
9076 counts[REGNO (x)] += incr;
7afe21cc
RK
9077      return;
9078
    /* Constants and other leaf rtxs contain no register uses.  */
9079    case PC:
9080    case CC0:
9081    case CONST:
9082    case CONST_INT:
9083    case CONST_DOUBLE:
9084    case SYMBOL_REF:
9085    case LABEL_REF:
02e39abc
JL
9086      return;
9087
9088    case CLOBBER:
9089      /* If we are clobbering a MEM, mark any registers inside the address
9090         as being used.  */
9091      if (GET_CODE (XEXP (x, 0)) == MEM)
9092 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7afe21cc
RK
9093      return;

9094
9095    case SET:
9096      /* Unless we are setting a REG, count everything in SET_DEST.  */
9097      if (GET_CODE (SET_DEST (x)) != REG)
79644f06 9098 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9ff08f70
RK
9099
9100      /* If SRC has side-effects, then we can't delete this insn, so the
9101 usage of SET_DEST inside SRC counts.
9102
9103 ??? Strictly-speaking, we might be preserving this insn
9104 because some other SET has side-effects, but that's hard
9105 to do and can't happen now.  */
9106      count_reg_usage (SET_SRC (x), counts,
9107         side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9108         incr);
7afe21cc
RK
9109      return;
9110
f1e7c95f
RK
9111    case CALL_INSN:
9112      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9113
9114      /* ... falls through ...  */
7afe21cc
RK
9115    case INSN:
9116    case JUMP_INSN:
79644f06 9117      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7afe21cc
RK
9118
9119      /* Things used in a REG_EQUAL note aren't dead since loop may try to
9120 use them.  */
9121
f1e7c95f 9122      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7afe21cc
RK
9123      return;
9124
9125    case EXPR_LIST:
9126    case INSN_LIST:
f1e7c95f 9127      if (REG_NOTE_KIND (x) == REG_EQUAL
c6a26dc4 9128   || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
79644f06 9129 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      /* Recurse down the rest of the note list.  */
f1e7c95f 9130      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7afe21cc 9131      return;
e9a25f70
JL
9132
9133    default:
9134      break;
7afe21cc
RK
9135    }
9136
  /* For any other rtx, walk its operands generically, propagating DEST
     so a use of the destination inside an arithmetic SET_SRC is skipped.  */
9137  fmt = GET_RTX_FORMAT (code);
9138  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9139    {
9140      if (fmt[i] == 'e')
79644f06 9141 count_reg_usage (XEXP (x, i), counts, dest, incr);
7afe21cc
RK
9142      else if (fmt[i] == 'E')
9143 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
79644f06 9144   count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7afe21cc
RK
9145    }
9146}
9147\f
9148/* Scan all the insns and delete any that are dead; i.e., they store a register
9149   that is never used or they copy a register to itself.
9150
c6a26dc4
JL
9151   This is used to remove insns made obviously dead by cse, loop or other
9152   optimizations.  It improves the heuristics in loop since it won't try to
9153   move dead invariants out of loops or make givs for dead quantities.  The
9154   remaining passes of the compilation are also sped up.  */
7afe21cc
RK
9155
9156void
c6a26dc4 9157delete_trivially_dead_insns (insns, nreg)
7afe21cc
RK
9158     rtx insns;
9159     int nreg;
9160{
9161  int *counts = (int *) alloca (nreg * sizeof (int));
77fa0940 9162  rtx insn, prev;
51723711 9163#ifdef HAVE_cc0
d45cf215 9164  rtx tem;
51723711 9165#endif
7afe21cc 9166  int i;
614bb5d4 9167  int in_libcall = 0, dead_libcall = 0;
7afe21cc
RK
9168
9169  /* First count the number of times each register is used.  */
4c9a05bc 9170  bzero ((char *) counts, sizeof (int) * nreg);
7afe21cc 9171  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
79644f06 9172    count_reg_usage (insn, counts, NULL_RTX, 1);
7afe21cc
RK
9173
9174  /* Go from the last insn to the first and delete insns that only set unused
9175     registers or copy a register to itself.  As we delete an insn, remove
9176     usage counts for registers it uses.  */
77fa0940 9177  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
7afe21cc
RK
9178    {
9179      int live_insn = 0;
614bb5d4 9180      rtx note;
7afe21cc 9181
77fa0940
RK
9182      prev = prev_real_insn (insn);
9183
614bb5d4
JL
9184      /* Don't delete any insns that are part of a libcall block unless
9185 we can delete the whole libcall block.
9186
77fa0940
RK
9187 Flow or loop might get confused if we did that.  Remember
9188 that we are scanning backwards.  */
906c4e36 9189      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
614bb5d4
JL
9190 {
9191   in_libcall = 1;
9192   live_insn = 1;
9193   dead_libcall = 0;
e4890d45
614bb5d4
JL
9194
9195   /* See if there's a REG_EQUAL note on this insn and try to
9196      replace the source with the REG_EQUAL expression.
9197
9198      We assume that insns with REG_RETVALs can only be reg->reg
9199      copies at this point.  */
9200   note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9201   if (note)
9202     {
9203       rtx set = single_set (insn);
9204       if (set
9205    && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9206 {
            /* The libcall sequence is now redundant: the result is
               computed directly from the REG_EQUAL expression, so the
               whole block may be deleted as we continue scanning.  */
9207   remove_note (insn,
9208         find_reg_note (insn, REG_RETVAL, NULL_RTX));
9209   dead_libcall = 1;
9210 }
9211     }
9212 }
9213      else if (in_libcall)
9214 live_insn = ! dead_libcall;
e4890d45 9215      else if (GET_CODE (PATTERN (insn)) == SET)
7afe21cc
RK
9216 {
          /* A register copied to itself is dead.  */
9217   if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9218       && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9219     ;
9220
d45cf215
RS
9221#ifdef HAVE_cc0
9222   else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9223     && ! side_effects_p (SET_SRC (PATTERN (insn)))
9224     && ((tem = next_nonnote_insn (insn)) == 0
9225         || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9226         || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9227     ;
9228#endif
7afe21cc
RK
9229   else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9230     || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9231     || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
9232     || side_effects_p (SET_SRC (PATTERN (insn))))
9233     live_insn = 1;
9234 }
9235      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
9236 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9237   {
9238     rtx elt = XVECEXP (PATTERN (insn), 0, i);
9239
9240     if (GET_CODE (elt) == SET)
9241       {
9242  if (GET_CODE (SET_DEST (elt)) == REG
9243      && SET_DEST (elt) == SET_SRC (elt))
9244    ;
9245
d45cf215
RS
9246#ifdef HAVE_cc0
9247  else if (GET_CODE (SET_DEST (elt)) == CC0
9248    && ! side_effects_p (SET_SRC (elt))
9249    && ((tem = next_nonnote_insn (insn)) == 0
9250        || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9251        || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9252    ;
9253#endif
7afe21cc
RK
9254  else if (GET_CODE (SET_DEST (elt)) != REG
9255    || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9256    || counts[REGNO (SET_DEST (elt))] != 0
9257    || side_effects_p (SET_SRC (elt)))
9258    live_insn = 1;
9259       }
9260     else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9261       live_insn = 1;
9262   }
9263      else
9264 live_insn = 1;
9265
9266      /* If this is a dead insn, delete it and show registers in it aren't
e4890d45 9267 being used.  */
7afe21cc 9268
e4890d45 9269      if (! live_insn)
7afe21cc 9270 {
79644f06 9271   count_reg_usage (insn, counts, NULL_RTX, -1);
77fa0940 9272   delete_insn (insn);
7afe21cc 9273 }
e4890d45
      /* NOTE(review): INSN may have just been deleted above; this relies
         on delete_insn leaving the insn's REG_NOTES accessible -- confirm.  */
906c4e36 9275      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
614bb5d4
JL
9276 {
9277   in_libcall = 0;
9278   dead_libcall = 0;
9279 }
7afe21cc
RK
9280    }
9281}
This page took 1.930906 seconds and 5 git commands to generate.