1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "regs.h"
24 #include "hard-reg-set.h"
25 #include "flags.h"
26 #include "real.h"
27 #include "insn-config.h"
28 #include "recog.h"
29
30 #include <stdio.h>
31 #include <setjmp.h>
32
33 /* The basic idea of common subexpression elimination is to go
34 through the code, keeping a record of expressions that would
35 have the same value at the current scan point, and replacing
36 expressions encountered with the cheapest equivalent expression.
37
38 It is too complicated to keep track of the different possibilities
39 when control paths merge; so, at each label, we forget all that is
40 known and start fresh. This can be described as processing each
41 basic block separately. Note, however, that these are not quite
42 the same as the basic blocks found by a later pass and used for
43 data flow analysis and register packing. We do not need to start fresh
44 after a conditional jump instruction if there is no label there.
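   For example (a source-level sketch; cse actually works on the RTL
   form of the code): given

	a = b + c;
	...
	d = b + c;

   the second computation of `b + c' can be replaced by a use of the
   place that holds the result of the first one, provided neither `b'
   nor `c' has been stored into in between and no label intervenes.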
45
46 We use two data structures to record the equivalent expressions:
47 a hash table for most expressions, and several vectors together
48 with "quantity numbers" to record equivalent (pseudo) registers.
49
50 The use of the special data structure for registers is desirable
51 because it is faster. It is possible because register references
52 contain a fairly small number, the register number, taken from
53 a contiguously allocated series, and two register references are
54 identical if they have the same number. General expressions
55 do not have any such thing, so the only way to retrieve the
56 information recorded on an expression other than a register
57 is to keep it in a hash table.
58
59 Registers and "quantity numbers":
60
61 At the start of each basic block, all of the (hardware and pseudo)
62 registers used in the function are given distinct quantity
63 numbers to indicate their contents. During scan, when the code
64 copies one register into another, we copy the quantity number.
65 When a register is loaded in any other way, we allocate a new
66 quantity number to describe the value generated by this operation.
67 `reg_qty' records what quantity a register is currently thought
68 of as containing.
69
70 All real quantity numbers are greater than or equal to `max_reg'.
71 If register N has not been assigned a quantity, reg_qty[N] will equal N.
72
73 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
74 variables should be referenced with an index below `max_reg'.
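   As an illustration, with hypothetical numbers: suppose max_reg is 110
   and (reg 103) is loaded from memory.  It gets a fresh quantity, say
   reg_qty[103] == 110.  If (reg 105) is then copied from (reg 103),
   reg_qty[105] becomes 110 as well, recording that the two registers
   hold the same value.  A register such as (reg 104) that has not been
   assigned a quantity still has reg_qty[104] == 104.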
75
76 We also maintain a bidirectional chain of registers for each
77 quantity number. `qty_first_reg', `qty_last_reg',
78 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
79
80 The first register in a chain is the one whose lifespan is least local.
81 Among equals, it is the one that was seen first.
82 We replace any equivalent register with that one.
83
84 If two registers have the same quantity number, REG expressions
85 with the quantity's mode (`qty_mode') must be in the hash table for both
86 registers and must be in the same class.
87
88 The converse is not true. Since hard registers may be referenced in
89 any mode, two REG expressions might be equivalent in the hash table
90 but not have the same quantity number if the quantity of one
91 of the registers does not have the same mode as those expressions.
92
93 Constants and quantity numbers
94
95 When a quantity has a known constant value, that value is stored
96 in the appropriate element of qty_const. This is in addition to
97 putting the constant in the hash table as is usual for non-regs.
98
99 Whether a reg or a constant is preferred is determined by the configuration
100 macro CONST_COSTS and will often depend on the constant value. In any
101 event, expressions containing constants can be simplified by fold_rtx.
102
103 When a quantity has a known nearly constant value (such as an address
104 of a stack slot), that value is stored in the appropriate element
105 of qty_const.
106
107 Integer constants don't have a machine mode. However, cse
108 determines the intended machine mode from the destination
109 of the instruction that moves the constant. The machine mode
110 is recorded in the hash table along with the actual RTL
111 constant expression so that different modes are kept separate.
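   For example, (const_int 1) moved into an SImode register and
   (const_int 1) moved into a QImode register give rise to two distinct
   table entries, identical in their `exp' field but differing in mode.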
112
113 Other expressions:
114
115 To record known equivalences among expressions in general
116 we use a hash table called `table'. It has a fixed number of buckets
117 that contain chains of `struct table_elt' elements for expressions.
118 These chains connect the elements whose expressions have the same
119 hash codes.
120
121 Other chains through the same elements connect the elements which
122 currently have equivalent values.
123
124 Register references in an expression are canonicalized before hashing
125 the expression. This is done using `reg_qty' and `qty_first_reg'.
126 The hash code of a register reference is computed using the quantity
127 number, not the register number.
128
129 When the value of an expression changes, it is necessary to remove from the
130 hash table not just that expression but all expressions whose values
131 could be different as a result.
132
133 1. If the value changing is in memory, except in special cases
134 ANYTHING referring to memory could be changed. That is because
135 nobody knows where a pointer does not point.
136 The function `invalidate_memory' removes what is necessary.
137
138 The special cases are when the address is constant or is
139 a constant plus a fixed register such as the frame pointer
140 or a static chain pointer. When such addresses are stored in,
141 we can tell exactly which other such addresses must be invalidated
142 due to overlap. `invalidate' does this.
143 All expressions that refer to non-constant
144 memory addresses are also invalidated. `invalidate_memory' does this.
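	 For example, a store into (mem (plus frame_pointer_rtx
	 (const_int 8))) need only invalidate the table entries whose
	 memory references can overlap that stack slot; entries for
	 other fixed offsets from the frame pointer survive.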
145
146 2. If the value changing is a register, all expressions
147 containing references to that register, and only those,
148 must be removed.
149
150 Because searching the entire hash table for expressions that contain
151 a register is very slow, we try to figure out when it isn't necessary.
152 Precisely, this is necessary only when expressions have been
153 entered in the hash table using this register, and then the value has
154 changed, and then another expression wants to be added to refer to
155 the register's new value. This sequence of circumstances is rare
156 within any one basic block.
157
158 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
159 reg_tick[i] is incremented whenever a value is stored in register i.
160 reg_in_table[i] holds -1 if no references to register i have been
161 entered in the table; otherwise, it contains the value reg_tick[i] had
162 when the references were entered. If we want to enter a reference
163 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
164 Until we want to enter a new entry, the mere fact that the two vectors
165 don't match causes the entries to be ignored if anyone tries to match them.
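   As an illustration: when an expression mentioning register 6 is
   entered, reg_in_table[6] is set to reg_tick[6].  A later store into
   register 6 increments reg_tick[6], so the two no longer match and the
   stale entries are ignored; they are actually removed from the table
   only when a new reference to register 6 is about to be entered.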
166
167 Registers themselves are entered in the hash table as well as in
168 the equivalent-register chains. However, the vectors `reg_tick'
169 and `reg_in_table' do not apply to expressions which are simple
170 register references. These expressions are removed from the table
171 immediately when they become invalid, and this can be done even if
172 we do not immediately search for all the expressions that refer to
173 the register.
174
175 A CLOBBER rtx in an instruction invalidates its operand for further
176 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
177 invalidates everything that resides in memory.
178
179 Related expressions:
180
181 Constant expressions that differ only by an additive integer
182 are called related. When a constant expression is put in
183 the table, the related expression with no constant term
184 is also entered. These are made to point at each other
185 so that it is possible to find out if there exists any
186 register equivalent to an expression related to a given expression. */
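/* For example, (const (plus (symbol_ref "x") (const_int 8))) is related to
   (symbol_ref "x").  If some register is known to hold the value of
   (symbol_ref "x"), the former constant can be recomputed as that register
   plus 8; see use_related_value.  */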
187
188 /* One plus largest register number used in this function. */
189
190 static int max_reg;
191
192 /* Length of vectors indexed by quantity number.
193 We know in advance we will not need a quantity number this big. */
194
195 static int max_qty;
196
197 /* Next quantity number to be allocated.
198 This is 1 + the largest number needed so far. */
199
200 static int next_qty;
201
202 /* Indexed by quantity number, gives the first (or last) (pseudo) register
203 in the chain of registers that currently contain this quantity. */
204
205 static int *qty_first_reg;
206 static int *qty_last_reg;
207
208 /* Indexed by quantity number, gives the mode of the quantity. */
209
210 static enum machine_mode *qty_mode;
211
212 /* Indexed by quantity number, gives the rtx of the constant value of the
213 quantity, or zero if it does not have a known value.
214 A sum of the frame pointer (or arg pointer) plus a constant
215 can also be entered here. */
216
217 static rtx *qty_const;
218
219 /* Indexed by qty number, gives the insn that stored the constant value
220 recorded in `qty_const'. */
221
222 static rtx *qty_const_insn;
223
224 /* The next three variables are used to track when a comparison between a
225 quantity and some constant or register has been passed. In that case, we
226 know the results of the comparison in case we see it again. These variables
227 record a comparison that is known to be true. */
228
229 /* Indexed by qty number, gives the rtx code of a comparison with a known
230 result involving this quantity. If none, it is UNKNOWN. */
231 static enum rtx_code *qty_comparison_code;
232
233 /* Indexed by qty number, gives the constant being compared against in a
234 comparison of known result. If no such comparison, it is undefined.
235 If the comparison is not with a constant, it is zero. */
236
237 static rtx *qty_comparison_const;
238
239 /* Indexed by qty number, gives the quantity being compared against in a
240 comparison of known result. If no such comparison, it is undefined.
241 If the comparison is not with a register, it is -1. */
242
243 static int *qty_comparison_qty;
244
245 #ifdef HAVE_cc0
246 /* For machines that have a CC0, we do not record its value in the hash
247 table, since its use is guaranteed to be in the insn immediately following
248 its definition and any other insn is presumed to invalidate it.
249
250 Instead, we store below the value last assigned to CC0. If it should
251 happen to be a constant, it is stored in preference to the actual
252 assigned value. In case it is a constant, we store the mode in which
253 the constant should be interpreted. */
254
255 static rtx prev_insn_cc0;
256 static enum machine_mode prev_insn_cc0_mode;
257 #endif
258
259 /* Previous actual insn. 0 if at first insn of basic block. */
260
261 static rtx prev_insn;
262
263 /* Insn being scanned. */
264
265 static rtx this_insn;
266
267 /* Indexed by (pseudo) register number, gives the quantity number
268 of the register's current contents. */
269
270 static int *reg_qty;
271
272 /* Indexed by (pseudo) register number, gives the number of the next (or
273 previous) (pseudo) register in the chain of registers sharing the same
274 value.
275
276 Or -1 if this register is at the end of the chain.
277
278 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
279
280 static int *reg_next_eqv;
281 static int *reg_prev_eqv;
282
283 /* Indexed by (pseudo) register number, gives the number of times
284 that register has been altered in the current basic block. */
285
286 static int *reg_tick;
287
288 /* Indexed by (pseudo) register number, gives the reg_tick value at which
289 rtx's containing this register are valid in the hash table.
290 If this does not equal the current reg_tick value, such expressions
291 existing in the hash table are invalid.
292 If this is -1, no expressions containing this register have been
293 entered in the table. */
294
295 static int *reg_in_table;
296
297 /* A HARD_REG_SET containing all the hard registers for which there is
298 currently a REG expression in the hash table. Note the difference
299 from the above variables, which indicate if the REG is mentioned in some
300 expression in the table. */
301
302 static HARD_REG_SET hard_regs_in_table;
303
304 /* A HARD_REG_SET containing all the hard registers that are invalidated
305 by a CALL_INSN. */
306
307 static HARD_REG_SET regs_invalidated_by_call;
308
309 /* Two vectors of ints:
310 one containing max_reg elements, each -1; the other containing
311 max_reg + 500 elements (an approximation for max_qty), where element i contains i.
312 These are used to initialize various other vectors quickly. */
313
314 static int *all_minus_one;
315 static int *consec_ints;
316
317 /* CUID of insn that starts the basic block currently being cse-processed. */
318
319 static int cse_basic_block_start;
320
321 /* CUID of insn that ends the basic block currently being cse-processed. */
322
323 static int cse_basic_block_end;
324
325 /* Vector mapping INSN_UIDs to cuids.
326 The cuids are like uids but always increase monotonically.
327 We use them to see whether a reg is used outside a given basic block. */
328
329 static int *uid_cuid;
330
331 /* Highest UID in UID_CUID. */
332 static int max_uid;
333
334 /* Get the cuid of an insn. */
335
336 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
337
338 /* Nonzero if cse has altered conditional jump insns
339 in such a way that jump optimization should be redone. */
340
341 static int cse_jumps_altered;
342
343 /* canon_hash stores 1 in do_not_record
344 if it notices a reference to CC0, PC, or some other volatile
345 subexpression. */
346
347 static int do_not_record;
348
349 /* canon_hash stores 1 in hash_arg_in_memory
350 if it notices a reference to memory within the expression being hashed. */
351
352 static int hash_arg_in_memory;
353
354 /* canon_hash stores 1 in hash_arg_in_struct
355 if it notices a reference to memory that's part of a structure. */
356
357 static int hash_arg_in_struct;
358
359 /* The hash table contains buckets which are chains of `struct table_elt's,
360 each recording one expression's information.
361 That expression is in the `exp' field.
362
363 Those elements with the same hash code are chained in both directions
364 through the `next_same_hash' and `prev_same_hash' fields.
365
366 Each set of expressions with equivalent values
367 is on a two-way chain through the `next_same_value'
368 and `prev_same_value' fields, and all point with
369 the `first_same_value' field at the first element in
370 that chain. The chain is in order of increasing cost.
371 Each element's cost value is in its `cost' field.
372
373 The `in_memory' field is nonzero for elements that
374 involve any reference to memory. These elements are removed
375 whenever a write is done to an unidentified location in memory.
376 To be safe, we assume that a memory address is unidentified unless
377 the address is either a symbol constant or a constant plus
378 the frame pointer or argument pointer.
379
380 The `in_struct' field is nonzero for elements that
381 involve any reference to memory inside a structure or array.
382
383 The `related_value' field is used to connect related expressions
384 (that differ by adding an integer).
385 The related expressions are chained in a circular fashion.
386 `related_value' is zero for expressions for which this
387 chain is not useful.
388
389 The `cost' field stores the cost of this element's expression.
390
391 The `is_const' flag is set if the element is a constant (including
392 a fixed address).
393
394 The `flag' field is used as a temporary during some search routines.
395
396 The `mode' field is usually the same as GET_MODE (`exp'), but
397 if `exp' is a CONST_INT and has no machine mode then the `mode'
398 field is the mode it was being used as. Each constant is
399 recorded separately for each mode it is used with. */
400
401
402 struct table_elt
403 {
404 rtx exp;
405 struct table_elt *next_same_hash;
406 struct table_elt *prev_same_hash;
407 struct table_elt *next_same_value;
408 struct table_elt *prev_same_value;
409 struct table_elt *first_same_value;
410 struct table_elt *related_value;
411 int cost;
412 enum machine_mode mode;
413 char in_memory;
414 char in_struct;
415 char is_const;
416 char flag;
417 };
418
419 #define HASHBITS 16
420
421 /* We don't want a lot of buckets, because we rarely have very many
422 things stored in the hash table, and a lot of buckets slows
423 down a lot of loops that happen frequently. */
424 #define NBUCKETS 31
425
426 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
427 register (hard registers may require `do_not_record' to be set). */
428
429 #define HASH(X, M) \
430 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
431 ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS) \
432 : canon_hash (X, M) % NBUCKETS)
433
434 /* Determine whether register number N is considered a fixed register for CSE.
435 It is desirable to replace other regs with fixed regs, to reduce need for
436 non-fixed hard regs.
437 A reg wins if it is either the frame pointer or designated as fixed,
438 but not if it is an overlapping register. */
439 #ifdef OVERLAPPING_REGNO_P
440 #define FIXED_REGNO_P(N) \
441 (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
442 && ! OVERLAPPING_REGNO_P ((N)))
443 #else
444 #define FIXED_REGNO_P(N) \
445 ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
446 #endif
447
448 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
449 hard registers and pointers into the frame are the cheapest with a cost
450 of 0. Next come pseudos with a cost of 1 and other hard registers with
451 a cost of 2. Aside from these special cases, call `rtx_cost'. */
452
453 #define CHEAP_REG(N) \
454 ((N) == FRAME_POINTER_REGNUM || (N) == STACK_POINTER_REGNUM \
455 || (N) == ARG_POINTER_REGNUM \
456 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
457 || ((N) < FIRST_PSEUDO_REGISTER \
458 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
459
460 #define COST(X) \
461 (GET_CODE (X) == REG \
462 ? (CHEAP_REG (REGNO (X)) ? 0 \
463 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
464 : 2) \
465 : rtx_cost (X, SET) * 2)
466
467 /* Determine if the quantity number for register N represents a valid index
468 into the `qty_...' variables. */
469
470 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
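/* This test works because `new_basic_block' initializes reg_qty[N] to N
   (using `consec_ints'), while real quantity numbers, handed out by
   `make_new_qty', start at `max_reg'.  So reg_qty[N] != N exactly when
   register N has been assigned a quantity in the current basic block.  */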
471
472 static struct table_elt *table[NBUCKETS];
473
474 /* Chain of `struct table_elt's made so far for this function
475 but currently removed from the table. */
476
477 static struct table_elt *free_element_chain;
478
479 /* Number of `struct table_elt' structures made so far for this function. */
480
481 static int n_elements_made;
482
483 /* Maximum value `n_elements_made' has had so far in this compilation
484 for functions previously processed. */
485
486 static int max_elements_made;
487
488 /* Surviving equivalence class when two equivalence classes are merged
489 by recording the effects of a jump in the last insn. Zero if the
490 last insn was not a conditional jump. */
491
492 static struct table_elt *last_jump_equiv_class;
493
494 /* Set to the cost of a constant pool reference if one was found for a
495 symbolic constant. If so, we should try to
496 convert constants into constant pool entries if they don't fit in
497 the insn. */
498
499 static int constant_pool_entries_cost;
500
501 /* Bits describing what kind of values in memory must be invalidated
502 for a particular instruction. If all three bits are zero,
503 no memory refs need to be invalidated. Each bit is more powerful
504 than the preceding ones, and if a bit is set then the preceding
505 bits are also set.
506
507 Here is how the bits are set:
508 Pushing onto the stack invalidates only the stack pointer,
509 writing at a fixed address invalidates only variable addresses,
510 writing in a structure element at variable address
511 invalidates all but scalar variables,
512 and writing in anything else at variable address invalidates everything. */
513
514 struct write_data
515 {
516 int sp : 1; /* Invalidate stack pointer. */
517 int var : 1; /* Invalidate variable addresses. */
518 int nonscalar : 1; /* Invalidate all but scalar variables. */
519 int all : 1; /* Invalidate all memory refs. */
520 };
521
522 /* Define maximum length of a branch path. */
523
524 #define PATHLENGTH 10
525
526 /* This data describes a block that will be processed by cse_basic_block. */
527
528 struct cse_basic_block_data {
529 /* Lowest CUID value of insns in block. */
530 int low_cuid;
531 /* Highest CUID value of insns in block. */
532 int high_cuid;
533 /* Total number of SETs in block. */
534 int nsets;
535 /* Last insn in the block. */
536 rtx last;
537 /* Size of current branch path, if any. */
538 int path_size;
539 /* Current branch path, indicating which branches will be taken. */
540 struct branch_path {
541 /* The branch insn. */
542 rtx branch;
543 /* Whether it should be taken or not. AROUND is the same as TAKEN,
544 except that it is used when the destination label is not preceded
545 by a BARRIER. */
546 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
547 } path[PATHLENGTH];
548 };
549
550 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
551 virtual regs here because the simplify_*_operation routines are called
552 by integrate.c, which is called before virtual register instantiation. */
553
554 #define FIXED_BASE_PLUS_P(X) \
555 ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx \
556 || (X) == virtual_stack_vars_rtx \
557 || (X) == virtual_incoming_args_rtx \
558 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
559 && (XEXP (X, 0) == frame_pointer_rtx \
560 || XEXP (X, 0) == arg_pointer_rtx \
561 || XEXP (X, 0) == virtual_stack_vars_rtx \
562 || XEXP (X, 0) == virtual_incoming_args_rtx)))
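/* For example, (plus frame_pointer_rtx (const_int 8)) -- the address of a
   slot in the current stack frame -- satisfies FIXED_BASE_PLUS_P.  */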
563
564 /* Similar, but also allows reference to the stack pointer.
565
566 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
567 arg_pointer_rtx by itself is nonzero, because on at least one machine,
568 the i960, the arg pointer is zero when it is unused. */
569
570 #define NONZERO_BASE_PLUS_P(X) \
571 ((X) == frame_pointer_rtx \
572 || (X) == virtual_stack_vars_rtx \
573 || (X) == virtual_incoming_args_rtx \
574 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
575 && (XEXP (X, 0) == frame_pointer_rtx \
576 || XEXP (X, 0) == arg_pointer_rtx \
577 || XEXP (X, 0) == virtual_stack_vars_rtx \
578 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
579 || (X) == stack_pointer_rtx \
580 || (X) == virtual_stack_dynamic_rtx \
581 || (X) == virtual_outgoing_args_rtx \
582 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
583 && (XEXP (X, 0) == stack_pointer_rtx \
584 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
585 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
586
587 static void new_basic_block PROTO((void));
588 static void make_new_qty PROTO((int));
589 static void make_regs_eqv PROTO((int, int));
590 static void delete_reg_equiv PROTO((int));
591 static int mention_regs PROTO((rtx));
592 static int insert_regs PROTO((rtx, struct table_elt *, int));
593 static void free_element PROTO((struct table_elt *));
594 static void remove_from_table PROTO((struct table_elt *, int));
595 static struct table_elt *get_element PROTO((void));
596 static struct table_elt *lookup PROTO((rtx, int, enum machine_mode)),
597 *lookup_for_remove PROTO((rtx, int, enum machine_mode));
598 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
599 static struct table_elt *insert PROTO((rtx, struct table_elt *, int,
600 enum machine_mode));
601 static void merge_equiv_classes PROTO((struct table_elt *,
602 struct table_elt *));
603 static void invalidate PROTO((rtx));
604 static void remove_invalid_refs PROTO((int));
605 static void rehash_using_reg PROTO((rtx));
606 static void invalidate_memory PROTO((struct write_data *));
607 static void invalidate_for_call PROTO((void));
608 static rtx use_related_value PROTO((rtx, struct table_elt *));
609 static int canon_hash PROTO((rtx, enum machine_mode));
610 static int safe_hash PROTO((rtx, enum machine_mode));
611 static int exp_equiv_p PROTO((rtx, rtx, int, int));
612 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
613 HOST_WIDE_INT *,
614 HOST_WIDE_INT *));
615 static int refers_to_p PROTO((rtx, rtx));
616 static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
617 HOST_WIDE_INT));
618 static int cse_rtx_addr_varies_p PROTO((rtx));
619 static rtx canon_reg PROTO((rtx, rtx));
620 static void find_best_addr PROTO((rtx, rtx *));
621 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
622 enum machine_mode *,
623 enum machine_mode *));
624 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
625 rtx, rtx));
626 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
627 rtx, rtx));
628 static rtx fold_rtx PROTO((rtx, rtx));
629 static rtx equiv_constant PROTO((rtx));
630 static void record_jump_equiv PROTO((rtx, int));
631 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
632 rtx, rtx, int));
633 static void cse_insn PROTO((rtx, int));
634 static void note_mem_written PROTO((rtx, struct write_data *));
635 static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
636 static rtx cse_process_notes PROTO((rtx, rtx));
637 static void cse_around_loop PROTO((rtx));
638 static void invalidate_skipped_set PROTO((rtx, rtx));
639 static void invalidate_skipped_block PROTO((rtx));
640 static void cse_check_loop_start PROTO((rtx, rtx));
641 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
642 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
643 static void count_reg_usage PROTO((rtx, int *, int));
644 \f
645 /* Return an estimate of the cost of computing rtx X.
646 One use is in cse, to decide which expression to keep in the hash table.
647 Another is in rtl generation, to pick the cheapest way to multiply.
648 Other uses like the latter are expected in the future. */
649
650 /* Return the right cost to give to an operation
651 to make the cost of the corresponding register-to-register instruction
652 N times that of a fast register-to-register instruction. */
653
654 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
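/* To see the scaling: in `rtx_cost' below, a simple two-operand operation
   on pseudo registers costs the default `total' of 2 plus 1 per operand,
   i.e. 4.  An operation charged COSTS_N_INSNS (N) costs 4*N - 2 plus the
   same 2 for its operands, i.e. 4*N -- N times as much.  */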
655
656 int
657 rtx_cost (x, outer_code)
658 rtx x;
659 enum rtx_code outer_code;
660 {
661 register int i, j;
662 register enum rtx_code code;
663 register char *fmt;
664 register int total;
665
666 if (x == 0)
667 return 0;
668
669 /* Compute the default costs of certain things.
670 Note that RTX_COSTS can override the defaults. */
671
672 code = GET_CODE (x);
673 switch (code)
674 {
675 case MULT:
676 /* Count multiplication by 2**n as a shift,
677 because if we are considering it, we would output it as a shift. */
678 if (GET_CODE (XEXP (x, 1)) == CONST_INT
679 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
680 total = 2;
681 else
682 total = COSTS_N_INSNS (5);
683 break;
684 case DIV:
685 case UDIV:
686 case MOD:
687 case UMOD:
688 total = COSTS_N_INSNS (7);
689 break;
690 case USE:
691 /* Used in loop.c and combine.c as a marker. */
692 total = 0;
693 break;
694 case ASM_OPERANDS:
695 /* We don't want these to be used in substitutions because
696 we have no way of validating the resulting insn. So assign
697 anything containing an ASM_OPERANDS a very high cost. */
698 total = 1000;
699 break;
700 default:
701 total = 2;
702 }
703
704 switch (code)
705 {
706 case REG:
707 return ! CHEAP_REG (REGNO (x));
708
709 case SUBREG:
710 /* If we can't tie these modes, make this expensive. The larger
711 the mode, the more expensive it is. */
712 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
713 return COSTS_N_INSNS (2
714 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
715 return 2;
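      /* The machine-description macros RTX_COSTS and CONST_COSTS expand
	 into `case' arms for this switch, each containing its own
	 `return' statement.  */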
716 #ifdef RTX_COSTS
717 RTX_COSTS (x, code, outer_code);
718 #endif
719 CONST_COSTS (x, code, outer_code);
720 }
721
722 /* Sum the costs of the sub-rtx's, plus cost of this operation,
723 which is already in total. */
724
725 fmt = GET_RTX_FORMAT (code);
726 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
727 if (fmt[i] == 'e')
728 total += rtx_cost (XEXP (x, i), code);
729 else if (fmt[i] == 'E')
730 for (j = 0; j < XVECLEN (x, i); j++)
731 total += rtx_cost (XVECEXP (x, i, j), code);
732
733 return total;
734 }
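/* A usage sketch (the exact numbers depend on the machine description):
   for (mult (reg 105) (const_int 8)), the MULT case treats the
   power-of-two multiply as a shift, so `total' starts at 2; the loop
   above then adds 1 for the pseudo register and whatever cost the
   target's CONST_COSTS assigns to (const_int 8).  */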
735 \f
736 /* Clear the hash table and initialize each register with its own quantity,
737 for a new basic block. */
738
739 static void
740 new_basic_block ()
741 {
742 register int i;
743
744 next_qty = max_reg;
745
746 bzero (reg_tick, max_reg * sizeof (int));
747
748 bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
749 bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
750 CLEAR_HARD_REG_SET (hard_regs_in_table);
751
752 /* The per-quantity values used to be initialized here, but it is
753 much faster to initialize each as it is made in `make_new_qty'. */
754
755 for (i = 0; i < NBUCKETS; i++)
756 {
757 register struct table_elt *this, *next;
758 for (this = table[i]; this; this = next)
759 {
760 next = this->next_same_hash;
761 free_element (this);
762 }
763 }
764
765 bzero (table, sizeof table);
766
767 prev_insn = 0;
768
769 #ifdef HAVE_cc0
770 prev_insn_cc0 = 0;
771 #endif
772 }
773
774 /* Say that register REG contains a quantity not previously held in any
775 register, and initialize that quantity. */
776
777 static void
778 make_new_qty (reg)
779 register int reg;
780 {
781 register int q;
782
783 if (next_qty >= max_qty)
784 abort ();
785
786 q = reg_qty[reg] = next_qty++;
787 qty_first_reg[q] = reg;
788 qty_last_reg[q] = reg;
789 qty_const[q] = qty_const_insn[q] = 0;
790 qty_comparison_code[q] = UNKNOWN;
791
792 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
793 }
794
795 /* Make reg NEW equivalent to reg OLD.
796 OLD is not changing; NEW is. */
797
798 static void
799 make_regs_eqv (new, old)
800 register int new, old;
801 {
802 register int lastr, firstr;
803 register int q = reg_qty[old];
804
805 /* Nothing should become eqv until it has a "non-invalid" qty number. */
806 if (! REGNO_QTY_VALID_P (old))
807 abort ();
808
809 reg_qty[new] = q;
810 firstr = qty_first_reg[q];
811 lastr = qty_last_reg[q];
812
813 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
814 hard regs. Among pseudos, if NEW will live longer than any other reg
815 of the same qty, and that is beyond the current basic block,
816 make it the new canonical replacement for this qty. */
817 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
818 /* Certain fixed registers might be of the class NO_REGS. This means
819 that not only can they not be allocated by the compiler, but
820 they cannot be used in substitutions or canonicalizations
821 either. */
822 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
823 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
824 || (new >= FIRST_PSEUDO_REGISTER
825 && (firstr < FIRST_PSEUDO_REGISTER
826 || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
827 || (uid_cuid[regno_first_uid[new]]
828 < cse_basic_block_start))
829 && (uid_cuid[regno_last_uid[new]]
830 > uid_cuid[regno_last_uid[firstr]]))))))
831 {
832 reg_prev_eqv[firstr] = new;
833 reg_next_eqv[new] = firstr;
834 reg_prev_eqv[new] = -1;
835 qty_first_reg[q] = new;
836 }
837 else
838 {
839 /* If NEW is a hard reg (known to be non-fixed), insert at end.
840 Otherwise, insert before any non-fixed hard regs that are at the
841 end. Registers of class NO_REGS cannot be used as an
842 equivalent for anything. */
843 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
844 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
845 && new >= FIRST_PSEUDO_REGISTER)
846 lastr = reg_prev_eqv[lastr];
847 reg_next_eqv[new] = reg_next_eqv[lastr];
848 if (reg_next_eqv[lastr] >= 0)
849 reg_prev_eqv[reg_next_eqv[lastr]] = new;
850 else
851 qty_last_reg[q] = new;
852 reg_next_eqv[lastr] = new;
853 reg_prev_eqv[new] = lastr;
854 }
855 }
856
857 /* Remove REG from its equivalence class. */
858
859 static void
860 delete_reg_equiv (reg)
861 register int reg;
862 {
863 register int n = reg_next_eqv[reg];
864 register int p = reg_prev_eqv[reg];
865 register int q = reg_qty[reg];
866
867 /* If invalid, do nothing. N and P above are undefined in that case. */
868 if (q == reg)
869 return;
870
871 if (n != -1)
872 reg_prev_eqv[n] = p;
873 else
874 qty_last_reg[q] = p;
875 if (p != -1)
876 reg_next_eqv[p] = n;
877 else
878 qty_first_reg[q] = n;
879
880 reg_qty[reg] = reg;
881 }
882
883 /* Remove any invalid expressions from the hash table
884 that refer to any of the registers contained in expression X.
885
886 Make sure that newly inserted references to those registers
887 as subexpressions will be considered valid.
888
889 mention_regs is not called when a register itself
890 is being stored in the table.
891
892 Return 1 if we have done something that may have changed the hash code
893 of X. */
894
895 static int
896 mention_regs (x)
897 rtx x;
898 {
899 register enum rtx_code code;
900 register int i, j;
901 register char *fmt;
902 register int changed = 0;
903
904 if (x == 0)
905 return 0;
906
907 code = GET_CODE (x);
908 if (code == REG)
909 {
910 register int regno = REGNO (x);
911 register int endregno
912 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
913 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
914 int i;
915
916 for (i = regno; i < endregno; i++)
917 {
918 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
919 remove_invalid_refs (i);
920
921 reg_in_table[i] = reg_tick[i];
922 }
923
924 return 0;
925 }
926
927 /* If X is a comparison or a COMPARE and either operand is a register
928 that does not have a quantity, give it one. This is so that a later
929 call to record_jump_equiv won't cause X to be assigned a different
930 hash code and not found in the table after that call.
931
932 It is not necessary to do this here, since rehash_using_reg can
933 fix up the table later, but doing this here eliminates the need to
934 call that expensive function in the most common case where the only
935 use of the register is in the comparison. */
936
937 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
938 {
939 if (GET_CODE (XEXP (x, 0)) == REG
940 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
941 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
942 {
943 rehash_using_reg (XEXP (x, 0));
944 changed = 1;
945 }
946
947 if (GET_CODE (XEXP (x, 1)) == REG
948 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
949 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
950 {
951 rehash_using_reg (XEXP (x, 1));
952 changed = 1;
953 }
954 }
955
956 fmt = GET_RTX_FORMAT (code);
957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
958 if (fmt[i] == 'e')
959 changed |= mention_regs (XEXP (x, i));
960 else if (fmt[i] == 'E')
961 for (j = 0; j < XVECLEN (x, i); j++)
962 changed |= mention_regs (XVECEXP (x, i, j));
963
964 return changed;
965 }
966
967 /* Update the register quantities for inserting X into the hash table
968 with a value equivalent to CLASSP.
969 (If the class does not contain a REG, it is irrelevant.)
970 If MODIFIED is nonzero, X is a destination; it is being modified.
971 Note that delete_reg_equiv should be called on a register
972 before insert_regs is done on that register with MODIFIED != 0.
973
974 Nonzero value means that elements of reg_qty have changed
975 so X's hash code may be different. */
976
977 static int
978 insert_regs (x, classp, modified)
979 rtx x;
980 struct table_elt *classp;
981 int modified;
982 {
983 if (GET_CODE (x) == REG)
984 {
985 register int regno = REGNO (x);
986
987 /* If REGNO is in the equivalence table already but is of the
988 wrong mode for that equivalence, don't do anything here. */
989
990 if (REGNO_QTY_VALID_P (regno)
991 && qty_mode[reg_qty[regno]] != GET_MODE (x))
992 return 0;
993
994 if (modified || ! REGNO_QTY_VALID_P (regno))
995 {
996 if (classp)
997 for (classp = classp->first_same_value;
998 classp != 0;
999 classp = classp->next_same_value)
1000 if (GET_CODE (classp->exp) == REG
1001 && GET_MODE (classp->exp) == GET_MODE (x))
1002 {
1003 make_regs_eqv (regno, REGNO (classp->exp));
1004 return 1;
1005 }
1006
1007 make_new_qty (regno);
1008 qty_mode[reg_qty[regno]] = GET_MODE (x);
1009 return 1;
1010 }
      /* Otherwise, X is already recorded with the right mode and is not
         being modified; reg_qty is unchanged, so return 0. */
      return 0;
1011 }
1012
1013 /* If X is a SUBREG, we will likely be inserting the inner register in the
1014 table. If that register doesn't have an assigned quantity number at
1015 this point but does later, the insertion that we will be doing now will
1016 not be accessible because its hash code will have changed. So assign
1017 a quantity number now. */
1018
1019 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1020 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1021 {
1022 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1023 mention_regs (SUBREG_REG (x));
1024 return 1;
1025 }
1026 else
1027 return mention_regs (x);
1028 }
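/* A sketch of the usual calling sequence (compare `merge_equiv_classes'
   and the comment above `insert'): when insert_regs returns nonzero,
   some element of reg_qty has changed, so X's hash code must be
   recomputed before X is inserted:

	if (insert_regs (x, classp, 0))
	  hash = HASH (x, mode);
	elt = insert (x, classp, hash, mode);
*/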
1029 \f
1030 /* Look in or update the hash table. */
1031
1032 /* Put the element ELT on the list of free elements. */
1033
1034 static void
1035 free_element (elt)
1036 struct table_elt *elt;
1037 {
1038 elt->next_same_hash = free_element_chain;
1039 free_element_chain = elt;
1040 }
1041
1042 /* Return an element that is free for use. */
1043
1044 static struct table_elt *
1045 get_element ()
1046 {
1047 struct table_elt *elt = free_element_chain;
1048 if (elt)
1049 {
1050 free_element_chain = elt->next_same_hash;
1051 return elt;
1052 }
1053 n_elements_made++;
1054 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1055 }
1056
1057 /* Remove table element ELT from use in the table.
1058 HASH is its hash code, made using the HASH macro.
1059 It's an argument because often that is known in advance
1060 and we save much time not recomputing it. */
1061
1062 static void
1063 remove_from_table (elt, hash)
1064 register struct table_elt *elt;
1065 int hash;
1066 {
1067 if (elt == 0)
1068 return;
1069
1070 /* Mark this element as removed. See cse_insn. */
1071 elt->first_same_value = 0;
1072
1073 /* Remove the table element from its equivalence class. */
1074
1075 {
1076 register struct table_elt *prev = elt->prev_same_value;
1077 register struct table_elt *next = elt->next_same_value;
1078
1079 if (next) next->prev_same_value = prev;
1080
1081 if (prev)
1082 prev->next_same_value = next;
1083 else
1084 {
1085 register struct table_elt *newfirst = next;
1086 while (next)
1087 {
1088 next->first_same_value = newfirst;
1089 next = next->next_same_value;
1090 }
1091 }
1092 }
1093
1094 /* Remove the table element from its hash bucket. */
1095
1096 {
1097 register struct table_elt *prev = elt->prev_same_hash;
1098 register struct table_elt *next = elt->next_same_hash;
1099
1100 if (next) next->prev_same_hash = prev;
1101
1102 if (prev)
1103 prev->next_same_hash = next;
1104 else if (table[hash] == elt)
1105 table[hash] = next;
1106 else
1107 {
1108 /* This entry is not in the proper hash bucket. This can happen
1109 when two classes were merged by `merge_equiv_classes'. Search
1110 for the hash bucket that it heads. This happens only very
1111 rarely, so the cost is acceptable. */
1112 for (hash = 0; hash < NBUCKETS; hash++)
1113 if (table[hash] == elt)
1114 table[hash] = next;
1115 }
1116 }
1117
1118 /* Remove the table element from its related-value circular chain. */
1119
1120 if (elt->related_value != 0 && elt->related_value != elt)
1121 {
1122 register struct table_elt *p = elt->related_value;
1123 while (p->related_value != elt)
1124 p = p->related_value;
1125 p->related_value = elt->related_value;
1126 if (p->related_value == p)
1127 p->related_value = 0;
1128 }
1129
1130 free_element (elt);
1131 }
1132
1133 /* Look up X in the hash table and return its table element,
1134 or 0 if X is not in the table.
1135
1136 MODE is the machine-mode of X, or if X is an integer constant
1137 with VOIDmode then MODE is the mode with which X will be used.
1138
1139 Here we are satisfied to find an expression whose tree structure
1140 looks like X. */
1141
1142 static struct table_elt *
1143 lookup (x, hash, mode)
1144 rtx x;
1145 int hash;
1146 enum machine_mode mode;
1147 {
1148 register struct table_elt *p;
1149
1150 for (p = table[hash]; p; p = p->next_same_hash)
1151 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1152 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1153 return p;
1154
1155 return 0;
1156 }
1157
1158 /* Like `lookup' but don't care whether the table element uses invalid regs.
1159 Also ignore discrepancies in the machine mode of a register. */
1160
1161 static struct table_elt *
1162 lookup_for_remove (x, hash, mode)
1163 rtx x;
1164 int hash;
1165 enum machine_mode mode;
1166 {
1167 register struct table_elt *p;
1168
1169 if (GET_CODE (x) == REG)
1170 {
1171 int regno = REGNO (x);
1172 /* Don't check the machine mode when comparing registers;
1173 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1174 for (p = table[hash]; p; p = p->next_same_hash)
1175 if (GET_CODE (p->exp) == REG
1176 && REGNO (p->exp) == regno)
1177 return p;
1178 }
1179 else
1180 {
1181 for (p = table[hash]; p; p = p->next_same_hash)
1182 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1183 return p;
1184 }
1185
1186 return 0;
1187 }
1188
1189 /* Look for an expression equivalent to X and with code CODE.
1190 If one is found, return that expression. */
1191
1192 static rtx
1193 lookup_as_function (x, code)
1194 rtx x;
1195 enum rtx_code code;
1196 {
1197 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1198 GET_MODE (x));
1199 if (p == 0)
1200 return 0;
1201
1202 for (p = p->first_same_value; p; p = p->next_same_value)
1203 {
1204 if (GET_CODE (p->exp) == code
1205 /* Make sure this is a valid entry in the table. */
1206 && exp_equiv_p (p->exp, p->exp, 1, 0))
1207 return p->exp;
1208 }
1209
1210 return 0;
1211 }
1212
1213 /* Insert X in the hash table, assuming HASH is its hash code
1214 and CLASSP is an element of the class it should go in
1215 (or 0 if a new class should be made).
1216 It is inserted at the proper position to keep the class in
1217 the order cheapest first.
1218
1219 MODE is the machine-mode of X, or if X is an integer constant
1220 with VOIDmode then MODE is the mode with which X will be used.
1221
1222 For elements of equal cheapness, the most recent one
1223 goes in front, except that the first element in the list
1224 remains first unless a cheaper element is added. The order of
1225 pseudo-registers does not matter, as canon_reg will be called to
1226 find the cheapest when a register is retrieved from the table.
1227
1228 The in_memory field in the hash table element is set to 0.
1229 The caller must set it nonzero if appropriate.
1230
1231 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1232 and if insert_regs returns a nonzero value
1233 you must then recompute its hash code before calling here.
1234
1235 If necessary, update table showing constant values of quantities. */
1236
1237 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1238
1239 static struct table_elt *
1240 insert (x, classp, hash, mode)
1241 register rtx x;
1242 register struct table_elt *classp;
1243 int hash;
1244 enum machine_mode mode;
1245 {
1246 register struct table_elt *elt;
1247
1248 /* If X is a register and we haven't made a quantity for it,
1249 something is wrong. */
1250 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1251 abort ();
1252
1253 /* If X is a hard register, show it is being put in the table. */
1254 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1255 {
1256 int regno = REGNO (x);
1257 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1258 int i;
1259
1260 for (i = regno; i < endregno; i++)
1261 SET_HARD_REG_BIT (hard_regs_in_table, i);
1262 }
1263
1264
1265 /* Put an element for X into the right hash bucket. */
1266
1267 elt = get_element ();
1268 elt->exp = x;
1269 elt->cost = COST (x);
1270 elt->next_same_value = 0;
1271 elt->prev_same_value = 0;
1272 elt->next_same_hash = table[hash];
1273 elt->prev_same_hash = 0;
1274 elt->related_value = 0;
1275 elt->in_memory = 0;
1276 elt->mode = mode;
1277 elt->is_const = (CONSTANT_P (x)
1278 /* GNU C++ takes advantage of this for `this'
1279 (and other const values). */
1280 || (RTX_UNCHANGING_P (x)
1281 && GET_CODE (x) == REG
1282 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1283 || FIXED_BASE_PLUS_P (x));
1284
1285 if (table[hash])
1286 table[hash]->prev_same_hash = elt;
1287 table[hash] = elt;
1288
1289 /* Put it into the proper value-class. */
1290 if (classp)
1291 {
1292 classp = classp->first_same_value;
1293 if (CHEAPER (elt, classp))
1294 /* Insert at the head of the class */
1295 {
1296 register struct table_elt *p;
1297 elt->next_same_value = classp;
1298 classp->prev_same_value = elt;
1299 elt->first_same_value = elt;
1300
1301 for (p = classp; p; p = p->next_same_value)
1302 p->first_same_value = elt;
1303 }
1304 else
1305 {
1306 /* Insert not at head of the class. */
1307 /* Put it after the last element cheaper than X. */
1308 register struct table_elt *p, *next;
1309 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1310 p = next);
1311 /* Put it after P and before NEXT. */
1312 elt->next_same_value = next;
1313 if (next)
1314 next->prev_same_value = elt;
1315 elt->prev_same_value = p;
1316 p->next_same_value = elt;
1317 elt->first_same_value = classp;
1318 }
1319 }
1320 else
1321 elt->first_same_value = elt;
1322
1323 /* If this is a constant being set equivalent to a register or a register
1324 being set equivalent to a constant, note the constant equivalence.
1325
1326 If this is a constant, it cannot be equivalent to a different constant,
1327 and a constant is the only thing that can be cheaper than a register. So
1328 we know the register is the head of the class (before the constant was
1329 inserted).
1330
1331 If this is a register that is not already known equivalent to a
1332 constant, we must check the entire class.
1333
1334 If this is a register already known equivalent to a constant,
1335 update `qty_const_insn' to show that `this_insn' is the latest
1336 insn making that quantity equivalent to the constant. */
1337
1338 if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
1339 {
1340 qty_const[reg_qty[REGNO (classp->exp)]]
1341 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1342 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1343 }
1344
1345 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
1346 {
1347 register struct table_elt *p;
1348
1349 for (p = classp; p != 0; p = p->next_same_value)
1350 {
1351 if (p->is_const)
1352 {
1353 qty_const[reg_qty[REGNO (x)]]
1354 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1355 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1356 break;
1357 }
1358 }
1359 }
1360
1361 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1362 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1363 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1364
1365 /* If this is a constant with symbolic value,
1366 and it has a term with an explicit integer value,
1367 link it up with related expressions. */
1368 if (GET_CODE (x) == CONST)
1369 {
1370 rtx subexp = get_related_value (x);
1371 int subhash;
1372 struct table_elt *subelt, *subelt_prev;
1373
1374 if (subexp != 0)
1375 {
1376 /* Get the integer-free subexpression in the hash table. */
1377 subhash = safe_hash (subexp, mode) % NBUCKETS;
1378 subelt = lookup (subexp, subhash, mode);
1379 if (subelt == 0)
1380 subelt = insert (subexp, NULL_PTR, subhash, mode);
1381 /* Initialize SUBELT's circular chain if it has none. */
1382 if (subelt->related_value == 0)
1383 subelt->related_value = subelt;
1384 /* Find the element in the circular chain that precedes SUBELT. */
1385 subelt_prev = subelt;
1386 while (subelt_prev->related_value != subelt)
1387 subelt_prev = subelt_prev->related_value;
1388 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1389 This way the element that follows SUBELT is the oldest one. */
1390 elt->related_value = subelt_prev->related_value;
1391 subelt_prev->related_value = elt;
1392 }
1393 }
1394
1395 return elt;
1396 }
1397 \f
1398 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1399 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1400 the two classes equivalent.
1401
1402 CLASS1 will be the surviving class; CLASS2 should not be used after this
1403 call.
1404
1405 Any invalid entries in CLASS2 will not be copied. */
1406
1407 static void
1408 merge_equiv_classes (class1, class2)
1409 struct table_elt *class1, *class2;
1410 {
1411 struct table_elt *elt, *next, *new;
1412
1413 /* Ensure we start with the head of the classes. */
1414 class1 = class1->first_same_value;
1415 class2 = class2->first_same_value;
1416
1417 /* If they were already equal, forget it. */
1418 if (class1 == class2)
1419 return;
1420
1421 for (elt = class2; elt; elt = next)
1422 {
1423 int hash;
1424 rtx exp = elt->exp;
1425 enum machine_mode mode = elt->mode;
1426
1427 next = elt->next_same_value;
1428
1429 /* Remove old entry, make a new one in CLASS1's class.
1430 Don't do this for invalid entries as we cannot find their
1431 hash code (it also isn't necessary). */
1432 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1433 {
1434 hash_arg_in_memory = 0;
1435 hash_arg_in_struct = 0;
1436 hash = HASH (exp, mode);
1437
1438 if (GET_CODE (exp) == REG)
1439 delete_reg_equiv (REGNO (exp));
1440
1441 remove_from_table (elt, hash);
1442
1443 if (insert_regs (exp, class1, 0))
1444 hash = HASH (exp, mode);
1445 new = insert (exp, class1, hash, mode);
1446 new->in_memory = hash_arg_in_memory;
1447 new->in_struct = hash_arg_in_struct;
1448 }
1449 }
1450 }
1451 \f
1452 /* Remove from the hash table, or mark as invalid,
1453 all expressions whose values could be altered by storing in X.
1454 X is a register, a subreg, or a memory reference with nonvarying address
1455 (because, when a memory reference with a varying address is stored in,
1456 all memory references are removed by invalidate_memory
1457 so specific invalidation is superfluous).
1458
1459 A nonvarying address may be just a register or just
1460 a symbol reference, or it may be either of those plus
1461 a numeric offset. */
1462
1463 static void
1464 invalidate (x)
1465 rtx x;
1466 {
1467 register int i;
1468 register struct table_elt *p;
1469 rtx base;
1470 HOST_WIDE_INT start, end;
1471
1472 /* If X is a register, dependencies on its contents
1473 are recorded through the qty number mechanism.
1474 Just change the qty number of the register,
1475 mark it as invalid for expressions that refer to it,
1476 and remove it itself. */
1477
1478 if (GET_CODE (x) == REG)
1479 {
1480 register int regno = REGNO (x);
1481 register int hash = HASH (x, GET_MODE (x));
1482
1483 /* Remove REGNO from any quantity list it might be on and indicate
1484 that its value might have changed. If it is a pseudo, remove its
1485 entry from the hash table.
1486
1487 For a hard register, we do the first two actions above for any
1488 additional hard registers corresponding to X. Then, if any of these
1489 registers are in the table, we must remove any REG entries that
1490 overlap these registers. */
1491
1492 delete_reg_equiv (regno);
1493 reg_tick[regno]++;
1494
1495 if (regno >= FIRST_PSEUDO_REGISTER)
1496 remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
1497 else
1498 {
1499 HOST_WIDE_INT in_table
1500 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1501 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1502 int tregno, tendregno;
1503 register struct table_elt *p, *next;
1504
1505 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1506
1507 for (i = regno + 1; i < endregno; i++)
1508 {
1509 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1510 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1511 delete_reg_equiv (i);
1512 reg_tick[i]++;
1513 }
1514
1515 if (in_table)
1516 for (hash = 0; hash < NBUCKETS; hash++)
1517 for (p = table[hash]; p; p = next)
1518 {
1519 next = p->next_same_hash;
1520
1521 if (GET_CODE (p->exp) != REG
1522 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1523 continue;
1524
1525 tregno = REGNO (p->exp);
1526 tendregno
1527 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1528 if (tendregno > regno && tregno < endregno)
1529 remove_from_table (p, hash);
1530 }
1531 }
1532
1533 return;
1534 }
1535
1536 if (GET_CODE (x) == SUBREG)
1537 {
1538 if (GET_CODE (SUBREG_REG (x)) != REG)
1539 abort ();
1540 invalidate (SUBREG_REG (x));
1541 return;
1542 }
1543
1544 /* X is not a register; it must be a memory reference with
1545 a nonvarying address. Remove all hash table elements
1546 that refer to overlapping pieces of memory. */
1547
1548 if (GET_CODE (x) != MEM)
1549 abort ();
1550
1551 set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
1552 &base, &start, &end);
1553
1554 for (i = 0; i < NBUCKETS; i++)
1555 {
1556 register struct table_elt *next;
1557 for (p = table[i]; p; p = next)
1558 {
1559 next = p->next_same_hash;
1560 if (refers_to_mem_p (p->exp, base, start, end))
1561 remove_from_table (p, i);
1562 }
1563 }
1564 }
1565
1566 /* Remove all expressions that refer to register REGNO,
1567 since they are already invalid, and we are about to
1568 mark that register valid again and don't want the old
1569 expressions to reappear as valid. */
1570
1571 static void
1572 remove_invalid_refs (regno)
1573 int regno;
1574 {
1575 register int i;
1576 register struct table_elt *p, *next;
1577
1578 for (i = 0; i < NBUCKETS; i++)
1579 for (p = table[i]; p; p = next)
1580 {
1581 next = p->next_same_hash;
1582 if (GET_CODE (p->exp) != REG
1583 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1584 remove_from_table (p, i);
1585 }
1586 }
1587 \f
1588 /* Recompute the hash codes of any valid entries in the hash table that
1589 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1590
1591 This is called when we make a jump equivalence. */
1592
1593 static void
1594 rehash_using_reg (x)
1595 rtx x;
1596 {
1597 int i;
1598 struct table_elt *p, *next;
1599 int hash;
1600
1601 if (GET_CODE (x) == SUBREG)
1602 x = SUBREG_REG (x);
1603
1604 /* If X is not a register or if the register is known not to be in any
1605 valid entries in the table, we have no work to do. */
1606
1607 if (GET_CODE (x) != REG
1608 || reg_in_table[REGNO (x)] < 0
1609 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1610 return;
1611
1612 /* Scan all hash chains looking for valid entries that mention X.
1613 If we find one and it is in the wrong hash chain, move it. We can skip
1614 objects that are registers, since they are handled specially. */
1615
1616 for (i = 0; i < NBUCKETS; i++)
1617 for (p = table[i]; p; p = next)
1618 {
1619 next = p->next_same_hash;
1620 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1621 && exp_equiv_p (p->exp, p->exp, 1, 0)
1622 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1623 {
1624 if (p->next_same_hash)
1625 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1626
1627 if (p->prev_same_hash)
1628 p->prev_same_hash->next_same_hash = p->next_same_hash;
1629 else
1630 table[i] = p->next_same_hash;
1631
1632 p->next_same_hash = table[hash];
1633 p->prev_same_hash = 0;
1634 if (table[hash])
1635 table[hash]->prev_same_hash = p;
1636 table[hash] = p;
1637 }
1638 }
1639 }
1640 \f
1641 /* Remove from the hash table all expressions that reference memory,
1642 or some of them as specified by *WRITES. */
1643
1644 static void
1645 invalidate_memory (writes)
1646 struct write_data *writes;
1647 {
1648 register int i;
1649 register struct table_elt *p, *next;
1650 int all = writes->all;
1651 int nonscalar = writes->nonscalar;
1652
1653 for (i = 0; i < NBUCKETS; i++)
1654 for (p = table[i]; p; p = next)
1655 {
1656 next = p->next_same_hash;
1657 if (p->in_memory
1658 && (all
1659 || (nonscalar && p->in_struct)
1660 || cse_rtx_addr_varies_p (p->exp)))
1661 remove_from_table (p, i);
1662 }
1663 }
1664 \f
1665 /* Remove from the hash table any expressions that are call-clobbered
1666 registers. Also update their TICK values. */
1667
1668 static void
1669 invalidate_for_call ()
1670 {
1671 int regno, endregno;
1672 int i;
1673 int hash;
1674 struct table_elt *p, *next;
1675 int in_table = 0;
1676
1677 /* Go through all the hard registers. For each that is clobbered in
1678 a CALL_INSN, remove the register from quantity chains and update
1679 reg_tick if defined. Also see if any of these registers is currently
1680 in the table. */
1681
1682 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1684 {
1685 delete_reg_equiv (regno);
1686 if (reg_tick[regno] >= 0)
1687 reg_tick[regno]++;
1688
1689 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1690 }
1691
1692 /* In the case where we have no call-clobbered hard registers in the
1693 table, we are done. Otherwise, scan the table and remove any
1694 entry that overlaps a call-clobbered register. */
1695
1696 if (in_table)
1697 for (hash = 0; hash < NBUCKETS; hash++)
1698 for (p = table[hash]; p; p = next)
1699 {
1700 next = p->next_same_hash;
1701
1702 if (GET_CODE (p->exp) != REG
1703 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1704 continue;
1705
1706 regno = REGNO (p->exp);
1707 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1708
1709 for (i = regno; i < endregno; i++)
1710 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1711 {
1712 remove_from_table (p, hash);
1713 break;
1714 }
1715 }
1716 }
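/* Example of the overlap test above (editorial; the numbers are made
   up): on a machine where HARD_REGNO_NREGS (10, DFmode) is 2, a table
   entry for (reg:DF 10) spans hard registers 10 and 11, so the entry
   is removed when either register appears in regs_invalidated_by_call.  */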
1717 \f
1718 /* Given an expression X of type CONST,
1719 and ELT which is its table entry (or 0 if it
1720 is not in the hash table),
1721 return an alternate expression for X as a register plus integer.
1722 If none can be found, return 0. */
1723
1724 static rtx
1725 use_related_value (x, elt)
1726 rtx x;
1727 struct table_elt *elt;
1728 {
1729 register struct table_elt *relt = 0;
1730 register struct table_elt *p, *q;
1731 HOST_WIDE_INT offset;
1732
1733 /* First, is there anything related known?
1734 If we have a table element, we can tell from that.
1735 Otherwise, must look it up. */
1736
1737 if (elt != 0 && elt->related_value != 0)
1738 relt = elt;
1739 else if (elt == 0 && GET_CODE (x) == CONST)
1740 {
1741 rtx subexp = get_related_value (x);
1742 if (subexp != 0)
1743 relt = lookup (subexp,
1744 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1745 GET_MODE (subexp));
1746 }
1747
1748 if (relt == 0)
1749 return 0;
1750
1751 /* Search all related table entries for one that has an
1752 equivalent register. */
1753
1754 p = relt;
1755 while (1)
1756 {
1757 /* This loop is strange in that it is executed in two different cases.
1758 The first is when X is already in the table. Then it is searching
1759 the RELATED_VALUE list of X's class (RELT). The second case is when
1760 X is not in the table. Then RELT points to a class for the related
1761 value.
1762
1763 Ensure that, whichever case we are in, we ignore classes that have
1764 the same value as X. */
1765
1766 if (rtx_equal_p (x, p->exp))
1767 q = 0;
1768 else
1769 for (q = p->first_same_value; q; q = q->next_same_value)
1770 if (GET_CODE (q->exp) == REG)
1771 break;
1772
1773 if (q)
1774 break;
1775
1776 p = p->related_value;
1777
1778 /* We went all the way around, so there is nothing to be found.
1779 Alternatively, perhaps RELT was in the table for some other reason
1780 and it has no related values recorded. */
1781 if (p == relt || p == 0)
1782 break;
1783 }
1784
1785 if (q == 0)
1786 return 0;
1787
1788 offset = (get_integer_term (x) - get_integer_term (p->exp));
1789 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1790 return plus_constant (q->exp, offset);
1791 }
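/* A worked example of use_related_value (editorial; the symbol name
   and register number are made up): suppose X is
   (const (plus (symbol_ref "x") (const_int 8))) and (reg 101) is known
   to hold (const (plus (symbol_ref "x") (const_int 4))).  The two
   constants are chained through their related_value fields, OFFSET
   comes out as 8 - 4 = 4, and the function returns

	(plus (reg 101) (const_int 4))

   which is usually cheaper than materializing the constant anew.  */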
1792 \f
1793 /* Hash an rtx. We are careful to make sure the value is never negative.
1794 Equivalent registers hash identically.
1795 MODE is used in hashing for CONST_INTs only;
1796 otherwise the mode of X is used.
1797
1798 Store 1 in do_not_record if any subexpression is volatile.
1799
1800 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1801 which does not have the RTX_UNCHANGING_P bit set.
1802 In this case, also store 1 in hash_arg_in_struct
1803 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1804
1805 Note that cse_insn knows that the hash code of a MEM expression
1806 is just (int) MEM plus the hash code of the address. */
1807
1808 static int
1809 canon_hash (x, mode)
1810 rtx x;
1811 enum machine_mode mode;
1812 {
1813 register int i, j;
1814 register int hash = 0;
1815 register enum rtx_code code;
1816 register char *fmt;
1817
1818 /* repeat is used to turn tail-recursion into iteration. */
1819 repeat:
1820 if (x == 0)
1821 return hash;
1822
1823 code = GET_CODE (x);
1824 switch (code)
1825 {
1826 case REG:
1827 {
1828 register int regno = REGNO (x);
1829
1830 /* On some machines, we can't record any non-fixed hard register,
1831 because extending its life will cause reload problems. We
1832 consider ap, fp, and sp to be fixed for this purpose.
1833 On all machines, we can't record any global registers. */
1834
1835 if (regno < FIRST_PSEUDO_REGISTER
1836 && (global_regs[regno]
1837 #ifdef SMALL_REGISTER_CLASSES
1838 || (! fixed_regs[regno]
1839 && regno != FRAME_POINTER_REGNUM
1840 && regno != ARG_POINTER_REGNUM
1841 && regno != STACK_POINTER_REGNUM)
1842 #endif
1843 ))
1844 {
1845 do_not_record = 1;
1846 return 0;
1847 }
1848 return hash + ((int) REG << 7) + reg_qty[regno];
1849 }
1850
1851 case CONST_INT:
1852 hash += ((int) mode + ((int) CONST_INT << 7)
1853 + INTVAL (x) + (INTVAL (x) >> HASHBITS));
1854 return ((1 << HASHBITS) - 1) & hash;
1855
1856 case CONST_DOUBLE:
1857 /* This is like the general case, except that it hashes only
1858 the integers representing the constant. */
1859 hash += (int) code + (int) GET_MODE (x);
1860 {
1861 int i;
1862 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1863 {
1864 int tem = XINT (x, i);
1865 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1866 }
1867 }
1868 return hash;
1869
1870 /* Assume there is only one rtx object for any given label. */
1871 case LABEL_REF:
1872 /* Use `and' to ensure a positive number. */
1873 return (hash + ((HOST_WIDE_INT) LABEL_REF << 7)
1874 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1875
1876 case SYMBOL_REF:
1877 return (hash + ((HOST_WIDE_INT) SYMBOL_REF << 7)
1878 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1879
1880 case MEM:
1881 if (MEM_VOLATILE_P (x))
1882 {
1883 do_not_record = 1;
1884 return 0;
1885 }
1886 if (! RTX_UNCHANGING_P (x))
1887 {
1888 hash_arg_in_memory = 1;
1889 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1890 }
1891 /* Now that we have already found this special case,
1892 might as well speed it up as much as possible. */
1893 hash += (int) MEM;
1894 x = XEXP (x, 0);
1895 goto repeat;
1896
1897 case PRE_DEC:
1898 case PRE_INC:
1899 case POST_DEC:
1900 case POST_INC:
1901 case PC:
1902 case CC0:
1903 case CALL:
1904 case UNSPEC_VOLATILE:
1905 do_not_record = 1;
1906 return 0;
1907
1908 case ASM_OPERANDS:
1909 if (MEM_VOLATILE_P (x))
1910 {
1911 do_not_record = 1;
1912 return 0;
1913 }
1914 }
1915
1916 i = GET_RTX_LENGTH (code) - 1;
1917 hash += (int) code + (int) GET_MODE (x);
1918 fmt = GET_RTX_FORMAT (code);
1919 for (; i >= 0; i--)
1920 {
1921 if (fmt[i] == 'e')
1922 {
1923 rtx tem = XEXP (x, i);
1924 rtx tem1;
1925
1926 /* If the operand is a REG that is equivalent to a constant, hash
1927 as if we were hashing the constant, since we will be comparing
1928 that way. */
1929 if (tem != 0 && GET_CODE (tem) == REG
1930 && REGNO_QTY_VALID_P (REGNO (tem))
1931 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1932 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1933 && CONSTANT_P (tem1))
1934 tem = tem1;
1935
1936 /* If we are about to do the last recursive call
1937 needed at this level, change it into iteration.
1938 This function is called enough to be worth it. */
1939 if (i == 0)
1940 {
1941 x = tem;
1942 goto repeat;
1943 }
1944 hash += canon_hash (tem, 0);
1945 }
1946 else if (fmt[i] == 'E')
1947 for (j = 0; j < XVECLEN (x, i); j++)
1948 hash += canon_hash (XVECEXP (x, i, j), 0);
1949 else if (fmt[i] == 's')
1950 {
1951 register char *p = XSTR (x, i);
1952 if (p)
1953 while (*p)
1954 {
1955 register int tem = *p++;
1956 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1957 }
1958 }
1959 else if (fmt[i] == 'i')
1960 {
1961 register int tem = XINT (x, i);
1962 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1963 }
1964 else
1965 abort ();
1966 }
1967 return hash;
1968 }
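/* Editorial note on the folding arithmetic used above for strings,
   ints, and CONST_DOUBLEs: each datum TEM is mixed in as

	hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));

   which folds TEM's high-order bits onto its low-order bits and masks
   the contribution to HASHBITS bits, helping keep the accumulated
   value nonnegative as the comment before canon_hash promises.  The
   CONST_INT case applies the same mask to the whole accumulated hash,
   and the callers reduce the result with `% NBUCKETS'.  */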
1969
1970 /* Like canon_hash but with no side effects. */
1971
1972 static int
1973 safe_hash (x, mode)
1974 rtx x;
1975 enum machine_mode mode;
1976 {
1977 int save_do_not_record = do_not_record;
1978 int save_hash_arg_in_memory = hash_arg_in_memory;
1979 int save_hash_arg_in_struct = hash_arg_in_struct;
1980 int hash = canon_hash (x, mode);
1981 hash_arg_in_memory = save_hash_arg_in_memory;
1982 hash_arg_in_struct = save_hash_arg_in_struct;
1983 do_not_record = save_do_not_record;
1984 return hash;
1985 }
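/* For illustration, the usual pattern for hashing an expression and
   then looking it up, as used by use_related_value above and
   find_comparison_args below (editorial sketch):

	hash = safe_hash (x, GET_MODE (x)) % NBUCKETS;
	elt = lookup (x, hash, GET_MODE (x));

   safe_hash rather than canon_hash is used when the caller must not
   disturb do_not_record and the hash_arg_* flags, which hold state
   about the expression currently being hashed for recording.  */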
1986 \f
1987 /* Return 1 iff X and Y would canonicalize into the same thing,
1988 without actually constructing the canonicalization of either one.
1989 If VALIDATE is nonzero,
1990 we assume X is an expression being processed from the rtl
1991 and Y was found in the hash table. We check register refs
1992 in Y for being marked as valid.
1993
1994 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1995 that is known to be in the register. Ordinarily, we don't allow them
1996 to match, because letting them match would cause unpredictable results
1997 in all the places that search a hash table chain for an equivalent
1998 for a given value. A possible equivalent that has different structure
1999 has its hash code computed from different data. Whether the hash code
2000 is the same as that of the given value is pure luck. */
2001
2002 static int
2003 exp_equiv_p (x, y, validate, equal_values)
2004 rtx x, y;
2005 int validate;
2006 int equal_values;
2007 {
2008 register int i, j;
2009 register enum rtx_code code;
2010 register char *fmt;
2011
2012 /* Note: it is incorrect to assume an expression is equivalent to itself
2013 if VALIDATE is nonzero. */
2014 if (x == y && !validate)
2015 return 1;
2016 if (x == 0 || y == 0)
2017 return x == y;
2018
2019 code = GET_CODE (x);
2020 if (code != GET_CODE (y))
2021 {
2022 if (!equal_values)
2023 return 0;
2024
2025 /* If X is a constant and Y is a register or vice versa, they may be
2026 equivalent. We only have to validate if Y is a register. */
2027 if (CONSTANT_P (x) && GET_CODE (y) == REG
2028 && REGNO_QTY_VALID_P (REGNO (y))
2029 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2030 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2031 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2032 return 1;
2033
2034 if (CONSTANT_P (y) && code == REG
2035 && REGNO_QTY_VALID_P (REGNO (x))
2036 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2037 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2038 return 1;
2039
2040 return 0;
2041 }
2042
2043 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2044 if (GET_MODE (x) != GET_MODE (y))
2045 return 0;
2046
2047 switch (code)
2048 {
2049 case PC:
2050 case CC0:
2051 return x == y;
2052
2053 case CONST_INT:
2054 return INTVAL (x) == INTVAL (y);
2055
2056 case LABEL_REF:
2057 case SYMBOL_REF:
2058 return XEXP (x, 0) == XEXP (y, 0);
2059
2060 case REG:
2061 {
2062 int regno = REGNO (y);
2063 int endregno
2064 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2065 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2066 int i;
2067
2068 /* If the quantities are not the same, the expressions are not
2069 equivalent. If they are the same and we are not to validate, they
2070 are equivalent. Otherwise, ensure all regs are up-to-date. */
2071
2072 if (reg_qty[REGNO (x)] != reg_qty[regno])
2073 return 0;
2074
2075 if (! validate)
2076 return 1;
2077
2078 for (i = regno; i < endregno; i++)
2079 if (reg_in_table[i] != reg_tick[i])
2080 return 0;
2081
2082 return 1;
2083 }
2084
2085 /* For commutative operations, check both orders. */
2086 case PLUS:
2087 case MULT:
2088 case AND:
2089 case IOR:
2090 case XOR:
2091 case NE:
2092 case EQ:
2093 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2094 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2095 validate, equal_values))
2096 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2097 validate, equal_values)
2098 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2099 validate, equal_values)));
2100 }
2101
2102 /* Compare the elements. If any pair of corresponding elements
2103 fails to match, return 0 for the whole thing. */
2104
2105 fmt = GET_RTX_FORMAT (code);
2106 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2107 {
2108 switch (fmt[i])
2109 {
2110 case 'e':
2111 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2112 return 0;
2113 break;
2114
2115 case 'E':
2116 if (XVECLEN (x, i) != XVECLEN (y, i))
2117 return 0;
2118 for (j = 0; j < XVECLEN (x, i); j++)
2119 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2120 validate, equal_values))
2121 return 0;
2122 break;
2123
2124 case 's':
2125 if (strcmp (XSTR (x, i), XSTR (y, i)))
2126 return 0;
2127 break;
2128
2129 case 'i':
2130 if (XINT (x, i) != XINT (y, i))
2131 return 0;
2132 break;
2133
2134 case 'w':
2135 if (XWINT (x, i) != XWINT (y, i))
2136 return 0;
2137 break;
2138
2139 case '0':
2140 break;
2141
2142 default:
2143 abort ();
2144 }
2145 }
2146
2147 return 1;
2148 }
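/* A small example of the EQUAL_VALUES case above (editorial; the
   register number is made up): if (reg:SI 101) belongs to a quantity
   whose qty_const is (const_int 4), then

	exp_equiv_p (GEN_INT (4), reg101, 1, 1)

   returns 1, provided the register's mode matches its quantity's mode
   and reg_in_table[101] still equals reg_tick[101].  With EQUAL_VALUES
   zero the same call returns 0, since the codes differ.  */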
2149 \f
2150 /* Return 1 iff any subexpression of X matches Y.
2151 Here we do not require that X or Y be valid (for registers referred to)
2152 for being in the hash table. */
2153
2154 static int
2155 refers_to_p (x, y)
2156 rtx x, y;
2157 {
2158 register int i;
2159 register enum rtx_code code;
2160 register char *fmt;
2161
2162 repeat:
2163 if (x == y)
2164 return 1;
2165 if (x == 0 || y == 0)
2166 return 0;
2167
2168 code = GET_CODE (x);
2169 /* If X as a whole has the same code as Y, they may match.
2170 If so, return 1. */
2171 if (code == GET_CODE (y))
2172 {
2173 if (exp_equiv_p (x, y, 0, 1))
2174 return 1;
2175 }
2176
2177 /* X does not match, so try its subexpressions. */
2178
2179 fmt = GET_RTX_FORMAT (code);
2180 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2181 if (fmt[i] == 'e')
2182 {
2183 if (i == 0)
2184 {
2185 x = XEXP (x, 0);
2186 goto repeat;
2187 }
2188 else
2189 if (refers_to_p (XEXP (x, i), y))
2190 return 1;
2191 }
2192 else if (fmt[i] == 'E')
2193 {
2194 int j;
2195 for (j = 0; j < XVECLEN (x, i); j++)
2196 if (refers_to_p (XVECEXP (x, i, j), y))
2197 return 1;
2198 }
2199
2200 return 0;
2201 }
2202 \f
2203 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2204 set PBASE, PSTART, and PEND which correspond to the base of the address,
2205 the starting offset, and ending offset respectively.
2206
2207 ADDR is known to be a nonvarying address.
2208
2209 cse_rtx_addr_varies_p returns zero for nonvarying addresses. */
2210
2211 static void
2212 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2213 rtx addr;
2214 int size;
2215 rtx *pbase;
2216 HOST_WIDE_INT *pstart, *pend;
2217 {
2218 rtx base;
2219 HOST_WIDE_INT start, end;
2220
2221 base = addr;
2222 start = 0;
2223 end = 0;
2224
2225 /* Registers with nonvarying addresses usually have constant equivalents;
2226 but the frame pointer register is also a possibility. */
2227 if (GET_CODE (base) == REG
2228 && qty_const != 0
2229 && REGNO_QTY_VALID_P (REGNO (base))
2230 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2231 && qty_const[reg_qty[REGNO (base)]] != 0)
2232 base = qty_const[reg_qty[REGNO (base)]];
2233 else if (GET_CODE (base) == PLUS
2234 && GET_CODE (XEXP (base, 1)) == CONST_INT
2235 && GET_CODE (XEXP (base, 0)) == REG
2236 && qty_const != 0
2237 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2238 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2239 == GET_MODE (XEXP (base, 0)))
2240 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2241 {
2242 start = INTVAL (XEXP (base, 1));
2243 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2244 }
2245
2246 /* By definition, operand1 of a LO_SUM is the associated constant
2247 address. Use the associated constant address as the base instead. */
2248 if (GET_CODE (base) == LO_SUM)
2249 base = XEXP (base, 1);
2250
2251 /* Strip off CONST. */
2252 if (GET_CODE (base) == CONST)
2253 base = XEXP (base, 0);
2254
2255 if (GET_CODE (base) == PLUS
2256 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2257 {
2258 start += INTVAL (XEXP (base, 1));
2259 base = XEXP (base, 0);
2260 }
2261
2262 end = start + size;
2263
2264 /* Set the return values. */
2265 *pbase = base;
2266 *pstart = start;
2267 *pend = end;
2268 }
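/* A worked example (editorial; the names and numbers are made up): if
   ADDR is (plus (reg 101) (const_int 12)), SIZE is 4, and reg 101's
   quantity has the known constant (symbol_ref "buf"), the else-if arm
   above substitutes the constant, giving

	*pbase = (symbol_ref "buf"), *pstart = 12, *pend = 16.

   Two nonvarying references can conflict only if their bases are equal
   and their [start, end) intervals overlap; see refers_to_mem_p below.  */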
2269
2270 /* Return 1 iff any subexpression of X refers to memory
2271 at an address of BASE plus some offset
2272 such that any of the bytes' offsets fall between START (inclusive)
2273 and END (exclusive).
2274
2275 The value is undefined if X is a varying address (as determined by
2276 cse_rtx_addr_varies_p). This function is not used in such cases.
2277
2278 When used in the cse pass, `qty_const' is nonzero, and it is used
2279 to treat an address that is a register with a known constant value
2280 as if it were that constant value.
2281 In the loop pass, `qty_const' is zero, so this is not done. */
2282
2283 static int
2284 refers_to_mem_p (x, base, start, end)
2285 rtx x, base;
2286 HOST_WIDE_INT start, end;
2287 {
2288 register HOST_WIDE_INT i;
2289 register enum rtx_code code;
2290 register char *fmt;
2291
2292 if (GET_CODE (base) == CONST_INT)
2293 {
2294 start += INTVAL (base);
2295 end += INTVAL (base);
2296 base = const0_rtx;
2297 }
2298
2299 repeat:
2300 if (x == 0)
2301 return 0;
2302
2303 code = GET_CODE (x);
2304 if (code == MEM)
2305 {
2306 register rtx addr = XEXP (x, 0); /* Get the address. */
2307 rtx mybase;
2308 HOST_WIDE_INT mystart, myend;
2309
2310 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2311 &mybase, &mystart, &myend);
2312
2313
2314 /* refers_to_mem_p is never called with varying addresses.
2315 If the base addresses are not equal, there is no chance
2316 of the memory addresses conflicting. */
2317 if (! rtx_equal_p (mybase, base))
2318 return 0;
2319
2320 return myend > start && mystart < end;
2321 }
2322
2323 /* X does not match, so try its subexpressions. */
2324
2325 fmt = GET_RTX_FORMAT (code);
2326 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2327 if (fmt[i] == 'e')
2328 {
2329 if (i == 0)
2330 {
2331 x = XEXP (x, 0);
2332 goto repeat;
2333 }
2334 else
2335 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2336 return 1;
2337 }
2338 else if (fmt[i] == 'E')
2339 {
2340 int j;
2341 for (j = 0; j < XVECLEN (x, i); j++)
2342 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2343 return 1;
2344 }
2345
2346 return 0;
2347 }
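/* Overlap illustration for the test above (editorial; the symbol name
   is made up): with BASE = (symbol_ref "buf") and [START, END) =
   [12, 16), a MEM inside X whose components come out as the same base
   with [mystart, myend) = [14, 18) satisfies

	myend > start && mystart < end

   and therefore conflicts, while [16, 20) does not: the intervals are
   half-open, so a reference beginning exactly at END is disjoint.  */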
2348
2349 /* Nonzero if X refers to memory at a varying address;
2350 except that a register which has at the moment a known constant value
2351 isn't considered variable. */
2352
2353 static int
2354 cse_rtx_addr_varies_p (x)
2355 rtx x;
2356 {
2357 /* We need not check for X and the equivalence class being of the same
2358 mode because if X is equivalent to a constant in some mode, it
2359 doesn't vary in any mode. */
2360
2361 if (GET_CODE (x) == MEM
2362 && GET_CODE (XEXP (x, 0)) == REG
2363 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2364 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2365 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2366 return 0;
2367
2368 if (GET_CODE (x) == MEM
2369 && GET_CODE (XEXP (x, 0)) == PLUS
2370 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2371 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2372 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2373 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2374 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2375 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2376 return 0;
2377
2378 return rtx_addr_varies_p (x);
2379 }
2380 \f
2381 /* Canonicalize an expression:
2382 replace each register reference inside it
2383 with the "oldest" equivalent register.
2384
2385 If INSN is non-zero and we are replacing a pseudo with a hard register
2386 or vice versa, validate_change is used to ensure that INSN remains valid
2387 after we make our substitution. The calls are made with IN_GROUP non-zero
2388 so apply_change_group must be called upon the outermost return from this
2389 function (unless INSN is zero). The result of apply_change_group can
2390 generally be discarded since the changes we are making are optional. */
2391
2392 static rtx
2393 canon_reg (x, insn)
2394 rtx x;
2395 rtx insn;
2396 {
2397 register int i;
2398 register enum rtx_code code;
2399 register char *fmt;
2400
2401 if (x == 0)
2402 return x;
2403
2404 code = GET_CODE (x);
2405 switch (code)
2406 {
2407 case PC:
2408 case CC0:
2409 case CONST:
2410 case CONST_INT:
2411 case CONST_DOUBLE:
2412 case SYMBOL_REF:
2413 case LABEL_REF:
2414 case ADDR_VEC:
2415 case ADDR_DIFF_VEC:
2416 return x;
2417
2418 case REG:
2419 {
2420 register int first;
2421
2422 /* Never replace a hard reg, because hard regs can appear
2423 in more than one machine mode, and we must preserve the mode
2424 of each occurrence. Also, some hard regs appear in
2425 MEMs that are shared and mustn't be altered. Don't try to
2426 replace any reg that maps to a reg of class NO_REGS. */
2427 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2428 || ! REGNO_QTY_VALID_P (REGNO (x)))
2429 return x;
2430
2431 first = qty_first_reg[reg_qty[REGNO (x)]];
2432 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2433 : REGNO_REG_CLASS (first) == NO_REGS ? x
2434 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2435 }
2436 }
2437
2438 fmt = GET_RTX_FORMAT (code);
2439 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2440 {
2441 register int j;
2442
2443 if (fmt[i] == 'e')
2444 {
2445 rtx new = canon_reg (XEXP (x, i), insn);
2446
2447 /* If replacing pseudo with hard reg or vice versa, ensure the
2448 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2449 if (insn != 0 && new != 0
2450 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2451 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2452 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2453 || insn_n_dups[recog_memoized (insn)] > 0))
2454 validate_change (insn, &XEXP (x, i), new, 1);
2455 else
2456 XEXP (x, i) = new;
2457 }
2458 else if (fmt[i] == 'E')
2459 for (j = 0; j < XVECLEN (x, i); j++)
2460 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2461 }
2462
2463 return x;
2464 }
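/* Illustration of canon_reg (editorial; register numbers are made up):
   if (reg 101) and (reg 105) share a quantity and 101 is the oldest
   (first) register of that quantity, then

	canon_reg (gen_rtx (PLUS, SImode, reg105, GEN_INT (4)), insn)

   rewrites the expression into (plus:SI (reg 101) (const_int 4)),
   using validate_change when a hard register would replace a pseudo
   or vice versa, as described above.  */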
2465 \f
2466 /* LOC is a location with INSN that is an operand address (the contents of
2467 a MEM). Find the best equivalent address to use that is valid for this
2468 insn.
2469
2470 On most CISC machines, complicated address modes are costly, and rtx_cost
2471 is a good approximation for that cost. However, most RISC machines have
2472 only a few (usually only one) memory reference formats. If an address is
2473 valid at all, it is often just as cheap as any other address. Hence, for
2474 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2475 costs of various addresses. For two addresses of equal cost, choose the one
2476 with the highest `rtx_cost' value as that has the potential of eliminating
2477 the most insns. For equal costs, we choose the first in the equivalence
2478 class. Note that we ignore the fact that pseudo registers are cheaper
2479 than hard registers here because we would also prefer the pseudo registers.
2480 */
2481
2482 static void
2483 find_best_addr (insn, loc)
2484 rtx insn;
2485 rtx *loc;
2486 {
2487 struct table_elt *elt, *p;
2488 rtx addr = *loc;
2489 int our_cost;
2490 int found_better = 1;
2491 int save_do_not_record = do_not_record;
2492 int save_hash_arg_in_memory = hash_arg_in_memory;
2493 int save_hash_arg_in_struct = hash_arg_in_struct;
2494 int hash_code;
2495 int addr_volatile;
2496 int regno;
2497
2498 /* Do not try to replace constant addresses or addresses of local and
2499 argument slots. These MEM expressions are made only once and inserted
2500 in many instructions, as well as being used to control symbol table
2501 output. It is not safe to clobber them.
2502
2503 There are some uncommon cases where the address is already in a register
2504 for some reason, but we cannot take advantage of that because we have
2505 no easy way to unshare the MEM. In addition, looking up all stack
2506 addresses is costly. */
2507 if ((GET_CODE (addr) == PLUS
2508 && GET_CODE (XEXP (addr, 0)) == REG
2509 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2510 && (regno = REGNO (XEXP (addr, 0)),
2511 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2512 || (GET_CODE (addr) == REG
2513 && (regno = REGNO (addr),
2514 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2515 || CONSTANT_ADDRESS_P (addr))
2516 return;
2517
2518 /* If this address is not simply a register, try to fold it. This will
2519 sometimes simplify the expression. Many simplifications
2520 will not be valid, but some, usually applying the associative rule, will
2521 be valid and produce better code. */
2522 if (GET_CODE (addr) != REG
2523 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2524 addr = *loc;
2525
2526 /* If this address is not in the hash table, we can't look for equivalences
2527 of the whole address. Also, ignore if volatile. */
2528
2529 do_not_record = 0;
2530 hash_code = HASH (addr, Pmode);
2531 addr_volatile = do_not_record;
2532 do_not_record = save_do_not_record;
2533 hash_arg_in_memory = save_hash_arg_in_memory;
2534 hash_arg_in_struct = save_hash_arg_in_struct;
2535
2536 if (addr_volatile)
2537 return;
2538
2539 elt = lookup (addr, hash_code, Pmode);
2540
2541 #ifndef ADDRESS_COST
2542 if (elt)
2543 {
2544 our_cost = elt->cost;
2545
2546 /* Find the lowest cost below ours that works. */
2547 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2548 if (elt->cost < our_cost
2549 && (GET_CODE (elt->exp) == REG
2550 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2551 && validate_change (insn, loc,
2552 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2553 return;
2554 }
2555 #else
2556
2557 if (elt)
2558 {
2559 /* We need to find the best (under the criteria documented above) entry
2560 in the class that is valid. We use the `flag' field to indicate
2561 choices that were invalid and iterate until we can't find a better
2562 one that hasn't already been tried. */
2563
2564 for (p = elt->first_same_value; p; p = p->next_same_value)
2565 p->flag = 0;
2566
2567 while (found_better)
2568 {
2569 int best_addr_cost = ADDRESS_COST (*loc);
2570 int best_rtx_cost = (elt->cost + 1) >> 1;
2571 struct table_elt *best_elt = elt;
2572
2573 found_better = 0;
2574 for (p = elt->first_same_value; p; p = p->next_same_value)
2575 if (! p->flag
2576 && (GET_CODE (p->exp) == REG
2577 || exp_equiv_p (p->exp, p->exp, 1, 0))
2578 && (ADDRESS_COST (p->exp) < best_addr_cost
2579 || (ADDRESS_COST (p->exp) == best_addr_cost
2580 && (p->cost + 1) >> 1 > best_rtx_cost)))
2581 {
2582 found_better = 1;
2583 best_addr_cost = ADDRESS_COST (p->exp);
2584 best_rtx_cost = (p->cost + 1) >> 1;
2585 best_elt = p;
2586 }
2587
2588 if (found_better)
2589 {
2590 if (validate_change (insn, loc,
2591 canon_reg (copy_rtx (best_elt->exp),
2592 NULL_RTX), 0))
2593 return;
2594 else
2595 best_elt->flag = 1;
2596 }
2597 }
2598 }
2599
2600 /* If the address is a binary operation with the first operand a register
2601 and the second a constant, do the same as above, but looking for
2602 equivalences of the register. Then try to simplify before checking for
2603 the best address to use. This catches a few cases: First is when we
2604 have REG+const and the register is another REG+const. We can often merge
2605 the constants and eliminate one insn and one register. It may also be
2606 that a machine has a cheap REG+REG+const. Finally, this improves the
2607 code on the Alpha for unaligned byte stores. */
2608
2609 if (flag_expensive_optimizations
2610 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2611 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2612 && GET_CODE (XEXP (*loc, 0)) == REG
2613 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2614 {
2615 rtx c = XEXP (*loc, 1);
2616
2617 do_not_record = 0;
2618 hash_code = HASH (XEXP (*loc, 0), Pmode);
2619 do_not_record = save_do_not_record;
2620 hash_arg_in_memory = save_hash_arg_in_memory;
2621 hash_arg_in_struct = save_hash_arg_in_struct;
2622
2623 elt = lookup (XEXP (*loc, 0), hash_code, Pmode);
2624 if (elt == 0)
2625 return;
2626
2627 /* We need to find the best (under the criteria documented above) entry
2628 in the class that is valid. We use the `flag' field to indicate
2629 choices that were invalid and iterate until we can't find a better
2630 one that hasn't already been tried. */
2631
2632 for (p = elt->first_same_value; p; p = p->next_same_value)
2633 p->flag = 0;
2634
2635 while (found_better)
2636 {
2637 int best_addr_cost = ADDRESS_COST (*loc);
2638 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2639 struct table_elt *best_elt = elt;
2640 rtx best_rtx = *loc;
2641
2642 found_better = 0;
2643 for (p = elt->first_same_value; p; p = p->next_same_value)
2644 if (! p->flag
2645 && (GET_CODE (p->exp) == REG
2646 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2647 {
2648 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2649
2650 if ((ADDRESS_COST (new) < best_addr_cost
2651 || (ADDRESS_COST (new) == best_addr_cost
2652 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2653 {
2654 found_better = 1;
2655 best_addr_cost = ADDRESS_COST (new);
2656 best_rtx_cost = (COST (new) + 1) >> 1;
2657 best_elt = p;
2658 best_rtx = new;
2659 }
2660 }
2661
2662 if (found_better)
2663 {
2664 if (validate_change (insn, loc,
2665 canon_reg (copy_rtx (best_rtx),
2666 NULL_RTX), 0))
2667 return;
2668 else
2669 best_elt->flag = 1;
2670 }
2671 }
2672 }
2673 #endif
2674 }
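/* A sketch of the REG+const case above (editorial; the numbers are
   made up, and the folding relies on simplify_plus_minus): with
   *LOC = (plus (reg 105) (const_int 4)) and an equivalence class for
   (reg 105) containing (plus (reg 101) (const_int 8)), cse_gen_binary
   can fold the constants so that the candidate address

	(plus (reg 101) (const_int 12))

   is costed against the original; if ADDRESS_COST prefers it and
   validate_change accepts it, the insn is rewritten, merging the
   constants and saving an insn and a register as noted above.  */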
2675 \f
2676 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2677 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2678 find what values are being compared.
2679
2680 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2681 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2682 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2683 compared to produce cc0.
2684
2685 The return value is the comparison operator and is either CODE itself
2686 or the code corresponding to the inverse of the comparison. */
2687
2688 static enum rtx_code
2689 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2690 enum rtx_code code;
2691 rtx *parg1, *parg2;
2692 enum machine_mode *pmode1, *pmode2;
2693 {
2694 rtx arg1, arg2;
2695
2696 arg1 = *parg1, arg2 = *parg2;
2697
2698 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2699
2700 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2701 {
2702 /* Set non-zero when we find something of interest. */
2703 rtx x = 0;
2704 int reverse_code = 0;
2705 struct table_elt *p = 0;
2706
2707 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2708 On machines with CC0, this is the only case that can occur, since
2709 fold_rtx will return the COMPARE or item being compared with zero
2710 when given CC0. */
2711
2712 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2713 x = arg1;
2714
2715 /* If ARG1 is a comparison operator and CODE is testing for
2716 STORE_FLAG_VALUE, get the inner arguments. */
2717
2718 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2719 {
2720 if (code == NE
2721 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2722 && code == LT && STORE_FLAG_VALUE == -1)
2723 #ifdef FLOAT_STORE_FLAG_VALUE
2724 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2725 && FLOAT_STORE_FLAG_VALUE < 0)
2726 #endif
2727 )
2728 x = arg1;
2729 else if (code == EQ
2730 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2731 && code == GE && STORE_FLAG_VALUE == -1)
2732 #ifdef FLOAT_STORE_FLAG_VALUE
2733 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2734 && FLOAT_STORE_FLAG_VALUE < 0)
2735 #endif
2736 )
2737 x = arg1, reverse_code = 1;
2738 }
2739
2740 /* ??? We could also check for
2741
2742 (ne (and (eq (...) (const_int 1))) (const_int 0))
2743
2744 and related forms, but let's wait until we see them occurring. */
2745
2746 if (x == 0)
2747 /* Look up ARG1 in the hash table and see if it has an equivalence
2748 that lets us see what is being compared. */
2749 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2750 GET_MODE (arg1));
2751 if (p) p = p->first_same_value;
2752
2753 for (; p; p = p->next_same_value)
2754 {
2755 enum machine_mode inner_mode = GET_MODE (p->exp);
2756
2757 /* If the entry isn't valid, skip it. */
2758 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2759 continue;
2760
2761 if (GET_CODE (p->exp) == COMPARE
2762 /* Another possibility is that this machine has a compare insn
2763 that includes the comparison code. In that case, ARG1 would
2764 be equivalent to a comparison operation that would set ARG1 to
2765 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2766 the comparison found is the actual comparison being done; if it
2767 is an EQ, we must reverse that comparison. On machines with a
2768 negative value for STORE_FLAG_VALUE, also look at LT and GE operations. */
2769 || ((code == NE
2770 || (code == LT
2771 && GET_MODE_CLASS (inner_mode) == MODE_INT
2772 && (GET_MODE_BITSIZE (inner_mode)
2773 <= HOST_BITS_PER_WIDE_INT)
2774 && (STORE_FLAG_VALUE
2775 & ((HOST_WIDE_INT) 1
2776 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2777 #ifdef FLOAT_STORE_FLAG_VALUE
2778 || (code == LT
2779 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2780 && FLOAT_STORE_FLAG_VALUE < 0)
2781 #endif
2782 )
2783 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2784 {
2785 x = p->exp;
2786 break;
2787 }
2788 else if ((code == EQ
2789 || (code == GE
2790 && GET_MODE_CLASS (inner_mode) == MODE_INT
2791 && (GET_MODE_BITSIZE (inner_mode)
2792 <= HOST_BITS_PER_WIDE_INT)
2793 && (STORE_FLAG_VALUE
2794 & ((HOST_WIDE_INT) 1
2795 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2796 #ifdef FLOAT_STORE_FLAG_VALUE
2797 || (code == GE
2798 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2799 && FLOAT_STORE_FLAG_VALUE < 0)
2800 #endif
2801 )
2802 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2803 {
2804 reverse_code = 1;
2805 x = p->exp;
2806 break;
2807 }
2808
2809 /* If this is fp + constant, the equivalent is a better operand since
2810 it may let us predict the value of the comparison. */
2811 else if (NONZERO_BASE_PLUS_P (p->exp))
2812 {
2813 arg1 = p->exp;
2814 continue;
2815 }
2816 }
2817
2818 /* If we didn't find a useful equivalence for ARG1, we are done.
2819 Otherwise, set up for the next iteration. */
2820 if (x == 0)
2821 break;
2822
2823 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2824 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2825 code = GET_CODE (x);
2826
2827 if (reverse_code)
2828 code = reverse_condition (code);
2829 }
2830
2831 /* Return our results. Return the modes from before fold_rtx
2832 because fold_rtx might produce a CONST_INT, and then the mode would be lost (CONST_INTs carry no mode). */
2833 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2834 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2835
2836 return code;
2837 }
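/* Example of the chase performed above (editorial; register numbers
   are made up): on a CC0 machine a conditional jump tests something
   like (eq (cc0) (const_int 0)), and fold_rtx hands this function the
   COMPARE that set cc0, say (compare (reg 101) (reg 105)).  Starting
   from CODE = EQ, ARG1 = that COMPARE, ARG2 = const0_rtx, the first
   test in the loop extracts the operands, leaving *PARG1 = (reg 101)
   and *PARG2 = (reg 105) with CODE still EQ, so the caller reasons
   about the real comparison rather than the flag value.  */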
2838 \f
2839 /* Try to simplify a unary operation CODE whose output mode is to be
2840 MODE with input operand OP whose mode was originally OP_MODE.
2841 Return zero if no simplification can be made. */
2842
2843 rtx
2844 simplify_unary_operation (code, mode, op, op_mode)
2845 enum rtx_code code;
2846 enum machine_mode mode;
2847 rtx op;
2848 enum machine_mode op_mode;
2849 {
2850 register int width = GET_MODE_BITSIZE (mode);
2851
2852 /* The order of these tests is critical so that, for example, we don't
2853 check the wrong mode (input vs. output) for a conversion operation,
2854 such as FIX. At some point, this should be simplified. */
2855
2856 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2857 if (code == FLOAT && GET_CODE (op) == CONST_INT)
2858 {
2859 REAL_VALUE_TYPE d;
2860
2861 #ifdef REAL_ARITHMETIC
2862 REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
2863 #else
2864 d = (double) INTVAL (op);
2865 #endif
2866 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2867 }
2868 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
2869 {
2870 REAL_VALUE_TYPE d;
2871
2872 #ifdef REAL_ARITHMETIC
2873 REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
2874 #else
2875 d = (double) (unsigned int) INTVAL (op);
2876 #endif
2877 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2878 }
2879
2880 else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
2881 && GET_MODE (op) == VOIDmode)
2882 {
2883 REAL_VALUE_TYPE d;
2884
2885 #ifdef REAL_ARITHMETIC
2886 REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
2887 #else
2888 if (CONST_DOUBLE_HIGH (op) < 0)
2889 {
2890 d = (double) (~ CONST_DOUBLE_HIGH (op));
2891 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2892 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2893 d += (double) (unsigned HOST_WIDE_INT) (~ CONST_DOUBLE_LOW (op));
2894 d = (- d - 1.0);
2895 }
2896 else
2897 {
2898 d = (double) CONST_DOUBLE_HIGH (op);
2899 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2900 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2901 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2902 }
2903 #endif /* REAL_ARITHMETIC */
2904 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2905 }
2906 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
2907 && GET_MODE (op) == VOIDmode)
2908 {
2909 REAL_VALUE_TYPE d;
2910
2911 #ifdef REAL_ARITHMETIC
2912 REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
2913 CONST_DOUBLE_HIGH (op));
2914 #else
2915 d = (double) CONST_DOUBLE_HIGH (op);
2916 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2917 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2918 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2919 #endif /* REAL_ARITHMETIC */
2920 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2921 }
2922 #endif
2923
2924 if (GET_CODE (op) == CONST_INT
2925 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2926 {
2927 register HOST_WIDE_INT arg0 = INTVAL (op);
2928 register HOST_WIDE_INT val;
2929
2930 switch (code)
2931 {
2932 case NOT:
2933 val = ~ arg0;
2934 break;
2935
2936 case NEG:
2937 val = - arg0;
2938 break;
2939
2940 case ABS:
2941 val = (arg0 >= 0 ? arg0 : - arg0);
2942 break;
2943
2944 case FFS:
2945 /* Don't use ffs here. Instead, get low order bit and then its
2946 number. If arg0 is zero, this will return 0, as desired. */
2947 arg0 &= GET_MODE_MASK (mode);
2948 val = exact_log2 (arg0 & (- arg0)) + 1;
2949 break;
2950
2951 case TRUNCATE:
2952 val = arg0;
2953 break;
2954
2955 case ZERO_EXTEND:
2956 if (op_mode == VOIDmode)
2957 op_mode = mode;
2958 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2959 {
2960 /* If we were really extending the mode,
2961 we would have to distinguish between zero-extension
2962 and sign-extension. */
2963 if (width != GET_MODE_BITSIZE (op_mode))
2964 abort ();
2965 val = arg0;
2966 }
2967 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2968 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2969 else
2970 return 0;
2971 break;
2972
2973 case SIGN_EXTEND:
2974 if (op_mode == VOIDmode)
2975 op_mode = mode;
2976 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2977 {
2978 /* If we were really extending the mode,
2979 we would have to distinguish between zero-extension
2980 and sign-extension. */
2981 if (width != GET_MODE_BITSIZE (op_mode))
2982 abort ();
2983 val = arg0;
2984 }
2985 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2986 {
2987 val
2988 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2989 if (val
2990 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
2991 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
2992 }
2993 else
2994 return 0;
2995 break;
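	  /* Arithmetic check of the SIGN_EXTEND case above (editorial):
	     with OP_MODE QImode (8 bits) and ARG0 = 0x1ff, the mask
	     leaves VAL = 0xff; bit 0x80 is set, so VAL -= 0x100 yields
	     -1, the correct signed value of the byte 0xff.  */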
2996
2997 case SQRT:
2998 return 0;
2999
3000 default:
3001 abort ();
3002 }
3003
3004 /* Clear the bits that don't belong in our mode,
3005 unless they and our sign bit are all one.
3006 So we get either a reasonable negative value or a reasonable
3007 unsigned value for this mode. */
3008 if (width < HOST_BITS_PER_WIDE_INT
3009 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3010 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3011 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3012
3013 return GEN_INT (val);
3014 }
3015
3016 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3017 for a DImode operation on a CONST_INT. */
3018 else if (GET_MODE (op) == VOIDmode
3019 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3020 {
3021 HOST_WIDE_INT l1, h1, lv, hv;
3022
3023 if (GET_CODE (op) == CONST_DOUBLE)
3024 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3025 else
3026 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3027
3028 switch (code)
3029 {
3030 case NOT:
3031 lv = ~ l1;
3032 hv = ~ h1;
3033 break;
3034
3035 case NEG:
3036 neg_double (l1, h1, &lv, &hv);
3037 break;
3038
3039 case ABS:
3040 if (h1 < 0)
3041 neg_double (l1, h1, &lv, &hv);
3042 else
3043 lv = l1, hv = h1;
3044 break;
3045
3046 case FFS:
3047 hv = 0;
3048 if (l1 != 0)
3049 lv = exact_log2 (l1 & (-l1)) + 1;
3050 else
3051 lv = (h1 == 0 ? 0 : HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1);
3052 break;
3053
3054 case TRUNCATE:
3055 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3056 return GEN_INT (l1 & GET_MODE_MASK (mode));
3057 else
3058 return 0;
3059 break;
3060
3061 case ZERO_EXTEND:
3062 if (op_mode == VOIDmode
3063 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3064 return 0;
3065
3066 hv = 0;
3067 lv = l1 & GET_MODE_MASK (op_mode);
3068 break;
3069
3070 case SIGN_EXTEND:
3071 if (op_mode == VOIDmode
3072 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3073 return 0;
3074 else
3075 {
3076 lv = l1 & GET_MODE_MASK (op_mode);
3077 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3078 && (lv & ((HOST_WIDE_INT) 1
3079 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3080 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3081
3082 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3083 }
3084 break;
3085
3086 case SQRT:
3087 return 0;
3088
3089 default:
3090 return 0;
3091 }
3092
3093 return immed_double_const (lv, hv, mode);
3094 }
3095
3096 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3097 else if (GET_CODE (op) == CONST_DOUBLE
3098 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3099 {
3100 REAL_VALUE_TYPE d;
3101 jmp_buf handler;
3102 rtx x;
3103
3104 if (setjmp (handler))
3105 /* There used to be a warning here, but that is inadvisable.
3106 People may want to cause traps, and the natural way
3107 to do it should not get a warning. */
3108 return 0;
3109
3110 set_float_handler (handler);
3111
3112 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3113
3114 switch (code)
3115 {
3116 case NEG:
3117 d = REAL_VALUE_NEGATE (d);
3118 break;
3119
3120 case ABS:
3121 if (REAL_VALUE_NEGATIVE (d))
3122 d = REAL_VALUE_NEGATE (d);
3123 break;
3124
3125 case FLOAT_TRUNCATE:
3126 d = real_value_truncate (mode, d);
3127 break;
3128
3129 case FLOAT_EXTEND:
3130 /* All this does is change the mode. */
3131 break;
3132
3133 case FIX:
3134 d = REAL_VALUE_RNDZINT (d);
3135 break;
3136
3137 case UNSIGNED_FIX:
3138 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3139 break;
3140
3141 case SQRT:
3142 return 0;
3143
3144 default:
3145 abort ();
3146 }
3147
3148 x = immed_real_const_1 (d, mode);
3149 set_float_handler (NULL_PTR);
3150 return x;
3151 }
3152 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
3153 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3154 {
3155 REAL_VALUE_TYPE d;
3156 jmp_buf handler;
3157 rtx x;
3158 HOST_WIDE_INT val;
3159
3160 if (setjmp (handler))
3161 return 0;
3162
3163 set_float_handler (handler);
3164
3165 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3166
3167 switch (code)
3168 {
3169 case FIX:
3170 val = REAL_VALUE_FIX (d);
3171 break;
3172
3173 case UNSIGNED_FIX:
3174 val = REAL_VALUE_UNSIGNED_FIX (d);
3175 break;
3176
3177 default:
3178 abort ();
3179 }
3180
3181 set_float_handler (NULL_PTR);
3182
3183 /* Clear the bits that don't belong in our mode,
3184 unless they and our sign bit are all one.
3185 So we get either a reasonable negative value or a reasonable
3186 unsigned value for this mode. */
3187 if (width < HOST_BITS_PER_WIDE_INT
3188 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3189 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3190 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3191
3192 return GEN_INT (val);
3193 }
3194 #endif
3195 /* This was formerly used only for non-IEEE float.
3196 eggert@twinsun.com says it is safe for IEEE also. */
3197 else
3198 {
3199 /* There are some simplifications we can do even if the operands
3200 aren't constant. */
3201 switch (code)
3202 {
3203 case NEG:
3204 case NOT:
3205 /* (not (not X)) == X, similarly for NEG. */
3206 if (GET_CODE (op) == code)
3207 return XEXP (op, 0);
3208 break;
3209
3210 case SIGN_EXTEND:
3211 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3212 becomes just the MINUS if its mode is MODE. This allows
3213 folding switch statements on machines using casesi (such as
3214 the Vax). */
3215 if (GET_CODE (op) == TRUNCATE
3216 && GET_MODE (XEXP (op, 0)) == mode
3217 && GET_CODE (XEXP (op, 0)) == MINUS
3218 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3219 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3220 return XEXP (op, 0);
3221 break;
3222 }
3223
3224 return 0;
3225 }
3226 }
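/* Usage illustration for simplify_unary_operation (editorial sketch):

	rtx x = simplify_unary_operation (NEG, SImode, GEN_INT (5), SImode);

   yields (const_int -5), while a request it cannot fold, such as SQRT
   of a CONST_INT, returns 0 and the caller keeps the original rtx.  */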
3227 \f
3228 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3229 and OP1. Return 0 if no simplification is possible.
3230
3231 Don't use this for relational operations such as EQ or LT.
3232 Use simplify_relational_operation instead. */
3233
3234 rtx
3235 simplify_binary_operation (code, mode, op0, op1)
3236 enum rtx_code code;
3237 enum machine_mode mode;
3238 rtx op0, op1;
3239 {
3240 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3241 HOST_WIDE_INT val;
3242 int width = GET_MODE_BITSIZE (mode);
3243 rtx tem;
3244
3245 /* Relational operations don't work here. We must know the mode
3246 of the operands in order to do the comparison correctly.
3247 Assuming a full word can give incorrect results.
3248 Consider comparing 128 with -128 in QImode. */
3249
3250 if (GET_RTX_CLASS (code) == '<')
3251 abort ();
3252
3253 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3254 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3255 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3256 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3257 {
3258 REAL_VALUE_TYPE f0, f1, value;
3259 jmp_buf handler;
3260
3261 if (setjmp (handler))
3262 return 0;
3263
3264 set_float_handler (handler);
3265
3266 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3267 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3268 f0 = real_value_truncate (mode, f0);
3269 f1 = real_value_truncate (mode, f1);
3270
3271 #ifdef REAL_ARITHMETIC
3272 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3273 #else
3274 switch (code)
3275 {
3276 case PLUS:
3277 value = f0 + f1;
3278 break;
3279 case MINUS:
3280 value = f0 - f1;
3281 break;
3282 case MULT:
3283 value = f0 * f1;
3284 break;
3285 case DIV:
3286 #ifndef REAL_INFINITY
3287 if (f1 == 0)
3288 return 0;
3289 #endif
3290 value = f0 / f1;
3291 break;
3292 case SMIN:
3293 value = MIN (f0, f1);
3294 break;
3295 case SMAX:
3296 value = MAX (f0, f1);
3297 break;
3298 default:
3299 abort ();
3300 }
3301 #endif
3302
3303 set_float_handler (NULL_PTR);
3304 value = real_value_truncate (mode, value);
3305 return immed_real_const_1 (value, mode);
3306 }
3307 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3308
3309 /* We can fold some multi-word operations. */
3310 if (GET_MODE_CLASS (mode) == MODE_INT
3311 && GET_CODE (op0) == CONST_DOUBLE
3312 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3313 {
3314 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3315
3316 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3317
3318 if (GET_CODE (op1) == CONST_DOUBLE)
3319 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3320 else
3321 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3322
3323 switch (code)
3324 {
3325 case MINUS:
3326 /* A - B == A + (-B). */
3327 neg_double (l2, h2, &lv, &hv);
3328 l2 = lv, h2 = hv;
3329
3330 /* ... fall through ... */
3331
3332 case PLUS:
3333 add_double (l1, h1, l2, h2, &lv, &hv);
3334 break;
3335
3336 case MULT:
3337 mul_double (l1, h1, l2, h2, &lv, &hv);
3338 break;
3339
3340 case DIV: case MOD: case UDIV: case UMOD:
3341 /* We'd need to include tree.h to do this and it doesn't seem worth
3342 it. */
3343 return 0;
3344
3345 case AND:
3346 lv = l1 & l2, hv = h1 & h2;
3347 break;
3348
3349 case IOR:
3350 lv = l1 | l2, hv = h1 | h2;
3351 break;
3352
3353 case XOR:
3354 lv = l1 ^ l2, hv = h1 ^ h2;
3355 break;
3356
3357 case SMIN:
3358 if (h1 < h2
3359 || (h1 == h2
3360 && ((unsigned HOST_WIDE_INT) l1
3361 < (unsigned HOST_WIDE_INT) l2)))
3362 lv = l1, hv = h1;
3363 else
3364 lv = l2, hv = h2;
3365 break;
3366
3367 case SMAX:
3368 if (h1 > h2
3369 || (h1 == h2
3370 && ((unsigned HOST_WIDE_INT) l1
3371 > (unsigned HOST_WIDE_INT) l2)))
3372 lv = l1, hv = h1;
3373 else
3374 lv = l2, hv = h2;
3375 break;
3376
3377 case UMIN:
3378 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3379 || (h1 == h2
3380 && ((unsigned HOST_WIDE_INT) l1
3381 < (unsigned HOST_WIDE_INT) l2)))
3382 lv = l1, hv = h1;
3383 else
3384 lv = l2, hv = h2;
3385 break;
3386
3387 case UMAX:
3388 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3389 || (h1 == h2
3390 && ((unsigned HOST_WIDE_INT) l1
3391 > (unsigned HOST_WIDE_INT) l2)))
3392 lv = l1, hv = h1;
3393 else
3394 lv = l2, hv = h2;
3395 break;
3396
3397 case LSHIFTRT: case ASHIFTRT:
3398 case ASHIFT: case LSHIFT:
3399 case ROTATE: case ROTATERT:
3400 #ifdef SHIFT_COUNT_TRUNCATED
3401 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3402 #endif
3403
3404 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3405 return 0;
3406
3407 if (code == LSHIFTRT || code == ASHIFTRT)
3408 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3409 code == ASHIFTRT);
3410 else if (code == ASHIFT || code == LSHIFT)
3411 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3412 code == ASHIFT);
3413 else if (code == ROTATE)
3414 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3415 else /* code == ROTATERT */
3416 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3417 break;
3418
3419 default:
3420 return 0;
3421 }
3422
3423 return immed_double_const (lv, hv, mode);
3424 }
3425
3426 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3427 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3428 {
3429 /* Even if we can't compute a constant result,
3430 there are some cases worth simplifying. */
3431
3432 switch (code)
3433 {
3434 case PLUS:
3435 /* In IEEE floating point, x+0 is not the same as x. Similarly
3436 for the other optimizations below. */
3437 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3438 && GET_MODE_CLASS (mode) != MODE_INT)
3439 break;
3440
3441 if (op1 == CONST0_RTX (mode))
3442 return op0;
3443
3444 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3445 if (GET_CODE (op0) == NEG)
3446 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3447 else if (GET_CODE (op1) == NEG)
3448 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3449
3450 /* Handle both-operands-constant cases. We can only add
3451 CONST_INTs to constants since the sum of relocatable symbols
3452 can't be handled by most assemblers. */
3453
3454 if (CONSTANT_P (op0) && GET_CODE (op1) == CONST_INT)
3455 return plus_constant (op0, INTVAL (op1));
3456 else if (CONSTANT_P (op1) && GET_CODE (op0) == CONST_INT)
3457 return plus_constant (op1, INTVAL (op0));
3458
3459 /* If one of the operands is a PLUS or a MINUS, see if we can
3460 simplify this by the associative law.
3461 Don't use the associative law for floating point.
3462 The inaccuracy makes it nonassociative,
3463 and subtle programs can break if operations are associated. */
3464
3465 if ((GET_MODE_CLASS (mode) == MODE_INT
3466 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3467 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3468 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3469 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3470 return tem;
3471 break;
3472
3473 case COMPARE:
3474 #ifdef HAVE_cc0
3475 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3476 using cc0, in which case we want to leave it as a COMPARE
3477 so we can distinguish it from a register-register-copy.
3478
3479 In IEEE floating point, x-0 is not the same as x. */
3480
3481 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3482 || GET_MODE_CLASS (mode) == MODE_INT)
3483 && op1 == CONST0_RTX (mode))
3484 return op0;
3485 #else
3486 /* Do nothing here. */
3487 #endif
3488 break;
3489
3490 case MINUS:
3491 /* None of these optimizations can be done for IEEE
3492 floating point. */
3493 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3494 && GET_MODE_CLASS (mode) != MODE_INT
3495 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
3496 break;
3497
3498 /* We can't assume x-x is 0 even with non-IEEE floating point. */
3499 if (rtx_equal_p (op0, op1)
3500 && ! side_effects_p (op0)
3501 && GET_MODE_CLASS (mode) != MODE_FLOAT
3502 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
3503 return const0_rtx;
3504
3505 /* Change subtraction from zero into negation. */
3506 if (op0 == CONST0_RTX (mode))
3507 return gen_rtx (NEG, mode, op1);
3508
3509 /* (-1 - a) is ~a. */
3510 if (op0 == constm1_rtx)
3511 return gen_rtx (NOT, mode, op1);
3512
3513 /* Subtracting 0 has no effect. */
3514 if (op1 == CONST0_RTX (mode))
3515 return op0;
3516
3517 /* (a - (-b)) -> (a + b). */
3518 if (GET_CODE (op1) == NEG)
3519 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3520
3521 /* If one of the operands is a PLUS or a MINUS, see if we can
3522 simplify this by the associative law.
3523 Don't use the associative law for floating point.
3524 The inaccuracy makes it nonassociative,
3525 and subtle programs can break if operations are associated. */
3526
3527 if ((GET_MODE_CLASS (mode) == MODE_INT
3528 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3529 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3530 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3531 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3532 return tem;
3533
3534 /* Don't let a relocatable value get a negative coeff. */
3535 if (GET_CODE (op1) == CONST_INT)
3536 return plus_constant (op0, - INTVAL (op1));
3537 break;
3538
3539 case MULT:
3540 if (op1 == constm1_rtx)
3541 {
3542 tem = simplify_unary_operation (NEG, mode, op0, mode);
3543
3544 return tem ? tem : gen_rtx (NEG, mode, op0);
3545 }
3546
3547 /* In IEEE floating point, x*0 is not always 0. */
3548 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3549 || GET_MODE_CLASS (mode) == MODE_INT)
3550 && op1 == CONST0_RTX (mode)
3551 && ! side_effects_p (op0))
3552 return op1;
3553
3554 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3555 However, ANSI says we can drop signals,
3556 so we can do this anyway. */
3557 if (op1 == CONST1_RTX (mode))
3558 return op0;
3559
3560 /* Convert multiply by constant power of two into shift. */
3561 if (GET_CODE (op1) == CONST_INT
3562 && (val = exact_log2 (INTVAL (op1))) >= 0)
3563 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3564
3565 if (GET_CODE (op1) == CONST_DOUBLE
3566 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3567 {
3568 REAL_VALUE_TYPE d;
3569 jmp_buf handler;
3570 int op1is2, op1ism1;
3571
3572 if (setjmp (handler))
3573 return 0;
3574
3575 set_float_handler (handler);
3576 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3577 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3578 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3579 set_float_handler (NULL_PTR);
3580
3581 /* x*2 is x+x and x*(-1) is -x */
3582 if (op1is2 && GET_MODE (op0) == mode)
3583 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3584
3585 else if (op1ism1 && GET_MODE (op0) == mode)
3586 return gen_rtx (NEG, mode, op0);
3587 }
3588 break;
3589
3590 case IOR:
3591 if (op1 == const0_rtx)
3592 return op0;
3593 if (GET_CODE (op1) == CONST_INT
3594 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3595 return op1;
3596 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3597 return op0;
3598 /* A | (~A) -> -1 */
3599 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3600 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3601 && ! side_effects_p (op0))
3602 return constm1_rtx;
3603 break;
3604
3605 case XOR:
3606 if (op1 == const0_rtx)
3607 return op0;
3608 if (GET_CODE (op1) == CONST_INT
3609 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3610 return gen_rtx (NOT, mode, op0);
3611 if (op0 == op1 && ! side_effects_p (op0))
3612 return const0_rtx;
3613 break;
3614
3615 case AND:
3616 if (op1 == const0_rtx && ! side_effects_p (op0))
3617 return const0_rtx;
3618 if (GET_CODE (op1) == CONST_INT
3619 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3620 return op0;
3621 if (op0 == op1 && ! side_effects_p (op0))
3622 return op0;
3623 /* A & (~A) -> 0 */
3624 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3625 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3626 && ! side_effects_p (op0))
3627 return const0_rtx;
3628 break;
3629
3630 case UDIV:
3631 /* Convert divide by power of two into shift (divide by 1 handled
3632 below). */
3633 if (GET_CODE (op1) == CONST_INT
3634 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3635 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3636
3637 /* ... fall through ... */
3638
3639 case DIV:
3640 if (op1 == CONST1_RTX (mode))
3641 return op0;
3642
3643 /* In IEEE floating point, 0/x is not always 0. */
3644 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3645 || GET_MODE_CLASS (mode) == MODE_INT)
3646 && op0 == CONST0_RTX (mode)
3647 && ! side_effects_p (op1))
3648 return op0;
3649
3650 #if 0 /* Turned off till an expert says this is a safe thing to do. */
3651 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3652 /* Change division by a constant into multiplication. */
3653 else if (GET_CODE (op1) == CONST_DOUBLE
3654 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3655 && op1 != CONST0_RTX (mode))
3656 {
3657 REAL_VALUE_TYPE d;
3658 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3659 if (REAL_VALUES_EQUAL (d, dconst0))
3660 abort();
3661 #if defined (REAL_ARITHMETIC)
3662 REAL_ARITHMETIC (d, (int) RDIV_EXPR, dconst1, d);
3663 return gen_rtx (MULT, mode, op0,
3664 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3665 #else
3666 return gen_rtx (MULT, mode, op0,
3667 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3668 #endif
3669 }
3670 #endif
3671 #endif
3672 break;
3673
3674 case UMOD:
3675 /* Handle modulus by power of two (mod with 1 handled below). */
3676 if (GET_CODE (op1) == CONST_INT
3677 && exact_log2 (INTVAL (op1)) > 0)
3678 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
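
/* For example, (umod:SI x (const_int 8)) becomes
   (and:SI x (const_int 7)): for a power-of-two modulus the
   remainder is just the low-order bits, and INTVAL (op1) - 1 is
   the mask that keeps exactly those bits.  */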
3679
3680 /* ... fall through ... */
3681
3682 case MOD:
3683 if ((op0 == const0_rtx || op1 == const1_rtx)
3684 && ! side_effects_p (op0) && ! side_effects_p (op1))
3685 return const0_rtx;
3686 break;
3687
3688 case ROTATERT:
3689 case ROTATE:
3690 /* Rotating ~0 always results in ~0. */
3691 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3692 && INTVAL (op0) == GET_MODE_MASK (mode)
3693 && ! side_effects_p (op1))
3694 return op0;
3695
3696 /* ... fall through ... */
3697
3698 case LSHIFT:
3699 case ASHIFT:
3700 case ASHIFTRT:
3701 case LSHIFTRT:
3702 if (op1 == const0_rtx)
3703 return op0;
3704 if (op0 == const0_rtx && ! side_effects_p (op1))
3705 return op0;
3706 break;
3707
3708 case SMIN:
3709 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3710 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3711 && ! side_effects_p (op0))
3712 return op1;
3713 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3714 return op0;
3715 break;
3716
3717 case SMAX:
3718 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3719 && INTVAL (op1) == (unsigned) GET_MODE_MASK (mode) >> 1
3720 && ! side_effects_p (op0))
3721 return op1;
3722 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3723 return op0;
3724 break;
3725
3726 case UMIN:
3727 if (op1 == const0_rtx && ! side_effects_p (op0))
3728 return op1;
3729 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3730 return op0;
3731 break;
3732
3733 case UMAX:
3734 if (op1 == constm1_rtx && ! side_effects_p (op0))
3735 return op1;
3736 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3737 return op0;
3738 break;
3739
3740 default:
3741 abort ();
3742 }
3743
3744 return 0;
3745 }
3746
3747 /* Get the integer argument values in two forms:
3748 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3749
3750 arg0 = INTVAL (op0);
3751 arg1 = INTVAL (op1);
3752
3753 if (width < HOST_BITS_PER_WIDE_INT)
3754 {
3755 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3756 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3757
3758 arg0s = arg0;
3759 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3760 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3761
3762 arg1s = arg1;
3763 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3764 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3765 }
3766 else
3767 {
3768 arg0s = arg0;
3769 arg1s = arg1;
3770 }
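
/* For example, with width == 8 and op0 == (const_int 0xff): after
   masking, arg0 is 0xff, while arg0s becomes -1, since bit 7 (the
   sign bit of the 8-bit mode) is set and the high-order host bits
   are then filled with ones.  */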
3771
3772 /* Compute the value of the arithmetic. */
3773
3774 switch (code)
3775 {
3776 case PLUS:
3777 val = arg0s + arg1s;
3778 break;
3779
3780 case MINUS:
3781 val = arg0s - arg1s;
3782 break;
3783
3784 case MULT:
3785 val = arg0s * arg1s;
3786 break;
3787
3788 case DIV:
3789 if (arg1s == 0)
3790 return 0;
3791 val = arg0s / arg1s;
3792 break;
3793
3794 case MOD:
3795 if (arg1s == 0)
3796 return 0;
3797 val = arg0s % arg1s;
3798 break;
3799
3800 case UDIV:
3801 if (arg1 == 0)
3802 return 0;
3803 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3804 break;
3805
3806 case UMOD:
3807 if (arg1 == 0)
3808 return 0;
3809 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3810 break;
3811
3812 case AND:
3813 val = arg0 & arg1;
3814 break;
3815
3816 case IOR:
3817 val = arg0 | arg1;
3818 break;
3819
3820 case XOR:
3821 val = arg0 ^ arg1;
3822 break;
3823
3824 case LSHIFTRT:
3825 /* If shift count is undefined, don't fold it; let the machine do
3826 what it wants. But truncate it if the machine will do that. */
3827 if (arg1 < 0)
3828 return 0;
3829
3830 #ifdef SHIFT_COUNT_TRUNCATED
3831 arg1 &= (BITS_PER_WORD - 1);
3832 #endif
3833
3834 if (arg1 >= width)
3835 return 0;
3836
3837 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
3838 break;
3839
3840 case ASHIFT:
3841 case LSHIFT:
3842 if (arg1 < 0)
3843 return 0;
3844
3845 #ifdef SHIFT_COUNT_TRUNCATED
3846 arg1 &= (BITS_PER_WORD - 1);
3847 #endif
3848
3849 if (arg1 >= width)
3850 return 0;
3851
3852 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
3853 break;
3854
3855 case ASHIFTRT:
3856 if (arg1 < 0)
3857 return 0;
3858
3859 #ifdef SHIFT_COUNT_TRUNCATED
3860 arg1 &= (BITS_PER_WORD - 1);
3861 #endif
3862
3863 if (arg1 >= width)
3864 return 0;
3865
3866 val = arg0s >> arg1;
3867
3868 /* The bootstrap compiler may not have sign-extended the right shift.
3869 Manually extend the sign to ensure the bootstrap cc matches gcc. */
3870 if (arg0s < 0 && arg1 > 0)
3871 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
3872
3873 break;
3874
3875 case ROTATERT:
3876 if (arg1 < 0)
3877 return 0;
3878
3879 arg1 %= width;
3880 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
3881 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
3882 break;
3883
3884 case ROTATE:
3885 if (arg1 < 0)
3886 return 0;
3887
3888 arg1 %= width;
3889 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
3890 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
3891 break;
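
/* For example, in an 8-bit mode, (rotate (const_int 0xb4)
   (const_int 3)) computes (0xb4 << 3) | (0xb4 >> 5) == 0x5a5 here;
   the masking at the end of this function then trims the value to
   the mode, giving 0xa5, which is 0xb4 rotated left by 3.  */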
3892
3893 case COMPARE:
3894 /* Do nothing here. */
3895 return 0;
3896
3897 case SMIN:
3898 val = arg0s <= arg1s ? arg0s : arg1s;
3899 break;
3900
3901 case UMIN:
3902 val = ((unsigned HOST_WIDE_INT) arg0
3903 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3904 break;
3905
3906 case SMAX:
3907 val = arg0s > arg1s ? arg0s : arg1s;
3908 break;
3909
3910 case UMAX:
3911 val = ((unsigned HOST_WIDE_INT) arg0
3912 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3913 break;
3914
3915 default:
3916 abort ();
3917 }
3918
3919 /* Clear the bits that don't belong in our mode, unless they and our sign
3920 bit are all one. So we get either a reasonable negative value or a
3921 reasonable unsigned value for this mode. */
3922 if (width < HOST_BITS_PER_WIDE_INT
3923 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3924 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3925 val &= ((HOST_WIDE_INT) 1 << width) - 1;
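
/* For example, with width == 8: val == -1 already has the sign bit
   and all the bits above it set, so it is kept as a reasonable
   negative value; val == 0x1ff does not, so it is masked down to
   0xff, a reasonable unsigned value for the mode.  */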
3926
3927 return GEN_INT (val);
3928 }
3929 \f
3930 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
3931 PLUS or MINUS.
3932
3933 Rather than test for specific cases, we use a brute-force method
3934 and do all possible simplifications until no more changes occur. Then
3935 we rebuild the operation. */
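
/* For example, (minus (plus x (const_int 2)) (const_int 2)) expands
   into the terms {+x, +2, -2}; the two constants cancel in the
   pairwise simplification loop below, and the result is rebuilt as
   just `x'.  Likewise (minus x (minus y z)) expands into
   {+x, -y, +z}.  */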
3936
3937 static rtx
3938 simplify_plus_minus (code, mode, op0, op1)
3939 enum rtx_code code;
3940 enum machine_mode mode;
3941 rtx op0, op1;
3942 {
3943 rtx ops[8];
3944 int negs[8];
3945 rtx result, tem;
3946 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
3947 int first = 1, negate = 0, changed;
3948 int i, j;
3949
3950 bzero (ops, sizeof ops);
3951
3952 /* Set up the two operands and then expand them until nothing has been
3953 changed. If we run out of room in our array, give up; this should
3954 almost never happen. */
3955
3956 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
3957
3958 changed = 1;
3959 while (changed)
3960 {
3961 changed = 0;
3962
3963 for (i = 0; i < n_ops; i++)
3964 switch (GET_CODE (ops[i]))
3965 {
3966 case PLUS:
3967 case MINUS:
3968 if (n_ops == 7)
3969 return 0;
3970
3971 ops[n_ops] = XEXP (ops[i], 1);
3972 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
3973 ops[i] = XEXP (ops[i], 0);
3974 input_ops++;
3975 changed = 1;
3976 break;
3977
3978 case NEG:
3979 ops[i] = XEXP (ops[i], 0);
3980 negs[i] = ! negs[i];
3981 changed = 1;
3982 break;
3983
3984 case CONST:
3985 ops[i] = XEXP (ops[i], 0);
3986 input_consts++;
3987 changed = 1;
3988 break;
3989
3990 case NOT:
3991 /* ~a -> (-a - 1) */
3992 if (n_ops != 7)
3993 {
3994 ops[n_ops] = constm1_rtx;
3995 negs[n_ops++] = negs[i];
3996 ops[i] = XEXP (ops[i], 0);
3997 negs[i] = ! negs[i];
3998 changed = 1;
3999 }
4000 break;
4001
4002 case CONST_INT:
4003 if (negs[i])
4004 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4005 break;
4006 }
4007 }
4008
4009 /* If we only have two operands, we can't do anything. */
4010 if (n_ops <= 2)
4011 return 0;
4012
4013 /* Now simplify each pair of operands until nothing changes. The first
4014 time through just simplify constants against each other. */
4015
4016 changed = 1;
4017 while (changed)
4018 {
4019 changed = first;
4020
4021 for (i = 0; i < n_ops - 1; i++)
4022 for (j = i + 1; j < n_ops; j++)
4023 if (ops[i] != 0 && ops[j] != 0
4024 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4025 {
4026 rtx lhs = ops[i], rhs = ops[j];
4027 enum rtx_code ncode = PLUS;
4028
4029 if (negs[i] && ! negs[j])
4030 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4031 else if (! negs[i] && negs[j])
4032 ncode = MINUS;
4033
4034 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4035 if (tem)
4036 {
4037 ops[i] = tem, ops[j] = 0;
4038 negs[i] = negs[i] && negs[j];
4039 if (GET_CODE (tem) == NEG)
4040 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4041
4042 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4043 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4044 changed = 1;
4045 }
4046 }
4047
4048 first = 0;
4049 }
4050
4051 /* Pack all the operands to the lower-numbered entries and give up if
4052 we didn't reduce the number of operands we had. Make sure we
4053 count a CONST as two operands. If we have the same number of
4054 operands, but have made more CONSTs than we had, this is also
4055 an improvement, so accept it. */
4056
4057 for (i = 0, j = 0; j < n_ops; j++)
4058 if (ops[j] != 0)
4059 {
4060 ops[i] = ops[j], negs[i++] = negs[j];
4061 if (GET_CODE (ops[j]) == CONST)
4062 n_consts++;
4063 }
4064
4065 if (i + n_consts > input_ops
4066 || (i + n_consts == input_ops && n_consts <= input_consts))
4067 return 0;
4068
4069 n_ops = i;
4070
4071 /* If we have a CONST_INT, put it last. */
4072 for (i = 0; i < n_ops - 1; i++)
4073 if (GET_CODE (ops[i]) == CONST_INT)
4074 {
4075 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4076 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4077 }
4078
4079 /* Put a non-negated operand first. If there aren't any, make all
4080 operands positive and negate the whole thing later. */
4081 for (i = 0; i < n_ops && negs[i]; i++)
4082 ;
4083
4084 if (i == n_ops)
4085 {
4086 for (i = 0; i < n_ops; i++)
4087 negs[i] = 0;
4088 negate = 1;
4089 }
4090 else if (i != 0)
4091 {
4092 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4093 j = negs[0], negs[0] = negs[i], negs[i] = j;
4094 }
4095
4096 /* Now make the result by performing the requested operations. */
4097 result = ops[0];
4098 for (i = 1; i < n_ops; i++)
4099 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4100
4101 return negate ? gen_rtx (NEG, mode, result) : result;
4102 }
4103 \f
4104 /* Make a binary operation by properly ordering the operands and
4105 seeing if the expression folds. */
4106
4107 static rtx
4108 cse_gen_binary (code, mode, op0, op1)
4109 enum rtx_code code;
4110 enum machine_mode mode;
4111 rtx op0, op1;
4112 {
4113 rtx tem;
4114
4115 /* Put complex operands first and constants second if commutative. */
4116 if (GET_RTX_CLASS (code) == 'c'
4117 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4118 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4119 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4120 || (GET_CODE (op0) == SUBREG
4121 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4122 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4123 tem = op0, op0 = op1, op1 = tem;
4124
4125 /* If this simplifies, do it. */
4126 tem = simplify_binary_operation (code, mode, op0, op1);
4127
4128 if (tem)
4129 return tem;
4130
4131 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4132 just form the operation. */
4133
4134 if (code == PLUS && GET_CODE (op1) == CONST_INT
4135 && GET_MODE (op0) != VOIDmode)
4136 return plus_constant (op0, INTVAL (op1));
4137 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4138 && GET_MODE (op0) != VOIDmode)
4139 return plus_constant (op0, - INTVAL (op1));
4140 else
4141 return gen_rtx (code, mode, op0, op1);
4142 }
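
/* For example, cse_gen_binary (PLUS, SImode, (const_int 4), reg)
   first swaps the operands so the constant comes second; the sum
   does not fold to a single constant, so it is formed with
   plus_constant, which would also merge the 4 with an inner
   (plus reg (const_int N)) if the operand had that form.  */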
4143 \f
4144 /* Like simplify_binary_operation except used for relational operators.
4145 MODE is the mode of the operands, not that of the result. */
4146
4147 rtx
4148 simplify_relational_operation (code, mode, op0, op1)
4149 enum rtx_code code;
4150 enum machine_mode mode;
4151 rtx op0, op1;
4152 {
4153 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
4154 HOST_WIDE_INT val;
4155 int width = GET_MODE_BITSIZE (mode);
4156
4157 /* If op0 is a compare, extract the comparison arguments from it. */
4158 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4159 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4160
4161 /* Unlike the arithmetic operations, we can do the comparison whether
4162 or not WIDTH is larger than HOST_BITS_PER_WIDE_INT because the
4163 CONST_INTs are to be understood as being infinite precision as
4164 is the comparison. So there is no question of overflow. */
4165
4166 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT || width == 0)
4167 {
4168 /* Even if we can't compute a constant result,
4169 there are some cases worth simplifying. */
4170
4171 /* For non-IEEE floating-point, if the two operands are equal, we know
4172 the result. */
4173 if (rtx_equal_p (op0, op1)
4174 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4175 || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
4176 return (code == EQ || code == GE || code == LE || code == LEU
4177 || code == GEU) ? const_true_rtx : const0_rtx;
4178
4179 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4180 else if (GET_CODE (op0) == CONST_DOUBLE
4181 && GET_CODE (op1) == CONST_DOUBLE
4182 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4183 {
4184 REAL_VALUE_TYPE d0, d1;
4185 jmp_buf handler;
4186 int op0lt, op1lt, equal;
4187
4188 if (setjmp (handler))
4189 return 0;
4190
4191 set_float_handler (handler);
4192 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4193 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4194 equal = REAL_VALUES_EQUAL (d0, d1);
4195 op0lt = REAL_VALUES_LESS (d0, d1);
4196 op1lt = REAL_VALUES_LESS (d1, d0);
4197 set_float_handler (NULL_PTR);
4198
4199 switch (code)
4200 {
4201 case EQ:
4202 return equal ? const_true_rtx : const0_rtx;
4203 case NE:
4204 return !equal ? const_true_rtx : const0_rtx;
4205 case LE:
4206 return equal || op0lt ? const_true_rtx : const0_rtx;
4207 case LT:
4208 return op0lt ? const_true_rtx : const0_rtx;
4209 case GE:
4210 return equal || op1lt ? const_true_rtx : const0_rtx;
4211 case GT:
4212 return op1lt ? const_true_rtx : const0_rtx;
4213 }
4214 }
4215 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4216
4217 else if (GET_MODE_CLASS (mode) == MODE_INT
4218 && width > HOST_BITS_PER_WIDE_INT
4219 && (GET_CODE (op0) == CONST_DOUBLE
4220 || GET_CODE (op0) == CONST_INT)
4221 && (GET_CODE (op1) == CONST_DOUBLE
4222 || GET_CODE (op1) == CONST_INT))
4223 {
4224 HOST_WIDE_INT h0, l0, h1, l1;
4225 unsigned HOST_WIDE_INT uh0, ul0, uh1, ul1;
4226 int op0lt, op0ltu, equal;
4227
4228 if (GET_CODE (op0) == CONST_DOUBLE)
4229 l0 = CONST_DOUBLE_LOW (op0), h0 = CONST_DOUBLE_HIGH (op0);
4230 else
4231 l0 = INTVAL (op0), h0 = l0 < 0 ? -1 : 0;
4232
4233 if (GET_CODE (op1) == CONST_DOUBLE)
4234 l1 = CONST_DOUBLE_LOW (op1), h1 = CONST_DOUBLE_HIGH (op1);
4235 else
4236 l1 = INTVAL (op1), h1 = l1 < 0 ? -1 : 0;
4237
4238 uh0 = h0, ul0 = l0, uh1 = h1, ul1 = l1;
4239
4240 equal = (h0 == h1 && l0 == l1);
4241 op0lt = (h0 < h1 || (h0 == h1 && ul0 < ul1));
4242 op0ltu = (uh0 < uh1 || (uh0 == uh1 && ul0 < ul1));
4243
4244 switch (code)
4245 {
4246 case EQ:
4247 return equal ? const_true_rtx : const0_rtx;
4248 case NE:
4249 return !equal ? const_true_rtx : const0_rtx;
4250 case LE:
4251 return equal || op0lt ? const_true_rtx : const0_rtx;
4252 case LT:
4253 return op0lt ? const_true_rtx : const0_rtx;
4254 case GE:
4255 return !op0lt ? const_true_rtx : const0_rtx;
4256 case GT:
4257 return !equal && !op0lt ? const_true_rtx : const0_rtx;
4258 case LEU:
4259 return equal || op0ltu ? const_true_rtx : const0_rtx;
4260 case LTU:
4261 return op0ltu ? const_true_rtx : const0_rtx;
4262 case GEU:
4263 return !op0ltu ? const_true_rtx : const0_rtx;
4264 case GTU:
4265 return !equal && !op0ltu ? const_true_rtx : const0_rtx;
4266 }
4267 }
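
/* For example, comparing double-word values with h0 == -1, l0 == 0
   against h1 == 0, l1 == 0: as a signed comparison h0 < h1, so
   OP0LT is set and LT folds to true; as an unsigned comparison
   uh0 > uh1, so OP0LTU is clear and LTU folds to false.  The high
   words decide unless they are equal, in which case the low words
   are compared as unsigned.  */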
4268
4269 switch (code)
4270 {
4271 case EQ:
4272 {
4273 #if 0
4274 /* We can't make this assumption due to #pragma weak */
4275 if (CONSTANT_P (op0) && op1 == const0_rtx)
4276 return const0_rtx;
4277 #endif
4278 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4279 /* On some machines, the arg pointer reg can sometimes be 0. */
4280 && op0 != arg_pointer_rtx)
4281 return const0_rtx;
4282 break;
4283 }
4284
4285 case NE:
4286 #if 0
4287 /* We can't make this assumption due to #pragma weak */
4288 if (CONSTANT_P (op0) && op1 == const0_rtx)
4289 return const_true_rtx;
4290 #endif
4291 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4292 /* On some machines, the arg pointer reg can sometimes be 0. */
4293 && op0 != arg_pointer_rtx)
4294 return const_true_rtx;
4295 break;
4296
4297 case GEU:
4298 /* Unsigned values are never negative, but we must be sure we are
4299 actually comparing a value, not a CC operand. */
4300 if (op1 == const0_rtx
4301 && GET_MODE_CLASS (mode) == MODE_INT)
4302 return const_true_rtx;
4303 break;
4304
4305 case LTU:
4306 if (op1 == const0_rtx
4307 && GET_MODE_CLASS (mode) == MODE_INT)
4308 return const0_rtx;
4309 break;
4310
4311 case LEU:
4312 /* Unsigned values are never greater than the largest
4313 unsigned value. */
4314 if (GET_CODE (op1) == CONST_INT
4315 && INTVAL (op1) == GET_MODE_MASK (mode)
4316 && GET_MODE_CLASS (mode) == MODE_INT)
4317 return const_true_rtx;
4318 break;
4319
4320 case GTU:
4321 if (GET_CODE (op1) == CONST_INT
4322 && INTVAL (op1) == GET_MODE_MASK (mode)
4323 && GET_MODE_CLASS (mode) == MODE_INT)
4324 return const0_rtx;
4325 break;
4326 }
4327
4328 return 0;
4329 }
4330
4331 /* Get the integer argument values in two forms:
4332 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4333
4334 arg0 = INTVAL (op0);
4335 arg1 = INTVAL (op1);
4336
4337 if (width < HOST_BITS_PER_WIDE_INT)
4338 {
4339 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4340 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4341
4342 arg0s = arg0;
4343 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4344 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4345
4346 arg1s = arg1;
4347 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4348 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4349 }
4350 else
4351 {
4352 arg0s = arg0;
4353 arg1s = arg1;
4354 }
4355
4356 /* Compute the value of the arithmetic. */
4357
4358 switch (code)
4359 {
4360 case NE:
4361 val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
4362 break;
4363
4364 case EQ:
4365 val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
4366 break;
4367
4368 case LE:
4369 val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
4370 break;
4371
4372 case LT:
4373 val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
4374 break;
4375
4376 case GE:
4377 val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
4378 break;
4379
4380 case GT:
4381 val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
4382 break;
4383
4384 case LEU:
4385 val = (((unsigned HOST_WIDE_INT) arg0)
4386 <= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4387 break;
4388
4389 case LTU:
4390 val = (((unsigned HOST_WIDE_INT) arg0)
4391 < ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4392 break;
4393
4394 case GEU:
4395 val = (((unsigned HOST_WIDE_INT) arg0)
4396 >= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4397 break;
4398
4399 case GTU:
4400 val = (((unsigned HOST_WIDE_INT) arg0)
4401 > ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4402 break;
4403
4404 default:
4405 abort ();
4406 }
4407
4408 /* Clear the bits that don't belong in our mode, unless they and our sign
4409 bit are all one. So we get either a reasonable negative value or a
4410 reasonable unsigned value for this mode. */
4411 if (width < HOST_BITS_PER_WIDE_INT
4412 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4413 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4414 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4415
4416 return GEN_INT (val);
4417 }
4418 \f
4419 /* Simplify CODE, an operation with result mode MODE and three operands,
4420 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4421 a constant. Return 0 if no simplification is possible. */
4422
4423 rtx
4424 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4425 enum rtx_code code;
4426 enum machine_mode mode, op0_mode;
4427 rtx op0, op1, op2;
4428 {
4429 int width = GET_MODE_BITSIZE (mode);
4430
4431 /* VOIDmode means "infinite" precision. */
4432 if (width == 0)
4433 width = HOST_BITS_PER_WIDE_INT;
4434
4435 switch (code)
4436 {
4437 case SIGN_EXTRACT:
4438 case ZERO_EXTRACT:
4439 if (GET_CODE (op0) == CONST_INT
4440 && GET_CODE (op1) == CONST_INT
4441 && GET_CODE (op2) == CONST_INT
4442 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4443 && width <= HOST_BITS_PER_WIDE_INT)
4444 {
4445 /* Extracting a bit-field from a constant */
4446 HOST_WIDE_INT val = INTVAL (op0);
4447
4448 #if BITS_BIG_ENDIAN
4449 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4450 #else
4451 val >>= INTVAL (op2);
4452 #endif
4453 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4454 {
4455 /* First zero-extend. */
4456 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4457 /* If desired, propagate sign bit. */
4458 if (code == SIGN_EXTRACT
4459 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4460 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4461 }
4462
4463 /* Clear the bits that don't belong in our mode,
4464 unless they and our sign bit are all one.
4465 So we get either a reasonable negative value or a reasonable
4466 unsigned value for this mode. */
4467 if (width < HOST_BITS_PER_WIDE_INT
4468 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4469 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4470 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4471
4472 return GEN_INT (val);
4473 }
4474 break;
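
/* For example, with BITS_BIG_ENDIAN == 0, (zero_extract
   (const_int 0xa5) (const_int 4) (const_int 4)) extracts the high
   nibble and yields (const_int 0xa), while the corresponding
   sign_extract yields (const_int -6), since bit 3 of the extracted
   field is set and so the sign is propagated.  */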
4475
4476 case IF_THEN_ELSE:
4477 if (GET_CODE (op0) == CONST_INT)
4478 return op0 != const0_rtx ? op1 : op2;
4479 break;
4480
4481 default:
4482 abort ();
4483 }
4484
4485 return 0;
4486 }
4487 \f
4488 /* If X is a nontrivial arithmetic operation on an argument
4489 for which a constant value can be determined, return
4490 the result of operating on that value, as a constant.
4491 Otherwise, return X, possibly with one or more operands
4492 modified by recursive calls to this function.
4493
4494 If X is a register whose contents are known, we do NOT
4495 return those contents here. equiv_constant is called to
4496 perform that task.
4497
4498 INSN is the insn that we may be modifying. If it is 0, make a copy
4499 of X before modifying it. */
4500
4501 static rtx
4502 fold_rtx (x, insn)
4503 rtx x;
4504 rtx insn;
4505 {
4506 register enum rtx_code code;
4507 register enum machine_mode mode;
4508 register char *fmt;
4509 register int i;
4510 rtx new = 0;
4511 int copied = 0;
4512 int must_swap = 0;
4513
4514 /* Folded equivalents of first two operands of X. */
4515 rtx folded_arg0;
4516 rtx folded_arg1;
4517
4518 /* Constant equivalents of first three operands of X;
4519 0 when no such equivalent is known. */
4520 rtx const_arg0;
4521 rtx const_arg1;
4522 rtx const_arg2;
4523
4524 /* The mode of the first operand of X. We need this for sign and zero
4525 extends. */
4526 enum machine_mode mode_arg0;
4527
4528 if (x == 0)
4529 return x;
4530
4531 mode = GET_MODE (x);
4532 code = GET_CODE (x);
4533 switch (code)
4534 {
4535 case CONST:
4536 case CONST_INT:
4537 case CONST_DOUBLE:
4538 case SYMBOL_REF:
4539 case LABEL_REF:
4540 case REG:
4541 /* There is no use simplifying an EXPR_LIST,
4542 since EXPR_LISTs are used only for lists of args
4543 in a function call's REG_EQUAL note. */
4544 case EXPR_LIST:
4545 return x;
4546
4547 #ifdef HAVE_cc0
4548 case CC0:
4549 return prev_insn_cc0;
4550 #endif
4551
4552 case PC:
4553 /* If the next insn is a CODE_LABEL followed by a jump table,
4554 PC's value is a LABEL_REF pointing to that label. That
4555 lets us fold switch statements on the VAX. */
4556 if (insn && GET_CODE (insn) == JUMP_INSN)
4557 {
4558 rtx next = next_nonnote_insn (insn);
4559
4560 if (next && GET_CODE (next) == CODE_LABEL
4561 && NEXT_INSN (next) != 0
4562 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4563 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4564 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4565 return gen_rtx (LABEL_REF, Pmode, next);
4566 }
4567 break;
4568
4569 case SUBREG:
4570 /* See if we previously assigned a constant value to this SUBREG. */
4571 if ((new = lookup_as_function (x, CONST_INT)) != 0
4572 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4573 return new;
4574
4575 /* If this is a paradoxical SUBREG, we have no idea what value the
4576 extra bits would have. However, if the operand is equivalent
4577 to a SUBREG whose operand is the same as our mode, and all the
4578 modes are within a word, we can just use the inner operand
4579 because these SUBREGs just say how to treat the register. */
4580
4581 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4582 {
4583 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4584 struct table_elt *elt;
4585
4586 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4587 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4588 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4589 imode)) != 0)
4590 {
4591 for (elt = elt->first_same_value;
4592 elt; elt = elt->next_same_value)
4593 if (GET_CODE (elt->exp) == SUBREG
4594 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4595 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4596 return copy_rtx (SUBREG_REG (elt->exp));
4597 }
4598
4599 return x;
4600 }
4601
4602 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4603 We might be able to if the SUBREG is extracting a single word in an
4604 integral mode or extracting the low part. */
4605
4606 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4607 const_arg0 = equiv_constant (folded_arg0);
4608 if (const_arg0)
4609 folded_arg0 = const_arg0;
4610
4611 if (folded_arg0 != SUBREG_REG (x))
4612 {
4613 new = 0;
4614
4615 if (GET_MODE_CLASS (mode) == MODE_INT
4616 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4617 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4618 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4619 GET_MODE (SUBREG_REG (x)));
4620 if (new == 0 && subreg_lowpart_p (x))
4621 new = gen_lowpart_if_possible (mode, folded_arg0);
4622 if (new)
4623 return new;
4624 }
4625
4626 /* If this is a narrowing SUBREG and our operand is a REG, see if
4627 we can find an equivalence for REG that is an arithmetic operation
4628 in a wider mode where both operands are paradoxical SUBREGs
4629 from objects of our result mode. In that case, we couldn't report
4630 an equivalent value for that operation, since we don't know what the
4631 extra bits will be. But we can find an equivalence for this SUBREG
4632 by folding that operation in the narrow mode. This allows us to
4633 fold arithmetic in narrow modes when the machine only supports
4634 word-sized arithmetic.
4635
4636 Also look for a case where we have a SUBREG whose operand is the
4637 same as our result. If both modes are smaller than a word, we
4638 are simply interpreting a register in different modes and we
4639 can use the inner value. */
4640
4641 if (GET_CODE (folded_arg0) == REG
4642 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4643 && subreg_lowpart_p (x))
4644 {
4645 struct table_elt *elt;
4646
4647 /* We can use HASH here since we know that canon_hash won't be
4648 called. */
4649 elt = lookup (folded_arg0,
4650 HASH (folded_arg0, GET_MODE (folded_arg0)),
4651 GET_MODE (folded_arg0));
4652
4653 if (elt)
4654 elt = elt->first_same_value;
4655
4656 for (; elt; elt = elt->next_same_value)
4657 {
4658 enum rtx_code eltcode = GET_CODE (elt->exp);
4659
4660 /* Just check for unary and binary operations. */
4661 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4662 && GET_CODE (elt->exp) != SIGN_EXTEND
4663 && GET_CODE (elt->exp) != ZERO_EXTEND
4664 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4665 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4666 {
4667 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4668
4669 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4670 op0 = fold_rtx (op0, NULL_RTX);
4671
4672 op0 = equiv_constant (op0);
4673 if (op0)
4674 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4675 op0, mode);
4676 }
4677 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4678 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4679 && eltcode != DIV && eltcode != MOD
4680 && eltcode != UDIV && eltcode != UMOD
4681 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4682 && eltcode != ROTATE && eltcode != ROTATERT
4683 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4684 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4685 == mode))
4686 || CONSTANT_P (XEXP (elt->exp, 0)))
4687 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4688 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4689 == mode))
4690 || CONSTANT_P (XEXP (elt->exp, 1))))
4691 {
4692 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4693 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4694
4695 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4696 op0 = fold_rtx (op0, NULL_RTX);
4697
4698 if (op0)
4699 op0 = equiv_constant (op0);
4700
4701 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4702 op1 = fold_rtx (op1, NULL_RTX);
4703
4704 if (op1)
4705 op1 = equiv_constant (op1);
4706
4707 if (op0 && op1)
4708 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4709 op0, op1);
4710 }
4711
4712 else if (GET_CODE (elt->exp) == SUBREG
4713 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4714 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4715 <= UNITS_PER_WORD)
4716 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4717 new = copy_rtx (SUBREG_REG (elt->exp));
4718
4719 if (new)
4720 return new;
4721 }
4722 }
4723
4724 return x;
4725
4726 case NOT:
4727 case NEG:
4728 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4729 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4730 new = lookup_as_function (XEXP (x, 0), code);
4731 if (new)
4732 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4733 break;
4734
4735 case MEM:
4736 /* If we are not actually processing an insn, don't try to find the
4737 best address. Not only don't we care, but we could modify the
4738 MEM in an invalid way since we have no insn to validate against. */
4739 if (insn != 0)
4740 find_best_addr (insn, &XEXP (x, 0));
4741
4742 {
4743 /* Even if we don't fold in the insn itself,
4744 we can safely do so here, in hopes of getting a constant. */
4745 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4746 rtx base = 0;
4747 HOST_WIDE_INT offset = 0;
4748
4749 if (GET_CODE (addr) == REG
4750 && REGNO_QTY_VALID_P (REGNO (addr))
4751 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4752 && qty_const[reg_qty[REGNO (addr)]] != 0)
4753 addr = qty_const[reg_qty[REGNO (addr)]];
4754
4755 /* If address is constant, split it into a base and integer offset. */
4756 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4757 base = addr;
4758 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4759 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4760 {
4761 base = XEXP (XEXP (addr, 0), 0);
4762 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4763 }
4764 else if (GET_CODE (addr) == LO_SUM
4765 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4766 base = XEXP (addr, 1);
4767
4768 /* If this is a constant pool reference, we can fold it into its
4769 constant to allow better value tracking. */
4770 if (base && GET_CODE (base) == SYMBOL_REF
4771 && CONSTANT_POOL_ADDRESS_P (base))
4772 {
4773 rtx constant = get_pool_constant (base);
4774 enum machine_mode const_mode = get_pool_mode (base);
4775 rtx new;
4776
4777 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4778 constant_pool_entries_cost = COST (constant);
4779
4780 /* If we are loading the full constant, we have an equivalence. */
4781 if (offset == 0 && mode == const_mode)
4782 return constant;
4783
4784 /* If this actually isn't a constant (weird!), we can't do
4785 anything. Otherwise, handle the two most common cases:
4786 extracting a word from a multi-word constant, and extracting
4787 the low-order bits. Other cases don't seem common enough to
4788 worry about. */
4789 if (! CONSTANT_P (constant))
4790 return x;
4791
4792 if (GET_MODE_CLASS (mode) == MODE_INT
4793 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4794 && offset % UNITS_PER_WORD == 0
4795 && (new = operand_subword (constant,
4796 offset / UNITS_PER_WORD,
4797 0, const_mode)) != 0)
4798 return new;
4799
4800 if (((BYTES_BIG_ENDIAN
4801 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4802 || (! BYTES_BIG_ENDIAN && offset == 0))
4803 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4804 return new;
4805 }
4806
4807 /* If this is a reference to a label at a known position in a jump
4808 table, we also know its value. */
4809 if (base && GET_CODE (base) == LABEL_REF)
4810 {
4811 rtx label = XEXP (base, 0);
4812 rtx table_insn = NEXT_INSN (label);
4813
4814 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4815 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4816 {
4817 rtx table = PATTERN (table_insn);
4818
4819 if (offset >= 0
4820 && (offset / GET_MODE_SIZE (GET_MODE (table))
4821 < XVECLEN (table, 0)))
4822 return XVECEXP (table, 0,
4823 offset / GET_MODE_SIZE (GET_MODE (table)));
4824 }
4825 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4826 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4827 {
4828 rtx table = PATTERN (table_insn);
4829
4830 if (offset >= 0
4831 && (offset / GET_MODE_SIZE (GET_MODE (table))
4832 < XVECLEN (table, 1)))
4833 {
4834 offset /= GET_MODE_SIZE (GET_MODE (table));
4835 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4836 XEXP (table, 0));
4837
4838 if (GET_MODE (table) != Pmode)
4839 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4840
4841 return new;
4842 }
4843 }
4844 }
4845
4846 return x;
4847 }
4848 }
4849
4850 const_arg0 = 0;
4851 const_arg1 = 0;
4852 const_arg2 = 0;
4853 mode_arg0 = VOIDmode;
4854
4855 /* Try folding our operands.
4856 Then see which ones have constant values known. */
4857
4858 fmt = GET_RTX_FORMAT (code);
4859 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4860 if (fmt[i] == 'e')
4861 {
4862 rtx arg = XEXP (x, i);
4863 rtx folded_arg = arg, const_arg = 0;
4864 enum machine_mode mode_arg = GET_MODE (arg);
4865 rtx cheap_arg, expensive_arg;
4866 rtx replacements[2];
4867 int j;
4868
4869 /* Most arguments are cheap, so handle them specially. */
4870 switch (GET_CODE (arg))
4871 {
4872 case REG:
4873 /* This is the same as calling equiv_constant; it is duplicated
4874 here for speed. */
4875 if (REGNO_QTY_VALID_P (REGNO (arg))
4876 && qty_const[reg_qty[REGNO (arg)]] != 0
4877 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
4878 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
4879 const_arg
4880 = gen_lowpart_if_possible (GET_MODE (arg),
4881 qty_const[reg_qty[REGNO (arg)]]);
4882 break;
4883
4884 case CONST:
4885 case CONST_INT:
4886 case SYMBOL_REF:
4887 case LABEL_REF:
4888 case CONST_DOUBLE:
4889 const_arg = arg;
4890 break;
4891
4892 #ifdef HAVE_cc0
4893 case CC0:
4894 folded_arg = prev_insn_cc0;
4895 mode_arg = prev_insn_cc0_mode;
4896 const_arg = equiv_constant (folded_arg);
4897 break;
4898 #endif
4899
4900 default:
4901 folded_arg = fold_rtx (arg, insn);
4902 const_arg = equiv_constant (folded_arg);
4903 }
4904
4905 /* For the first three operands, see if the operand
4906 is constant or equivalent to a constant. */
4907 switch (i)
4908 {
4909 case 0:
4910 folded_arg0 = folded_arg;
4911 const_arg0 = const_arg;
4912 mode_arg0 = mode_arg;
4913 break;
4914 case 1:
4915 folded_arg1 = folded_arg;
4916 const_arg1 = const_arg;
4917 break;
4918 case 2:
4919 const_arg2 = const_arg;
4920 break;
4921 }
4922
4923 /* Pick the least expensive of the folded argument and an
4924 equivalent constant argument. */
4925 if (const_arg == 0 || const_arg == folded_arg
4926 || COST (const_arg) > COST (folded_arg))
4927 cheap_arg = folded_arg, expensive_arg = const_arg;
4928 else
4929 cheap_arg = const_arg, expensive_arg = folded_arg;
4930
4931 /* Try to replace the operand with the cheapest of the two
4932 possibilities. If it doesn't work and this is either of the first
4933 two operands of a commutative operation, try swapping them.
4934 If THAT fails, try the more expensive one, provided it is cheaper
4935 than what is already there. */
4936
4937 if (cheap_arg == XEXP (x, i))
4938 continue;
4939
4940 if (insn == 0 && ! copied)
4941 {
4942 x = copy_rtx (x);
4943 copied = 1;
4944 }
4945
4946 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
4947 for (j = 0;
4948 j < 2 && replacements[j]
4949 && COST (replacements[j]) < COST (XEXP (x, i));
4950 j++)
4951 {
4952 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
4953 break;
4954
4955 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
4956 {
4957 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
4958 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
4959
4960 if (apply_change_group ())
4961 {
4962 /* Swap them back to be invalid so that this loop can
4963 continue and flag them to be swapped back later. */
4964 rtx tem;
4965
4966 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
4967 XEXP (x, 1) = tem;
4968 must_swap = 1;
4969 break;
4970 }
4971 }
4972 }
4973 }
4974
4975 else if (fmt[i] == 'E')
4976 /* Don't try to fold inside of a vector of expressions.
4977 Doing nothing is harmless. */
4978 ;
4979
4980 /* If a commutative operation, place a constant integer as the second
4981 operand unless the first operand is also a constant integer. Otherwise,
4982 place any constant second unless the first operand is also a constant. */
4983
4984 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4985 {
4986 if (must_swap || (const_arg0
4987 && (const_arg1 == 0
4988 || (GET_CODE (const_arg0) == CONST_INT
4989 && GET_CODE (const_arg1) != CONST_INT))))
4990 {
4991 register rtx tem = XEXP (x, 0);
4992
4993 if (insn == 0 && ! copied)
4994 {
4995 x = copy_rtx (x);
4996 copied = 1;
4997 }
4998
4999 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5000 validate_change (insn, &XEXP (x, 1), tem, 1);
5001 if (apply_change_group ())
5002 {
5003 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5004 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5005 }
5006 }
5007 }
5008
5009 /* If X is an arithmetic operation, see if we can simplify it. */
5010
5011 switch (GET_RTX_CLASS (code))
5012 {
5013 case '1':
5014 /* We can't simplify extension ops unless we know the original mode. */
5015 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5016 && mode_arg0 == VOIDmode)
5017 break;
5018 new = simplify_unary_operation (code, mode,
5019 const_arg0 ? const_arg0 : folded_arg0,
5020 mode_arg0);
5021 break;
5022
5023 case '<':
5024 /* See what items are actually being compared and set FOLDED_ARG[01]
5025 to those values and CODE to the actual comparison code. If any are
5026 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5027 do anything if both operands are already known to be constant. */
5028
5029 if (const_arg0 == 0 || const_arg1 == 0)
5030 {
5031 struct table_elt *p0, *p1;
5032 rtx true = const_true_rtx, false = const0_rtx;
5033 enum machine_mode mode_arg1;
5034
5035 #ifdef FLOAT_STORE_FLAG_VALUE
5036 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5037 {
5038 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5039 false = CONST0_RTX (mode);
5040 }
5041 #endif
5042
5043 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5044 &mode_arg0, &mode_arg1);
5045 const_arg0 = equiv_constant (folded_arg0);
5046 const_arg1 = equiv_constant (folded_arg1);
5047
5048 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5049 what kinds of things are being compared, so we can't do
5050 anything with this comparison. */
5051
5052 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5053 break;
5054
5055 /* If we do not now have two constants being compared, see if we
5056 can nevertheless deduce some things about the comparison. */
5057 if (const_arg0 == 0 || const_arg1 == 0)
5058 {
5059 /* Is FOLDED_ARG0 the frame pointer plus a constant? Or a non-explicit
5060 constant? These aren't zero, but we don't know their sign. */
5061 if (const_arg1 == const0_rtx
5062 && (NONZERO_BASE_PLUS_P (folded_arg0)
5063 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5064 come out as 0. */
5065 || GET_CODE (folded_arg0) == SYMBOL_REF
5066 #endif
5067 || GET_CODE (folded_arg0) == LABEL_REF
5068 || GET_CODE (folded_arg0) == CONST))
5069 {
5070 if (code == EQ)
5071 return false;
5072 else if (code == NE)
5073 return true;
5074 }
5075
5076 /* See if the two operands are the same. We don't do this
5077 for IEEE floating-point, since we can't assume x == x:
5078 x might be a NaN. */
5079
5080 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5081 || GET_MODE_CLASS (mode_arg0) != MODE_FLOAT)
5082 && (folded_arg0 == folded_arg1
5083 || (GET_CODE (folded_arg0) == REG
5084 && GET_CODE (folded_arg1) == REG
5085 && (reg_qty[REGNO (folded_arg0)]
5086 == reg_qty[REGNO (folded_arg1)]))
5087 || ((p0 = lookup (folded_arg0,
5088 (safe_hash (folded_arg0, mode_arg0)
5089 % NBUCKETS), mode_arg0))
5090 && (p1 = lookup (folded_arg1,
5091 (safe_hash (folded_arg1, mode_arg0)
5092 % NBUCKETS), mode_arg0))
5093 && p0->first_same_value == p1->first_same_value)))
5094 return ((code == EQ || code == LE || code == GE
5095 || code == LEU || code == GEU)
5096 ? true : false);
5097
5098 /* If FOLDED_ARG0 is a register, see if the comparison we are
5099 doing now is either the same as we did before or the reverse
5100 (we only check the reverse if not floating-point). */
5101 else if (GET_CODE (folded_arg0) == REG)
5102 {
5103 int qty = reg_qty[REGNO (folded_arg0)];
5104
5105 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5106 && (comparison_dominates_p (qty_comparison_code[qty], code)
5107 || (comparison_dominates_p (qty_comparison_code[qty],
5108 reverse_condition (code))
5109 && GET_MODE_CLASS (mode_arg0) == MODE_INT))
5110 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5111 || (const_arg1
5112 && rtx_equal_p (qty_comparison_const[qty],
5113 const_arg1))
5114 || (GET_CODE (folded_arg1) == REG
5115 && (reg_qty[REGNO (folded_arg1)]
5116 == qty_comparison_qty[qty]))))
5117 return (comparison_dominates_p (qty_comparison_code[qty],
5118 code)
5119 ? true : false);
5120 }
5121 }
5122 }
5123
5124 /* If we are comparing against zero, see if the first operand is
5125 equivalent to an IOR with a constant. If so, we may be able to
5126 determine the result of this comparison. */
5127
5128 if (const_arg1 == const0_rtx)
5129 {
5130 rtx y = lookup_as_function (folded_arg0, IOR);
5131 rtx inner_const;
5132
5133 if (y != 0
5134 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5135 && GET_CODE (inner_const) == CONST_INT
5136 && INTVAL (inner_const) != 0)
5137 {
5138 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5139 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5140 && (INTVAL (inner_const)
5141 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5142 rtx true = const_true_rtx, false = const0_rtx;
5143
5144 #ifdef FLOAT_STORE_FLAG_VALUE
5145 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5146 {
5147 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5148 false = CONST0_RTX (mode);
5149 }
5150 #endif
5151
5152 switch (code)
5153 {
5154 case EQ:
5155 return false;
5156 case NE:
5157 return true;
5158 case LT: case LE:
5159 if (has_sign)
5160 return true;
5161 break;
5162 case GT: case GE:
5163 if (has_sign)
5164 return false;
5165 break;
5166 }
5167 }
5168 }
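
/* For example, if we are folding (ne r 0) and the table shows the
   hypothetical register r equivalent to (ior:SI x (const_int 5)),
   the result is known true: some low bit of r must be set.  Had
   the constant had its sign bit set instead, (lt r 0) would
   likewise be known true.  */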
5169
5170 new = simplify_relational_operation (code, mode_arg0,
5171 const_arg0 ? const_arg0 : folded_arg0,
5172 const_arg1 ? const_arg1 : folded_arg1);
5173 #ifdef FLOAT_STORE_FLAG_VALUE
5174 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5175 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5176 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode));
5177 #endif
5178 break;
5179
5180 case '2':
5181 case 'c':
5182 switch (code)
5183 {
5184 case PLUS:
5185 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5186 with that LABEL_REF as its second operand. If so, the result is
5187 the first operand of that MINUS. This handles switches with an
5188 ADDR_DIFF_VEC table. */
5189 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5190 {
5191 rtx y = lookup_as_function (folded_arg0, MINUS);
5192
5193 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5194 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5195 return XEXP (y, 0);
5196 }
5197 goto from_plus;
5198
5199 case MINUS:
5200 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5201 If so, produce (PLUS Z C2-C). */
5202 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5203 {
5204 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5205 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5206 return fold_rtx (plus_constant (y, -INTVAL (const_arg1)),
5207 NULL_RTX);
5208 }
5209
5210 /* ... fall through ... */
5211
5212 from_plus:
5213 case SMIN: case SMAX: case UMIN: case UMAX:
5214 case IOR: case AND: case XOR:
5215 case MULT: case DIV: case UDIV:
5216 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5217 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5218 is known to be of similar form, we may be able to replace the
5219 operation with a combined operation. This may eliminate the
5220 intermediate operation if every use is simplified in this way.
5221 Note that the similar optimization done by combine.c only works
5222 if the intermediate operation's result has only one reference. */
5223
5224 if (GET_CODE (folded_arg0) == REG
5225 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5226 {
5227 int is_shift
5228 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5229 rtx y = lookup_as_function (folded_arg0, code);
5230 rtx inner_const;
5231 enum rtx_code associate_code;
5232 rtx new_const;
5233
5234 if (y == 0
5235 || 0 == (inner_const
5236 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5237 || GET_CODE (inner_const) != CONST_INT
5238 /* If we have compiled a statement like
5239 "if (x == (x & mask1))", and now are looking at
5240 "x & mask2", we will have a case where the first operand
5241 of Y is the same as our first operand. Unless we detect
5242 this case, an infinite loop will result. */
5243 || XEXP (y, 0) == folded_arg0)
5244 break;
5245
5246 /* Don't associate these operations if they are a PLUS with the
5247 same constant and it is a power of two. These might be doable
5248 with a pre- or post-increment. Similarly for two subtracts of
5249 identical powers of two with post-decrement. */
5250
5251 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5252 && (0
5253 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5254 || exact_log2 (INTVAL (const_arg1)) >= 0
5255 #endif
5256 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5257 || exact_log2 (- INTVAL (const_arg1)) >= 0
5258 #endif
5259 ))
5260 break;
5261
5262 /* Compute the code used to compose the constants. For example,
5263 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5264
5265 associate_code
5266 = (code == MULT || code == DIV || code == UDIV ? MULT
5267 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5268
5269 new_const = simplify_binary_operation (associate_code, mode,
5270 const_arg1, inner_const);
5271
5272 if (new_const == 0)
5273 break;
5274
5275 /* If we are associating shift operations, don't let this
5276 produce a shift larger than the object. This could
5277 occur when we follow a sign-extend with a right shift on
5278 a machine that does a sign-extend as a pair of shifts. */
5279
5280 if (is_shift && GET_CODE (new_const) == CONST_INT
5281 && INTVAL (new_const) > GET_MODE_BITSIZE (mode))
5282 break;
5283
5284 y = copy_rtx (XEXP (y, 0));
5285
5286 /* If Y contains our first operand (the most common way this
5287 can happen is if Y is a MEM), we would go into an infinite
5288 loop if we tried to fold it, so don't in that case. */
5289
5290 if (! reg_mentioned_p (folded_arg0, y))
5291 y = fold_rtx (y, insn);
5292
5293 return cse_gen_binary (code, mode, y, new_const);
5294 }
5295 }
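
/* For example, if we see (ashift (reg 42) (const_int 3)) and the
   table shows (reg 42) equivalent to (ashift y (const_int 2)) for
   some hypothetical y, ASSOCIATE_CODE is PLUS, the two shift
   counts combine to 5, and the whole expression folds to
   (ashift y (const_int 5)), bypassing the intermediate register.  */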
5296
5297 new = simplify_binary_operation (code, mode,
5298 const_arg0 ? const_arg0 : folded_arg0,
5299 const_arg1 ? const_arg1 : folded_arg1);
5300 break;
5301
5302 case 'o':
5303 /* (lo_sum (high X) X) is simply X. */
5304 if (code == LO_SUM && const_arg0 != 0
5305 && GET_CODE (const_arg0) == HIGH
5306 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5307 return const_arg1;
5308 break;
5309
5310 case '3':
5311 case 'b':
5312 new = simplify_ternary_operation (code, mode, mode_arg0,
5313 const_arg0 ? const_arg0 : folded_arg0,
5314 const_arg1 ? const_arg1 : folded_arg1,
5315 const_arg2 ? const_arg2 : XEXP (x, 2));
5316 break;
5317 }
5318
5319 return new ? new : x;
5320 }
5321 \f
5322 /* Return a constant value currently equivalent to X.
5323 Return 0 if we don't know one. */
5324
5325 static rtx
5326 equiv_constant (x)
5327 rtx x;
5328 {
5329 if (GET_CODE (x) == REG
5330 && REGNO_QTY_VALID_P (REGNO (x))
5331 && qty_const[reg_qty[REGNO (x)]])
5332 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5333
5334 if (x != 0 && CONSTANT_P (x))
5335 return x;
5336
5337 /* If X is a MEM, try to fold it outside the context of any insn to see if
5338 it might be equivalent to a constant. That handles the case where it
5339 is a constant-pool reference. Then try to look it up in the hash table
5340 in case it is something whose value we have seen before. */
5341
5342 if (GET_CODE (x) == MEM)
5343 {
5344 struct table_elt *elt;
5345
5346 x = fold_rtx (x, NULL_RTX);
5347 if (CONSTANT_P (x))
5348 return x;
5349
5350 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5351 if (elt == 0)
5352 return 0;
5353
5354 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5355 if (elt->is_const && CONSTANT_P (elt->exp))
5356 return elt->exp;
5357 }
5358
5359 return 0;
5360 }
5361 \f
5362 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5363 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5364 least-significant part of X.
5365 MODE specifies how big a part of X to return.
5366
5367 If the requested operation cannot be done, 0 is returned.
5368
5369 This is similar to gen_lowpart in emit-rtl.c. */
5370
5371 rtx
5372 gen_lowpart_if_possible (mode, x)
5373 enum machine_mode mode;
5374 register rtx x;
5375 {
5376 rtx result = gen_lowpart_common (mode, x);
5377
5378 if (result)
5379 return result;
5380 else if (GET_CODE (x) == MEM)
5381 {
5382 /* This is the only other case we handle. */
5383 register int offset = 0;
5384 rtx new;
5385
5386 #if WORDS_BIG_ENDIAN
5387 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5388 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5389 #endif
5390 #if BYTES_BIG_ENDIAN
5391 /* Adjust the address so that the address-after-the-data
5392 is unchanged. */
5393 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5394 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5395 #endif
5396 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5397 if (! memory_address_p (mode, XEXP (new, 0)))
5398 return 0;
5399 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5400 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5401 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5402 return new;
5403 }
5404 else
5405 return 0;
5406 }
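
/* For example, taking the low SImode part of a DImode MEM on a
   big-endian target with 4-byte words: the low-order SImode data
   sits in the second word, so the adjustments above produce
   offset == 4 and the result addresses (plus addr 4).  */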
5407 \f
5408 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5409 branch. It will be zero if not.
5410
5411 In certain cases, this can cause us to add an equivalence. For example,
5412 if we are following the taken case of
5413 if (i == 2)
5414 we can add the fact that `i' and `2' are now equivalent.
5415
5416 In any case, we can record that this comparison was passed. If the same
5417 comparison is seen later, we will know its value. */
5418
5419 static void
5420 record_jump_equiv (insn, taken)
5421 rtx insn;
5422 int taken;
5423 {
5424 int cond_known_true;
5425 rtx op0, op1;
5426 enum machine_mode mode, mode0, mode1;
5427 int reversed_nonequality = 0;
5428 enum rtx_code code;
5429
5430 /* Ensure this is the right kind of insn. */
5431 if (! condjump_p (insn) || simplejump_p (insn))
5432 return;
5433
5434 /* See if this jump condition is known true or false. */
5435 if (taken)
5436 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5437 else
5438 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5439
5440 /* Get the type of comparison being done and the operands being compared.
5441 If we had to reverse a non-equality condition, record that fact so we
5442 know that it isn't valid for floating-point. */
5443 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5444 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5445 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5446
5447 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5448 if (! cond_known_true)
5449 {
5450 reversed_nonequality = (code != EQ && code != NE);
5451 code = reverse_condition (code);
5452 }
5453
5454 /* The mode is the mode of the non-constant. */
5455 mode = mode0;
5456 if (mode1 != VOIDmode)
5457 mode = mode1;
5458
5459 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5460 }
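
/* For example, given the insn (set (pc) (if_then_else (eq (reg 42)
   (const_int 2)) (label_ref L) (pc))) with TAKEN nonzero, the EQ
   condition is known true, so record_jump_cond is called with EQ
   and (reg 42) becomes equivalent to (const_int 2) in the code
   that follows the jump.  */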
5461
5462 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5463 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5464 Make any useful entries we can with that information. Called from
5465 the function above and also called recursively. */
5466
5467 static void
5468 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5469 enum rtx_code code;
5470 enum machine_mode mode;
5471 rtx op0, op1;
5472 int reversed_nonequality;
5473 {
5474 int op0_hash_code, op1_hash_code;
5475 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5476 struct table_elt *op0_elt, *op1_elt;
5477
5478 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5479 we know that they are also equal in the smaller mode (this is also
5480 true for all smaller modes whether or not there is a SUBREG, but
5481 is not worth testing for with no SUBREG).  */
5482
5483 if (code == EQ && GET_CODE (op0) == SUBREG
5484 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5485 {
5486 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5487 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5488
5489 record_jump_cond (code, mode, SUBREG_REG (op0),
5490 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5491 reversed_nonequality);
5492 }
5493
5494 if (code == EQ && GET_CODE (op1) == SUBREG
5495 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5496 {
5497 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5498 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5499
5500 record_jump_cond (code, mode, SUBREG_REG (op1),
5501 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5502 reversed_nonequality);
5503 }
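      /* For example, if (subreg:DI (reg:SI 100) 0) is known equal in
	 DImode to (reg:DI 101), then (reg:SI 100) must equal the
	 SImode lowpart of (reg:DI 101); the recursive calls above
	 record that narrower equivalence (register numbers are
	 illustrative).  */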
5504
5505 /* Similarly, if this is an NE comparison, and either is a SUBREG
5506 making a smaller mode, we know the whole thing is also NE. */
5507
5508 if (code == NE && GET_CODE (op0) == SUBREG
5509 && subreg_lowpart_p (op0)
5510 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5511 {
5512 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5513 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5514
5515 record_jump_cond (code, mode, SUBREG_REG (op0),
5516 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5517 reversed_nonequality);
5518 }
5519
5520 if (code == NE && GET_CODE (op1) == SUBREG
5521 && subreg_lowpart_p (op1)
5522 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5523 {
5524 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5525 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5526
5527 record_jump_cond (code, mode, SUBREG_REG (op1),
5528 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5529 reversed_nonequality);
5530 }
5531
5532 /* Hash both operands. */
5533
5534 do_not_record = 0;
5535 hash_arg_in_memory = 0;
5536 hash_arg_in_struct = 0;
5537 op0_hash_code = HASH (op0, mode);
5538 op0_in_memory = hash_arg_in_memory;
5539 op0_in_struct = hash_arg_in_struct;
5540
5541 if (do_not_record)
5542 return;
5543
5544 do_not_record = 0;
5545 hash_arg_in_memory = 0;
5546 hash_arg_in_struct = 0;
5547 op1_hash_code = HASH (op1, mode);
5548 op1_in_memory = hash_arg_in_memory;
5549 op1_in_struct = hash_arg_in_struct;
5550
5551 if (do_not_record)
5552 return;
5553
5554 /* Look up both operands. */
5555 op0_elt = lookup (op0, op0_hash_code, mode);
5556 op1_elt = lookup (op1, op1_hash_code, mode);
5557
5558 /* If we aren't setting two things equal, all we can do is save this
5559 comparison. Similarly if this is floating-point. In the latter
5560 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5561 If we record the equality, we might inadvertently delete code
5562 whose intent was to change -0 to +0. */
5563
5564 if (code != EQ || GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
5565 {
5566 /* If we reversed a floating-point comparison, if OP0 is not a
5567 register, or if OP1 is neither a register nor a constant, we can't
5568 do anything. */
5569
5570 if (GET_CODE (op1) != REG)
5571 op1 = equiv_constant (op1);
5572
5573 if ((reversed_nonequality && GET_MODE_CLASS (mode) != MODE_INT)
5574 || GET_CODE (op0) != REG || op1 == 0)
5575 return;
5576
5577 /* Put OP0 in the hash table if it isn't already. This gives it a
5578 new quantity number. */
5579 if (op0_elt == 0)
5580 {
5581 if (insert_regs (op0, NULL_PTR, 0))
5582 {
5583 rehash_using_reg (op0);
5584 op0_hash_code = HASH (op0, mode);
5585 }
5586
5587 op0_elt = insert (op0, NULL_PTR, op0_hash_code, mode);
5588 op0_elt->in_memory = op0_in_memory;
5589 op0_elt->in_struct = op0_in_struct;
5590 }
5591
5592 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5593 if (GET_CODE (op1) == REG)
5594 {
5595 /* Put OP1 in the hash table so it gets a new quantity number. */
5596 if (op1_elt == 0)
5597 {
5598 if (insert_regs (op1, NULL_PTR, 0))
5599 {
5600 rehash_using_reg (op1);
5601 op1_hash_code = HASH (op1, mode);
5602 }
5603
5604 op1_elt = insert (op1, NULL_PTR, op1_hash_code, mode);
5605 op1_elt->in_memory = op1_in_memory;
5606 op1_elt->in_struct = op1_in_struct;
5607 }
5608
5609 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5610 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5611 }
5612 else
5613 {
5614 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5615 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5616 }
5617
5618 return;
5619 }
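  /* For example, after following the taken arm of a jump on
     (lt (reg 100) (const_int 10)), the quantity of (reg 100) has
     qty_comparison_code LT and qty_comparison_const (const_int 10);
     a later identical comparison can then be resolved without
     re-testing (register number is illustrative).  */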
5620
5621 /* If both are equivalent, merge the two classes. Save this class for
5622 `cse_set_around_loop'. */
5623 if (op0_elt && op1_elt)
5624 {
5625 merge_equiv_classes (op0_elt, op1_elt);
5626 last_jump_equiv_class = op0_elt;
5627 }
5628
5629 /* For whichever side doesn't have an equivalence, make one. */
5630 if (op0_elt == 0)
5631 {
5632 if (insert_regs (op0, op1_elt, 0))
5633 {
5634 rehash_using_reg (op0);
5635 op0_hash_code = HASH (op0, mode);
5636 }
5637
5638 op0_elt = insert (op0, op1_elt, op0_hash_code, mode);
5639 op0_elt->in_memory = op0_in_memory;
5640 op0_elt->in_struct = op0_in_struct;
5641 last_jump_equiv_class = op0_elt;
5642 }
5643
5644 if (op1_elt == 0)
5645 {
5646 if (insert_regs (op1, op0_elt, 0))
5647 {
5648 rehash_using_reg (op1);
5649 op1_hash_code = HASH (op1, mode);
5650 }
5651
5652 op1_elt = insert (op1, op0_elt, op1_hash_code, mode);
5653 op1_elt->in_memory = op1_in_memory;
5654 op1_elt->in_struct = op1_in_struct;
5655 last_jump_equiv_class = op1_elt;
5656 }
5657 }
5658 \f
5659 /* CSE processing for one instruction.
5660 First simplify sources and addresses of all assignments
5661 in the instruction, using previously-computed equivalent values.
5662 Then install the new sources and destinations in the table
5663 of available values.
5664
5665 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5666 the insn. */
5667
5668 /* Data on one SET contained in the instruction. */
5669
5670 struct set
5671 {
5672 /* The SET rtx itself. */
5673 rtx rtl;
5674 /* The SET_SRC of the rtx (the original value, if it is changing). */
5675 rtx src;
5676 /* The hash-table element for the SET_SRC of the SET. */
5677 struct table_elt *src_elt;
5678 /* Hash code for the SET_SRC. */
5679 int src_hash_code;
5680 /* Hash code for the SET_DEST. */
5681 int dest_hash_code;
5682 /* The SET_DEST, with SUBREG, etc., stripped. */
5683 rtx inner_dest;
5684 /* Place where the pointer to the INNER_DEST was found. */
5685 rtx *inner_dest_loc;
5686 /* Nonzero if the SET_SRC is in memory. */
5687 char src_in_memory;
5688 /* Nonzero if the SET_SRC is in a structure. */
5689 char src_in_struct;
5690 /* Nonzero if the SET_SRC contains something
5691 whose value cannot be predicted and understood. */
5692 char src_volatile;
5693 /* Original machine mode, in case it becomes a CONST_INT. */
5694 enum machine_mode mode;
5695 /* A constant equivalent for SET_SRC, if any. */
5696 rtx src_const;
5697 /* Hash code of constant equivalent for SET_SRC. */
5698 int src_const_hash_code;
5699 /* Table entry for constant equivalent for SET_SRC, if any. */
5700 struct table_elt *src_const_elt;
5701 };
5702
5703 static void
5704 cse_insn (insn, in_libcall_block)
5705 rtx insn;
5706 int in_libcall_block;
5707 {
5708 register rtx x = PATTERN (insn);
5709 rtx tem;
5710 register int i;
5711 register int n_sets = 0;
5712
5713 /* Records what this insn does to set CC0. */
5714 rtx this_insn_cc0 = 0;
5715 enum machine_mode this_insn_cc0_mode;
5716 struct write_data writes_memory;
5717 static struct write_data init = {0, 0, 0, 0};
5718
5719 rtx src_eqv = 0;
5720 struct table_elt *src_eqv_elt = 0;
5721 int src_eqv_volatile;
5722 int src_eqv_in_memory;
5723 int src_eqv_in_struct;
5724 int src_eqv_hash_code;
5725
5726 struct set *sets;
5727
5728 this_insn = insn;
5729 writes_memory = init;
5730
5731 /* Find all the SETs and CLOBBERs in this instruction.
5732 Record all the SETs in the array `sets' and count them.
5733 Also determine whether there is a CLOBBER that invalidates
5734 all memory references, or all references at varying addresses. */
5735
5736 if (GET_CODE (x) == SET)
5737 {
5738 sets = (struct set *) alloca (sizeof (struct set));
5739 sets[0].rtl = x;
5740
5741 /* Ignore SETs that are unconditional jumps.
5742 They never need cse processing, so this does not hurt.
5743 The reason is not efficiency but rather
5744 so that we can test at the end for instructions
5745 that have been simplified to unconditional jumps
5746 and not be misled by unchanged instructions
5747 that were unconditional jumps to begin with. */
5748 if (SET_DEST (x) == pc_rtx
5749 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5750 ;
5751
5752 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5753 The hard function value register is used only once, to copy to
5754 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5755 Ensure we invalidate the destination register. On the 80386 no
5756 other code would invalidate it since it is a fixed_reg.
5757 We need not check the return of apply_change_group; see canon_reg. */
5758
5759 else if (GET_CODE (SET_SRC (x)) == CALL)
5760 {
5761 canon_reg (SET_SRC (x), insn);
5762 apply_change_group ();
5763 fold_rtx (SET_SRC (x), insn);
5764 invalidate (SET_DEST (x));
5765 }
5766 else
5767 n_sets = 1;
5768 }
5769 else if (GET_CODE (x) == PARALLEL)
5770 {
5771 register int lim = XVECLEN (x, 0);
5772
5773 sets = (struct set *) alloca (lim * sizeof (struct set));
5774
5775 /* Find all regs explicitly clobbered in this insn,
5776 and ensure they are not replaced with any other regs
5777 elsewhere in this insn.
5778 When a reg that is clobbered is also used for input,
5779 we should presume that that is for a reason,
5780 and we should not substitute some other register
5781 which is not supposed to be clobbered.
5782 Therefore, this loop cannot be merged into the one below
5783 because a CALL may precede a CLOBBER and refer to the
5784 value clobbered. We must not let a canonicalization do
5785 anything in that case. */
5786 for (i = 0; i < lim; i++)
5787 {
5788 register rtx y = XVECEXP (x, 0, i);
5789 if (GET_CODE (y) == CLOBBER
5790 && (GET_CODE (XEXP (y, 0)) == REG
5791 || GET_CODE (XEXP (y, 0)) == SUBREG))
5792 invalidate (XEXP (y, 0));
5793 }
5794
5795 for (i = 0; i < lim; i++)
5796 {
5797 register rtx y = XVECEXP (x, 0, i);
5798 if (GET_CODE (y) == SET)
5799 {
5800 /* As above, we ignore unconditional jumps and call-insns and
5801 ignore the result of apply_change_group. */
5802 if (GET_CODE (SET_SRC (y)) == CALL)
5803 {
5804 canon_reg (SET_SRC (y), insn);
5805 apply_change_group ();
5806 fold_rtx (SET_SRC (y), insn);
5807 invalidate (SET_DEST (y));
5808 }
5809 else if (SET_DEST (y) == pc_rtx
5810 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5811 ;
5812 else
5813 sets[n_sets++].rtl = y;
5814 }
5815 else if (GET_CODE (y) == CLOBBER)
5816 {
5817 /* If we clobber memory, take note of that,
5818 and canon the address.
5819 This does nothing when a register is clobbered
5820 because we have already invalidated the reg. */
5821 if (GET_CODE (XEXP (y, 0)) == MEM)
5822 {
5823 canon_reg (XEXP (y, 0), NULL_RTX);
5824 note_mem_written (XEXP (y, 0), &writes_memory);
5825 }
5826 }
5827 else if (GET_CODE (y) == USE
5828 && ! (GET_CODE (XEXP (y, 0)) == REG
5829 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
5830 canon_reg (y, NULL_RTX);
5831 else if (GET_CODE (y) == CALL)
5832 {
5833 /* The result of apply_change_group can be ignored; see
5834 canon_reg. */
5835 canon_reg (y, insn);
5836 apply_change_group ();
5837 fold_rtx (y, insn);
5838 }
5839 }
5840 }
5841 else if (GET_CODE (x) == CLOBBER)
5842 {
5843 if (GET_CODE (XEXP (x, 0)) == MEM)
5844 {
5845 canon_reg (XEXP (x, 0), NULL_RTX);
5846 note_mem_written (XEXP (x, 0), &writes_memory);
5847 }
5848 }
5849
5850 /* Canonicalize a USE of a pseudo register or memory location. */
5851 else if (GET_CODE (x) == USE
5852 && ! (GET_CODE (XEXP (x, 0)) == REG
5853 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5854 canon_reg (XEXP (x, 0), NULL_RTX);
5855 else if (GET_CODE (x) == CALL)
5856 {
5857 /* The result of apply_change_group can be ignored; see canon_reg. */
5858 canon_reg (x, insn);
5859 apply_change_group ();
5860 fold_rtx (x, insn);
5861 }
5862
5863 if (n_sets == 1 && REG_NOTES (insn) != 0)
5864 {
5865 /* Store the equivalent value in SRC_EQV, if different. */
5866 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5867
5868 if (tem && ! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
5869 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
5870 }
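  /* For example, an insn (set (reg 100) (mult (reg 101) (reg 102)))
     carrying a note (REG_EQUAL (const_int 12)) gives SRC_EQV of
     (const_int 12), an equivalent form of the source that may be
     cheaper than the multiplication (register numbers are
     illustrative).  */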
5871
5872 /* Canonicalize sources and addresses of destinations.
5873 We do this in a separate pass to avoid problems when a MATCH_DUP is
5874 present in the insn pattern. In that case, we want to ensure that
5875 we don't break the duplicate nature of the pattern. So we will replace
5876 both operands at the same time. Otherwise, we would fail to find an
5877 equivalent substitution in the loop calling validate_change below.
5878
5879 We used to suppress canonicalization of DEST if it appears in SRC,
5880 but we don't do this any more. */
5881
5882 for (i = 0; i < n_sets; i++)
5883 {
5884 rtx dest = SET_DEST (sets[i].rtl);
5885 rtx src = SET_SRC (sets[i].rtl);
5886 rtx new = canon_reg (src, insn);
5887
5888 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
5889 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5890 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5891 || insn_n_dups[recog_memoized (insn)] > 0)
5892 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5893 else
5894 SET_SRC (sets[i].rtl) = new;
5895
5896 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5897 {
5898 validate_change (insn, &XEXP (dest, 1),
5899 canon_reg (XEXP (dest, 1), insn), 1);
5900 validate_change (insn, &XEXP (dest, 2),
5901 canon_reg (XEXP (dest, 2), insn), 1);
5902 }
5903
5904 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5905 || GET_CODE (dest) == ZERO_EXTRACT
5906 || GET_CODE (dest) == SIGN_EXTRACT)
5907 dest = XEXP (dest, 0);
5908
5909 if (GET_CODE (dest) == MEM)
5910 canon_reg (dest, insn);
5911 }
5912
5913 /* Now that we have done all the replacements, we can apply the change
5914 group and see if they all work. Note that this will cause some
5915 canonicalizations that would have worked individually not to be applied
5916 because some other canonicalization didn't work, but this should not
5917 occur often.
5918
5919 The result of apply_change_group can be ignored; see canon_reg. */
5920
5921 apply_change_group ();
5922
5923 /* Set sets[i].src_elt to the class each source belongs to.
5924 Detect assignments from or to volatile things
5925 and set sets[i] to zero so they will be ignored
5926 in the rest of this function.
5927
5928 Nothing in this loop changes the hash table or the register chains. */
5929
5930 for (i = 0; i < n_sets; i++)
5931 {
5932 register rtx src, dest;
5933 register rtx src_folded;
5934 register struct table_elt *elt = 0, *p;
5935 enum machine_mode mode;
5936 rtx src_eqv_here;
5937 rtx src_const = 0;
5938 rtx src_related = 0;
5939 struct table_elt *src_const_elt = 0;
5940 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
5941 int src_related_cost = 10000, src_elt_cost = 10000;
5942 /* Set non-zero if we need to call force_const_mem on the
5943 contents of src_folded before using it. */
5944 int src_folded_force_flag = 0;
5945
5946 dest = SET_DEST (sets[i].rtl);
5947 src = SET_SRC (sets[i].rtl);
5948
5949 /* If SRC is a constant that has no machine mode,
5950 hash it with the destination's machine mode.
5951 This way we can keep different modes separate. */
5952
5953 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5954 sets[i].mode = mode;
5955
5956 if (src_eqv)
5957 {
5958 enum machine_mode eqvmode = mode;
5959 if (GET_CODE (dest) == STRICT_LOW_PART)
5960 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5961 do_not_record = 0;
5962 hash_arg_in_memory = 0;
5963 hash_arg_in_struct = 0;
5964 src_eqv = fold_rtx (src_eqv, insn);
5965 src_eqv_hash_code = HASH (src_eqv, eqvmode);
5966
5967 /* Find the equivalence class for the equivalent expression. */
5968
5969 if (!do_not_record)
5970 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, eqvmode);
5971
5972 src_eqv_volatile = do_not_record;
5973 src_eqv_in_memory = hash_arg_in_memory;
5974 src_eqv_in_struct = hash_arg_in_struct;
5975 }
5976
5977 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5978 value of the INNER register, not the destination. So it is not
5979 a legal substitution for the source. But save it for later. */
5980 if (GET_CODE (dest) == STRICT_LOW_PART)
5981 src_eqv_here = 0;
5982 else
5983 src_eqv_here = src_eqv;
5984
5985 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5986 simplified result, which may not necessarily be valid. */
5987 src_folded = fold_rtx (src, insn);
5988
5989 /* If storing a constant in a bitfield, pre-truncate the constant
5990 so we will be able to record it later. */
5991 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5992 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5993 {
5994 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5995
5996 if (GET_CODE (src) == CONST_INT
5997 && GET_CODE (width) == CONST_INT
5998 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5999 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6000 src_folded
6001 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6002 << INTVAL (width)) - 1));
6003 }
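      /* For example, storing (const_int 0x1ff) into a ZERO_EXTRACT of
	 width 8: the discarded-bits test (0x1ff & (-1 << 8)) is
	 nonzero, so SRC_FOLDED becomes 0x1ff & ((1 << 8) - 1), i.e.
	 (const_int 0xff), the value the field will actually hold.  */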
6004
6005 /* Compute SRC's hash code, and also notice if it
6006 should not be recorded at all. In that case,
6007 prevent any further processing of this assignment. */
6008 do_not_record = 0;
6009 hash_arg_in_memory = 0;
6010 hash_arg_in_struct = 0;
6011
6012 sets[i].src = src;
6013 sets[i].src_hash_code = HASH (src, mode);
6014 sets[i].src_volatile = do_not_record;
6015 sets[i].src_in_memory = hash_arg_in_memory;
6016 sets[i].src_in_struct = hash_arg_in_struct;
6017
6018 #if 0
6019 /* It is no longer clear why we used to do this, but it doesn't
6020 appear to still be needed. So let's try without it since this
6021 code hurts cse'ing widened ops. */
6022 /* If source is a perverse subreg (such as QI treated as an SI),
6023 treat it as volatile. It may do the work of an SI in one context
6024 where the extra bits are not being used, but cannot replace an SI
6025 in general. */
6026 if (GET_CODE (src) == SUBREG
6027 && (GET_MODE_SIZE (GET_MODE (src))
6028 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6029 sets[i].src_volatile = 1;
6030 #endif
6031
6032 /* Locate all possible equivalent forms for SRC. Try to replace
6033 SRC in the insn with each cheaper equivalent.
6034
6035 We have the following types of equivalents: SRC itself, a folded
6036 version, a value given in a REG_EQUAL note, or a value related
6037 to a constant.
6038
6039 Each of these equivalents may be part of an additional class
6040 of equivalents (if more than one is in the table, they must be in
6041 the same class; we check for this).
6042
6043 If the source is volatile, we don't do any table lookups.
6044
6045 We note any constant equivalent for possible later use in a
6046 REG_NOTE. */
6047
6048 if (!sets[i].src_volatile)
6049 elt = lookup (src, sets[i].src_hash_code, mode);
6050
6051 sets[i].src_elt = elt;
6052
6053 if (elt && src_eqv_here && src_eqv_elt)
6054 {
6055 if (elt->first_same_value != src_eqv_elt->first_same_value)
6056 {
6057 /* The REG_EQUAL is indicating that two formerly distinct
6058 classes are now equivalent. So merge them. */
6059 merge_equiv_classes (elt, src_eqv_elt);
6060 src_eqv_hash_code = HASH (src_eqv, elt->mode);
6061 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, elt->mode);
6062 }
6063
6064 src_eqv_here = 0;
6065 }
6066
6067 else if (src_eqv_elt)
6068 elt = src_eqv_elt;
6069
6070 /* Try to find a constant somewhere and record it in `src_const'.
6071 Record its table element, if any, in `src_const_elt'. Look in
6072 any known equivalences first. (If the constant is not in the
6073 table, also set `sets[i].src_const_hash_code'). */
6074 if (elt)
6075 for (p = elt->first_same_value; p; p = p->next_same_value)
6076 if (p->is_const)
6077 {
6078 src_const = p->exp;
6079 src_const_elt = elt;
6080 break;
6081 }
6082
6083 if (src_const == 0
6084 && (CONSTANT_P (src_folded)
6085 /* Consider (minus (label_ref L1) (label_ref L2)) as
6086 "constant" here so we will record it. This allows us
6087 to fold switch statements when an ADDR_DIFF_VEC is used. */
6088 || (GET_CODE (src_folded) == MINUS
6089 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6090 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6091 src_const = src_folded, src_const_elt = elt;
6092 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6093 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6094
6095 /* If we don't know if the constant is in the table, get its
6096 hash code and look it up. */
6097 if (src_const && src_const_elt == 0)
6098 {
6099 sets[i].src_const_hash_code = HASH (src_const, mode);
6100 src_const_elt = lookup (src_const, sets[i].src_const_hash_code,
6101 mode);
6102 }
6103
6104 sets[i].src_const = src_const;
6105 sets[i].src_const_elt = src_const_elt;
6106
6107 /* If the constant and our source are both in the table, mark them as
6108 equivalent. Otherwise, if a constant is in the table but the source
6109 isn't, set ELT to it. */
6110 if (src_const_elt && elt
6111 && src_const_elt->first_same_value != elt->first_same_value)
6112 merge_equiv_classes (elt, src_const_elt);
6113 else if (src_const_elt && elt == 0)
6114 elt = src_const_elt;
6115
6116 /* See if there is a register linearly related to a constant
6117 equivalent of SRC. */
6118 if (src_const
6119 && (GET_CODE (src_const) == CONST
6120 || (src_const_elt && src_const_elt->related_value != 0)))
6121 {
6122 src_related = use_related_value (src_const, src_const_elt);
6123 if (src_related)
6124 {
6125 struct table_elt *src_related_elt
6126 = lookup (src_related, HASH (src_related, mode), mode);
6127 if (src_related_elt && elt)
6128 {
6129 if (elt->first_same_value
6130 != src_related_elt->first_same_value)
6131 /* This can occur when we previously saw a CONST
6132 involving a SYMBOL_REF and then see the SYMBOL_REF
6133 twice. Merge the involved classes. */
6134 merge_equiv_classes (elt, src_related_elt);
6135
6136 src_related = 0;
6137 src_related_elt = 0;
6138 }
6139 else if (src_related_elt && elt == 0)
6140 elt = src_related_elt;
6141 }
6142 }
6143
6144 /* See if we have a CONST_INT that is already in a register in a
6145 wider mode. */
6146
6147 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6148 && GET_MODE_CLASS (mode) == MODE_INT
6149 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6150 {
6151 enum machine_mode wider_mode;
6152
6153 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6154 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6155 && src_related == 0;
6156 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6157 {
6158 struct table_elt *const_elt
6159 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6160
6161 if (const_elt == 0)
6162 continue;
6163
6164 for (const_elt = const_elt->first_same_value;
6165 const_elt; const_elt = const_elt->next_same_value)
6166 if (GET_CODE (const_elt->exp) == REG)
6167 {
6168 src_related = gen_lowpart_if_possible (mode,
6169 const_elt->exp);
6170 break;
6171 }
6172 }
6173 }
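      /* For example, if (reg:SI 100) is already known to hold
	 (const_int 7) and this insn needs (const_int 7) in HImode,
	 the loop above sets SRC_RELATED to the HImode lowpart of
	 (reg:SI 100), avoiding another load of the constant
	 (register number is illustrative).  */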
6174
6175 /* Another possibility is that we have an AND with a constant in
6176 a mode narrower than a word. If so, it might have been generated
6177 as part of an "if" which would narrow the AND. If we already
6178 have done the AND in a wider mode, we can use a SUBREG of that
6179 value. */
6180
6181 if (flag_expensive_optimizations && ! src_related
6182 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6183 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6184 {
6185 enum machine_mode tmode;
6186 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6187
6188 for (tmode = GET_MODE_WIDER_MODE (mode);
6189 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6190 tmode = GET_MODE_WIDER_MODE (tmode))
6191 {
6192 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6193 struct table_elt *larger_elt;
6194
6195 if (inner)
6196 {
6197 PUT_MODE (new_and, tmode);
6198 XEXP (new_and, 0) = inner;
6199 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6200 if (larger_elt == 0)
6201 continue;
6202
6203 for (larger_elt = larger_elt->first_same_value;
6204 larger_elt; larger_elt = larger_elt->next_same_value)
6205 if (GET_CODE (larger_elt->exp) == REG)
6206 {
6207 src_related
6208 = gen_lowpart_if_possible (mode, larger_elt->exp);
6209 break;
6210 }
6211
6212 if (src_related)
6213 break;
6214 }
6215 }
6216 }
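      /* For example, for (and:QI X (const_int 15)) where the wider
	 (and:SI X' (const_int 15)), with X' the SImode form of X, is
	 already available in (reg 101), the search above sets
	 SRC_RELATED to the QImode lowpart of (reg 101) rather than
	 redoing the AND (register number is illustrative).  */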
6217
6218 if (src == src_folded)
6219 src_folded = 0;
6220
6221 /* At this point, ELT, if non-zero, points to a class of expressions
6222 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6223 and SRC_RELATED, if non-zero, each contain additional equivalent
6224 expressions. Prune these latter expressions by deleting expressions
6225 already in the equivalence class.
6226
6227 Check for an equivalent identical to the destination. If found,
6228 this is the preferred equivalent since it will likely lead to
6229 elimination of the insn. Indicate this by placing it in
6230 `src_related'. */
6231
6232 if (elt) elt = elt->first_same_value;
6233 for (p = elt; p; p = p->next_same_value)
6234 {
6235 enum rtx_code code = GET_CODE (p->exp);
6236
6237 /* If the expression is not valid, ignore it. Then we do not
6238 have to check for validity below. In most cases, we can use
6239 `rtx_equal_p', since canonicalization has already been done. */
6240 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6241 continue;
6242
6243 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6244 src = 0;
6245 else if (src_folded && GET_CODE (src_folded) == code
6246 && rtx_equal_p (src_folded, p->exp))
6247 src_folded = 0;
6248 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6249 && rtx_equal_p (src_eqv_here, p->exp))
6250 src_eqv_here = 0;
6251 else if (src_related && GET_CODE (src_related) == code
6252 && rtx_equal_p (src_related, p->exp))
6253 src_related = 0;
6254
6255 /* If this is the same as the destination of the insn, we want
6256 to prefer it. Copy it to src_related. The code below will
6257 then give it a negative cost. */
6258 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6259 src_related = dest;
6260
6261 }
6262
6263 /* Find the cheapest valid equivalent, trying all the available
6264 possibilities. Prefer items not in the hash table to ones
6265 that are when they are equal cost. Note that we can never
6266 worsen an insn as the current contents will also succeed.
6267 If we find an equivalent identical to the destination, use it as best,
6268 since this insn will probably be eliminated in that case. */
6269 if (src)
6270 {
6271 if (rtx_equal_p (src, dest))
6272 src_cost = -1;
6273 else
6274 src_cost = COST (src);
6275 }
6276
6277 if (src_eqv_here)
6278 {
6279 if (rtx_equal_p (src_eqv_here, dest))
6280 src_eqv_cost = -1;
6281 else
6282 src_eqv_cost = COST (src_eqv_here);
6283 }
6284
6285 if (src_folded)
6286 {
6287 if (rtx_equal_p (src_folded, dest))
6288 src_folded_cost = -1;
6289 else
6290 src_folded_cost = COST (src_folded);
6291 }
6292
6293 if (src_related)
6294 {
6295 if (rtx_equal_p (src_related, dest))
6296 src_related_cost = -1;
6297 else
6298 src_related_cost = COST (src_related);
6299 }
6300
6301 /* If this was an indirect jump insn, a known label will really be
6302 cheaper even though it looks more expensive. */
6303 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6304 src_folded = src_const, src_folded_cost = -1;
6305
6306 /* Terminate loop when replacement made. This must terminate since
6307 the current contents will be tested and will always be valid. */
6308 while (1)
6309 {
6310 rtx trial;
6311
6312 /* Skip invalid entries. */
6313 while (elt && GET_CODE (elt->exp) != REG
6314 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6315 elt = elt->next_same_value;
6316
6317 if (elt) src_elt_cost = elt->cost;
6318
6319 /* Find cheapest and skip it for the next time. For items
6320 of equal cost, use this order:
6321 src_folded, src, src_eqv, src_related and hash table entry. */
6322 if (src_folded_cost <= src_cost
6323 && src_folded_cost <= src_eqv_cost
6324 && src_folded_cost <= src_related_cost
6325 && src_folded_cost <= src_elt_cost)
6326 {
6327 trial = src_folded, src_folded_cost = 10000;
6328 if (src_folded_force_flag)
6329 trial = force_const_mem (mode, trial);
6330 }
6331 else if (src_cost <= src_eqv_cost
6332 && src_cost <= src_related_cost
6333 && src_cost <= src_elt_cost)
6334 trial = src, src_cost = 10000;
6335 else if (src_eqv_cost <= src_related_cost
6336 && src_eqv_cost <= src_elt_cost)
6337 trial = src_eqv_here, src_eqv_cost = 10000;
6338 else if (src_related_cost <= src_elt_cost)
6339 trial = src_related, src_related_cost = 10000;
6340 else
6341 {
6342 trial = copy_rtx (elt->exp);
6343 elt = elt->next_same_value;
6344 src_elt_cost = 10000;
6345 }
6346
6347 /* We don't normally have an insn matching (set (pc) (pc)), so
6348 check for this separately here. We will delete such an
6349 insn below.
6350
6351 Tablejump insns contain a USE of the table, so simply replacing
6352 the operand with the constant won't match. This is simply an
6353 unconditional branch, however, and is therefore valid. Just
6354 insert the substitution here and we will delete and re-emit
6355 the insn later. */
6356
6357 if (n_sets == 1 && dest == pc_rtx
6358 && (trial == pc_rtx
6359 || (GET_CODE (trial) == LABEL_REF
6360 && ! condjump_p (insn))))
6361 {
6362 /* If TRIAL is a label in front of a jump table, we are
6363 really falling through the switch (this is how casesi
6364 insns work), so we must branch around the table. */
6365 if (GET_CODE (trial) == CODE_LABEL
6366 && NEXT_INSN (trial) != 0
6367 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6368 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6369 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6370
6371 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6372
6373 SET_SRC (sets[i].rtl) = trial;
6374 break;
6375 }
6376
6377 /* Look for a substitution that makes a valid insn. */
6378 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6379 {
6380 /* The result of apply_change_group can be ignored; see
6381 canon_reg. */
6382
6383 validate_change (insn, &SET_SRC (sets[i].rtl),
6384 canon_reg (SET_SRC (sets[i].rtl), insn),
6385 1);
6386 apply_change_group ();
6387 break;
6388 }
6389
6390 /* If we previously found constant pool entries for
6391 constants and this is a constant, try making a
6392 pool entry. Put it in src_folded unless we already have done
6393 this since that is where it likely came from. */
6394
6395 else if (constant_pool_entries_cost
6396 && CONSTANT_P (trial)
6397 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
6398 && GET_MODE_CLASS (mode) != MODE_CC)
6399 {
6400 src_folded_force_flag = 1;
6401 src_folded = trial;
6402 src_folded_cost = constant_pool_entries_cost;
6403 }
6404 }
6405
6406 src = SET_SRC (sets[i].rtl);
6407
6408 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6409 However, there is an important exception: If both are registers
6410 that are not the head of their equivalence class, replace SET_SRC
6411 with the head of the class. If we do not do this, we will have
6412 both registers live over a portion of the basic block. This way,
6413 their lifetimes will likely abut instead of overlapping. */
6414 if (GET_CODE (dest) == REG
6415 && REGNO_QTY_VALID_P (REGNO (dest))
6416 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6417 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6418 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6419 /* Don't do this if the original insn had a hard reg as
6420 SET_SRC. */
6421 && (GET_CODE (sets[i].src) != REG
6422 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6423 /* We can't call canon_reg here because it won't do anything if
6424 SRC is a hard register. */
6425 {
6426 int first = qty_first_reg[reg_qty[REGNO (src)]];
6427
6428 src = SET_SRC (sets[i].rtl)
6429 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6430 : gen_rtx (REG, GET_MODE (src), first);
6431
6432 /* If we had a constant that is cheaper than what we are now
6433 setting SRC to, use that constant. We ignored it when we
6434 thought we could make this into a no-op. */
6435 if (src_const && COST (src_const) < COST (src)
6436 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6437 src = src_const;
6438 }
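  /* For example, if canonicalization has turned this insn into
     (set (reg 101) (reg 101)) but (reg 100) is the oldest register in
     the same equivalence class, we use (set (reg 101) (reg 100))
     instead, so the two lifetimes abut rather than overlap (register
     numbers are illustrative).  */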
6439
6440 /* If we made a change, recompute SRC values. */
6441 if (src != sets[i].src)
6442 {
6443 do_not_record = 0;
6444 hash_arg_in_memory = 0;
6445 hash_arg_in_struct = 0;
6446 sets[i].src = src;
6447 sets[i].src_hash_code = HASH (src, mode);
6448 sets[i].src_volatile = do_not_record;
6449 sets[i].src_in_memory = hash_arg_in_memory;
6450 sets[i].src_in_struct = hash_arg_in_struct;
6451 sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
6452 }
6453
6454 /* If this is a single SET, we are setting a register, and we have an
6455 equivalent constant, we want to add a REG_NOTE. We don't want
6456 to write a REG_EQUAL note for a constant pseudo since verifying that
6457 that pseudo hasn't been eliminated is a pain. Such a note also
6458 won't help anything. */
6459 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6460 && GET_CODE (src_const) != REG)
6461 {
6462 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6463
6464 /* Record the actual constant value in a REG_EQUAL note, making
6465 a new one if one does not already exist. */
6466 if (tem)
6467 XEXP (tem, 0) = src_const;
6468 else
6469 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6470 src_const, REG_NOTES (insn));
6471
6472 /* If storing a constant value in a register that
6473 previously held the constant value 0,
6474 record this fact with a REG_WAS_0 note on this insn.
6475
6476 Note that the *register* is required to have previously held 0,
6477 not just any register in the quantity and we must point to the
6478 insn that set that register to zero.
6479
6480 Rather than track each register individually, we just see if
6481 the last set for this quantity was for this register. */
6482
6483 if (REGNO_QTY_VALID_P (REGNO (dest))
6484 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6485 {
6486 /* See if we previously had a REG_WAS_0 note. */
6487 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6488 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6489
6490 if ((tem = single_set (const_insn)) != 0
6491 && rtx_equal_p (SET_DEST (tem), dest))
6492 {
6493 if (note)
6494 XEXP (note, 0) = const_insn;
6495 else
6496 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6497 const_insn, REG_NOTES (insn));
6498 }
6499 }
6500 }
6501
6502 /* Now deal with the destination. */
6503 do_not_record = 0;
6504 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6505
6506 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6507 to the MEM or REG within it. */
6508 while (GET_CODE (dest) == SIGN_EXTRACT
6509 || GET_CODE (dest) == ZERO_EXTRACT
6510 || GET_CODE (dest) == SUBREG
6511 || GET_CODE (dest) == STRICT_LOW_PART)
6512 {
6513 sets[i].inner_dest_loc = &XEXP (dest, 0);
6514 dest = XEXP (dest, 0);
6515 }
6516
6517 sets[i].inner_dest = dest;
6518
6519 if (GET_CODE (dest) == MEM)
6520 {
6521 dest = fold_rtx (dest, insn);
6522
6523 /* Decide whether we invalidate everything in memory,
6524 or just things at non-fixed places.
6525 Writing a large aggregate must invalidate everything
6526 because we don't know how long it is. */
6527 note_mem_written (dest, &writes_memory);
6528 }
6529
6530 /* Compute the hash code of the destination now,
6531 before the effects of this instruction are recorded,
6532 since the register values used in the address computation
6533 are those before this instruction. */
6534 sets[i].dest_hash_code = HASH (dest, mode);
6535
6536 /* Don't enter a bit-field in the hash table
6537 because the value in it after the store
6538 may not equal what was stored, due to truncation. */
6539
6540 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6541 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6542 {
6543 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6544
6545 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6546 && GET_CODE (width) == CONST_INT
6547 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6548 && ! (INTVAL (src_const)
6549 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6550 /* Exception: if the value is constant,
6551 and it won't be truncated, record it. */
6552 ;
6553 else
6554 {
6555 /* This is chosen so that the destination will be invalidated
6556 but no new value will be recorded.
6557 We must invalidate because sometimes constant
6558 values can be recorded for bitfields. */
6559 sets[i].src_elt = 0;
6560 sets[i].src_volatile = 1;
6561 src_eqv = 0;
6562 src_eqv_elt = 0;
6563 }
6564 }
6565
6566 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6567 the insn. */
6568 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6569 {
6570 PUT_CODE (insn, NOTE);
6571 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6572 NOTE_SOURCE_FILE (insn) = 0;
6573 cse_jumps_altered = 1;
6574 /* One less use of the label this insn used to jump to. */
6575 --LABEL_NUSES (JUMP_LABEL (insn));
6576 /* No more processing for this set. */
6577 sets[i].rtl = 0;
6578 }
6579
6580 /* If this SET is now setting PC to a label, we know it used to
6581 be a conditional or computed branch. So we see if we can follow
6582 it. If it was a computed branch, delete it and re-emit. */
6583 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6584 {
6585 rtx p;
6586
6587 /* If this is not in the format for a simple branch and
6588 we are the only SET in it, re-emit it. */
6589 if (! simplejump_p (insn) && n_sets == 1)
6590 {
6591 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6592 JUMP_LABEL (new) = XEXP (src, 0);
6593 LABEL_NUSES (XEXP (src, 0))++;
6594 delete_insn (insn);
6595 insn = new;
6596 }
6597
6598 /* Now that we've converted this jump to an unconditional jump,
6599 there is dead code after it. Delete the dead code until we
6600 reach a BARRIER, the end of the function, or a label. Do
6601 not delete NOTEs except for NOTE_INSN_DELETED since later
6602 phases assume these notes are retained. */
6603
6604 p = insn;
6605
6606 while (NEXT_INSN (p) != 0
6607 && GET_CODE (NEXT_INSN (p)) != BARRIER
6608 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6609 {
6610 if (GET_CODE (NEXT_INSN (p)) != NOTE
6611 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6612 delete_insn (NEXT_INSN (p));
6613 else
6614 p = NEXT_INSN (p);
6615 }
6616
6617 /* If we don't have a BARRIER immediately after INSN, put one there.
6618 Much code assumes that there are no NOTEs between a JUMP_INSN and
6619 BARRIER. */
6620
6621 if (NEXT_INSN (insn) == 0
6622 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6623 emit_barrier_after (insn);
6624
6625 /* We might have two BARRIERs separated by notes. Delete the second
6626 one if so. */
6627
6628 if (p != insn && NEXT_INSN (p) != 0
6629 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6630 delete_insn (NEXT_INSN (p));
6631
6632 cse_jumps_altered = 1;
6633 sets[i].rtl = 0;
6634 }
6635
6636 /* If destination is volatile, invalidate it and then do no further
6637 processing for this assignment. */
6638
6639 else if (do_not_record)
6640 {
6641 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6642 || GET_CODE (dest) == MEM)
6643 invalidate (dest);
6644 sets[i].rtl = 0;
6645 }
6646
6647 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6648 sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
6649
6650 #ifdef HAVE_cc0
6651 /* If setting CC0, record what it was set to, or a constant, if it
6652 is equivalent to a constant. If it is being set to a floating-point
6653 value, make a COMPARE with the appropriate constant of 0. If we
6654 don't do this, later code can interpret this as a test against
6655 const0_rtx, which can cause problems if we try to put it into an
6656 insn as a floating-point operand. */
6657 if (dest == cc0_rtx)
6658 {
6659 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6660 this_insn_cc0_mode = mode;
6661 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
6662 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6663 CONST0_RTX (mode));
6664 }
6665 #endif
6666 }
6667
6668 /* Now enter all non-volatile source expressions in the hash table
6669 if they are not already present.
6670 Record their equivalence classes in src_elt.
6671 This way we can insert the corresponding destinations into
6672 the same classes even if the actual sources are no longer in them
6673 (having been invalidated). */
6674
6675 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6676 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6677 {
6678 register struct table_elt *elt;
6679 register struct table_elt *classp = sets[0].src_elt;
6680 rtx dest = SET_DEST (sets[0].rtl);
6681 enum machine_mode eqvmode = GET_MODE (dest);
6682
6683 if (GET_CODE (dest) == STRICT_LOW_PART)
6684 {
6685 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6686 classp = 0;
6687 }
6688 if (insert_regs (src_eqv, classp, 0))
6689 src_eqv_hash_code = HASH (src_eqv, eqvmode);
6690 elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
6691 elt->in_memory = src_eqv_in_memory;
6692 elt->in_struct = src_eqv_in_struct;
6693 src_eqv_elt = elt;
6694 }
6695
6696 for (i = 0; i < n_sets; i++)
6697 if (sets[i].rtl && ! sets[i].src_volatile
6698 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6699 {
6700 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6701 {
6702 /* REG_EQUAL in setting a STRICT_LOW_PART
6703 gives an equivalent for the entire destination register,
6704 not just for the subreg being stored in now.
6705 This is a more interesting equivalence, so we arrange later
6706 to treat the entire reg as the destination. */
6707 sets[i].src_elt = src_eqv_elt;
6708 sets[i].src_hash_code = src_eqv_hash_code;
6709 }
6710 else
6711 {
6712 /* Insert source and constant equivalent into hash table, if not
6713 already present. */
6714 register struct table_elt *classp = src_eqv_elt;
6715 register rtx src = sets[i].src;
6716 register rtx dest = SET_DEST (sets[i].rtl);
6717 enum machine_mode mode
6718 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6719
6720 if (sets[i].src_elt == 0)
6721 {
6722 register struct table_elt *elt;
6723
6724 /* Note that these insert_regs calls cannot remove
6725 any of the src_elt's, because they would have failed to
6726 match if not still valid. */
6727 if (insert_regs (src, classp, 0))
6728 sets[i].src_hash_code = HASH (src, mode);
6729 elt = insert (src, classp, sets[i].src_hash_code, mode);
6730 elt->in_memory = sets[i].src_in_memory;
6731 elt->in_struct = sets[i].src_in_struct;
6732 sets[i].src_elt = classp = elt;
6733 }
6734
6735 if (sets[i].src_const && sets[i].src_const_elt == 0
6736 && src != sets[i].src_const
6737 && ! rtx_equal_p (sets[i].src_const, src))
6738 sets[i].src_elt = insert (sets[i].src_const, classp,
6739 sets[i].src_const_hash_code, mode);
6740 }
6741 }
6742 else if (sets[i].src_elt == 0)
6743 /* If we did not insert the source into the hash table (e.g., it was
6744 volatile), note the equivalence class for the REG_EQUAL value, if any,
6745 so that the destination goes into that class. */
6746 sets[i].src_elt = src_eqv_elt;
6747
6748 invalidate_from_clobbers (&writes_memory, x);
6749
6750 /* Some registers are invalidated by subroutine calls. Memory is
6751 invalidated by non-constant calls. */
6752
6753 if (GET_CODE (insn) == CALL_INSN)
6754 {
6755 static struct write_data everything = {0, 1, 1, 1};
6756
6757 if (! CONST_CALL_P (insn))
6758 invalidate_memory (&everything);
6759 invalidate_for_call ();
6760 }
6761
6762 /* Now invalidate everything set by this instruction.
6763 If a SUBREG or other funny destination is being set,
6764 sets[i].rtl is still nonzero, so here we invalidate the reg
6765 a part of which is being set. */
6766
6767 for (i = 0; i < n_sets; i++)
6768 if (sets[i].rtl)
6769 {
6770 register rtx dest = sets[i].inner_dest;
6771
6772 /* Needed for registers to remove the register from its
6773 previous quantity's chain.
6774 Needed for memory if this is a nonvarying address, unless
6775 we have just done an invalidate_memory that covers even those. */
6776 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6777 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6778 invalidate (dest);
6779 }
6780
6781 /* Make sure registers mentioned in destinations
6782 are safe for use in an expression to be inserted.
6783 This removes from the hash table
6784 any invalid entry that refers to one of these registers.
6785
6786 We don't care about the return value from mention_regs because
6787 we are going to hash the SET_DEST values unconditionally. */
6788
6789 for (i = 0; i < n_sets; i++)
6790 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
6791 mention_regs (SET_DEST (sets[i].rtl));
6792
6793 /* We may have just removed some of the src_elt's from the hash table.
6794 So replace each one with the current head of the same class. */
6795
6796 for (i = 0; i < n_sets; i++)
6797 if (sets[i].rtl)
6798 {
6799 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6800 /* If elt was removed, find current head of same class,
6801 or 0 if nothing remains of that class. */
6802 {
6803 register struct table_elt *elt = sets[i].src_elt;
6804
6805 while (elt && elt->prev_same_value)
6806 elt = elt->prev_same_value;
6807
6808 while (elt && elt->first_same_value == 0)
6809 elt = elt->next_same_value;
6810 sets[i].src_elt = elt ? elt->first_same_value : 0;
6811 }
6812 }
6813
6814 /* Now insert the destinations into their equivalence classes. */
6815
6816 for (i = 0; i < n_sets; i++)
6817 if (sets[i].rtl)
6818 {
6819 register rtx dest = SET_DEST (sets[i].rtl);
6820 register struct table_elt *elt;
6821
6822 /* Don't record value if we are not supposed to risk allocating
6823 floating-point values in registers that might be wider than
6824 memory. */
6825 if ((flag_float_store
6826 && GET_CODE (dest) == MEM
6827 && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
6828 /* Don't record values of destinations set inside a libcall block
6829 since we might delete the libcall. Things should have been set
6830 up so we won't want to reuse such a value, but we play it safe
6831 here. */
6832 || in_libcall_block
6833 /* If we didn't put a REG_EQUAL value or a source into the hash
6834 table, there is no point in recording DEST. */
6835 || sets[i].src_elt == 0)
6836 continue;
6837
6838 /* STRICT_LOW_PART isn't part of the value BEING set,
6839 and neither is the SUBREG inside it.
6840 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6841 if (GET_CODE (dest) == STRICT_LOW_PART)
6842 dest = SUBREG_REG (XEXP (dest, 0));
6843
6844 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6845 /* Registers must also be inserted into chains for quantities. */
6846 if (insert_regs (dest, sets[i].src_elt, 1))
6847 /* If `insert_regs' changes something, the hash code must be
6848 recalculated. */
6849 sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));
6850
6851 elt = insert (dest, sets[i].src_elt,
6852 sets[i].dest_hash_code, GET_MODE (dest));
6853 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
6854 if (elt->in_memory)
6855 {
6856 /* This implicitly assumes a whole struct
6857 need not have MEM_IN_STRUCT_P.
6858 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6859 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
6860 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
6861 }
6862
6863 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6864 narrower than M2, and both M1 and M2 are the same number of words,
6865 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6866 make that equivalence as well.
6867
6868 However, BAR may have equivalences for which gen_lowpart_if_possible
6869 produces a simpler value than it does for BAR itself (e.g., if BAR
6870 was ZERO_EXTENDed from M2), so we will scan all
6871 BAR's equivalences. If we don't get a simplified form, make
6872 the SUBREG. It will not be used in an equivalence, but will
6873 cause two similar assignments to be detected.
6874
6875 Note the loop below will find SUBREG_REG (DEST) since we have
6876 already entered SRC and DEST of the SET in the table. */
6877
6878 if (GET_CODE (dest) == SUBREG
6879 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) / UNITS_PER_WORD
6880 == GET_MODE_SIZE (GET_MODE (dest)) / UNITS_PER_WORD)
6881 && (GET_MODE_SIZE (GET_MODE (dest))
6882 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6883 && sets[i].src_elt != 0)
6884 {
6885 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6886 struct table_elt *elt, *classp = 0;
6887
6888 for (elt = sets[i].src_elt->first_same_value; elt;
6889 elt = elt->next_same_value)
6890 {
6891 rtx new_src = 0;
6892 int src_hash;
6893 struct table_elt *src_elt;
6894
6895 /* Ignore invalid entries. */
6896 if (GET_CODE (elt->exp) != REG
6897 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6898 continue;
6899
6900 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6901 if (new_src == 0)
6902 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
6903
6904 src_hash = HASH (new_src, new_mode);
6905 src_elt = lookup (new_src, src_hash, new_mode);
6906
6907 /* Put the new source in the hash table if it isn't
6908 there already. */
6909 if (src_elt == 0)
6910 {
6911 if (insert_regs (new_src, classp, 0))
6912 src_hash = HASH (new_src, new_mode);
6913 src_elt = insert (new_src, classp, src_hash, new_mode);
6914 src_elt->in_memory = elt->in_memory;
6915 src_elt->in_struct = elt->in_struct;
6916 }
6917 else if (classp && classp != src_elt->first_same_value)
6918 /* Show that two things that we've seen before are
6919 actually the same. */
6920 merge_equiv_classes (src_elt, classp);
6921
6922 classp = src_elt->first_same_value;
6923 }
6924 }
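      /* Concretely: after (set (subreg:SF (reg:SI 100) 0) (reg:SF 101))
	 on a machine where SFmode and SImode occupy one word each, the
	 loop above also records (reg:SI 100) as equivalent to
	 (subreg:SI (reg:SF 101) 0), so a later computation of that
	 subreg can reuse it (register numbers are illustrative).  */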
6925 }
6926
6927 /* Special handling for (set REG0 REG1)
6928 where REG0 is the "cheapest", cheaper than REG1.
6929 After cse, REG1 will probably not be used in the sequel,
6930 so (if easily done) change this insn to (set REG1 REG0) and
6931 replace REG1 with REG0 in the previous insn that computed their value.
6932 Then REG1 will become a dead store and won't cloud the situation
6933 for later optimizations.
6934
6935 Do not make this change if REG1 is a hard register, because it will
6936 then be used in the sequel and we may be changing a two-operand insn
6937 into a three-operand insn.
6938
6939 Also do not do this if we are operating on a copy of INSN. */
6940
6941 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6942 && NEXT_INSN (PREV_INSN (insn)) == insn
6943 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6944 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6945 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
6946 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
6947 == REGNO (SET_DEST (sets[0].rtl))))
6948 {
6949 rtx prev = PREV_INSN (insn);
6950 while (prev && GET_CODE (prev) == NOTE)
6951 prev = PREV_INSN (prev);
6952
6953 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
6954 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
6955 {
6956 rtx dest = SET_DEST (sets[0].rtl);
6957 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
6958
6959 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
6960 validate_change (insn, & SET_DEST (sets[0].rtl),
6961 SET_SRC (sets[0].rtl), 1);
6962 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
6963 apply_change_group ();
6964
6965 /* If REG1 was equivalent to a constant, REG0 is not. */
6966 if (note)
6967 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6968
6969 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6970 any REG_WAS_0 note on INSN to PREV. */
6971 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6972 if (note)
6973 remove_note (prev, note);
6974
6975 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6976 if (note)
6977 {
6978 remove_note (insn, note);
6979 XEXP (note, 1) = REG_NOTES (prev);
6980 REG_NOTES (prev) = note;
6981 }
6982 }
6983 }
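  /* For example, with PREV: (set (reg 101) (plus (reg 102) (reg 103)))
     and INSN: (set (reg 100) (reg 101)), where (reg 100) heads the
     class, we rewrite PREV to set (reg 100) directly and INSN to
     (set (reg 101) (reg 100)); (reg 101) is then likely a dead store
     (register numbers are illustrative).  */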
6984
6985 /* If this is a conditional jump insn, record any known equivalences due to
6986 the condition being tested. */
6987
6988 last_jump_equiv_class = 0;
6989 if (GET_CODE (insn) == JUMP_INSN
6990 && n_sets == 1 && GET_CODE (x) == SET
6991 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6992 record_jump_equiv (insn, 0);
6993
6994 #ifdef HAVE_cc0
6995 /* If the previous insn set CC0 and this insn no longer references CC0,
6996 delete the previous insn. Here we use the fact that nothing expects CC0
6997 to be valid over an insn, which is true until the final pass. */
6998 if (prev_insn && GET_CODE (prev_insn) == INSN
6999 && (tem = single_set (prev_insn)) != 0
7000 && SET_DEST (tem) == cc0_rtx
7001 && ! reg_mentioned_p (cc0_rtx, x))
7002 {
7003 PUT_CODE (prev_insn, NOTE);
7004 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7005 NOTE_SOURCE_FILE (prev_insn) = 0;
7006 }
7007
7008 prev_insn_cc0 = this_insn_cc0;
7009 prev_insn_cc0_mode = this_insn_cc0_mode;
7010 #endif
7011
7012 prev_insn = insn;
7013 }
7014 \f
7015 /* Store 1 in *WRITES_PTR for those categories of memory ref
7016 that must be invalidated when the expression WRITTEN is stored in.
7017 If WRITTEN is null, say everything must be invalidated. */
7018
7019 static void
7020 note_mem_written (written, writes_ptr)
7021 rtx written;
7022 struct write_data *writes_ptr;
7023 {
7024 static struct write_data everything = {0, 1, 1, 1};
7025
7026 if (written == 0)
7027 *writes_ptr = everything;
7028 else if (GET_CODE (written) == MEM)
7029 {
7030 /* Pushing or popping the stack invalidates just the stack pointer. */
7031 rtx addr = XEXP (written, 0);
7032 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7033 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7034 && GET_CODE (XEXP (addr, 0)) == REG
7035 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7036 {
7037 writes_ptr->sp = 1;
7038 return;
7039 }
7040 else if (GET_MODE (written) == BLKmode)
7041 *writes_ptr = everything;
7042 else if (cse_rtx_addr_varies_p (written))
7043 {
7044 /* A varying address that is a sum indicates an array element,
7045 and that's just as good as a structure element
7046 in implying that we need not invalidate scalar variables.
7047 However, we must allow QImode aliasing of scalars, because the
7048 ANSI C standard allows character pointers to alias anything. */
7049 if (! ((MEM_IN_STRUCT_P (written)
7050 || GET_CODE (XEXP (written, 0)) == PLUS)
7051 && GET_MODE (written) != QImode))
7052 writes_ptr->all = 1;
7053 writes_ptr->nonscalar = 1;
7054 }
7055 writes_ptr->var = 1;
7056 }
7057 }
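/* Thus a push such as (set (mem:SI (pre_dec (reg sp))) ...) marks
   only WRITES_PTR->sp; a BLKmode store invalidates everything; a
   varying address sets `nonscalar' (and `all' too, unless the address
   form implies an array or structure element wider than QImode); and
   every MEM store that reaches the end of the function also sets
   `var'.  */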
7058
7059 /* Perform invalidation on the basis of everything about an insn
7060 except for invalidating the actual places that are SET in it.
7061 This includes the places CLOBBERed, and anything that might
7062 alias with something that is SET or CLOBBERed.
7063
7064 W points to the writes_memory for this insn, a struct write_data
7065 saying which kinds of memory references must be invalidated.
7066 X is the pattern of the insn. */
7067
7068 static void
7069 invalidate_from_clobbers (w, x)
7070 struct write_data *w;
7071 rtx x;
7072 {
7073 /* If W->var is not set, W specifies no action.
7074 If W->all is set, this step gets all memory refs
7075 so they can be ignored in the rest of this function. */
7076 if (w->var)
7077 invalidate_memory (w);
7078
7079 if (w->sp)
7080 {
7081 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7082 reg_tick[STACK_POINTER_REGNUM]++;
7083
7084 /* This should be *very* rare. */
7085 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7086 invalidate (stack_pointer_rtx);
7087 }
7088
7089 if (GET_CODE (x) == CLOBBER)
7090 {
7091 rtx ref = XEXP (x, 0);
7092 if (ref
7093 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7094 || (GET_CODE (ref) == MEM && ! w->all)))
7095 invalidate (ref);
7096 }
7097 else if (GET_CODE (x) == PARALLEL)
7098 {
7099 register int i;
7100 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7101 {
7102 register rtx y = XVECEXP (x, 0, i);
7103 if (GET_CODE (y) == CLOBBER)
7104 {
7105 rtx ref = XEXP (y, 0);
7106 if (ref
7107 		  && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7108 		      || (GET_CODE (ref) == MEM && ! w->all)))
7109 invalidate (ref);
7110 }
7111 }
7112 }
7113 }
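
/* For concreteness, an example pattern (hand-written RTL for illustration,
   not compiler output): a division on a machine that wipes a scratch
   register might be

     (parallel [(set (reg:SI 100) (div:SI (reg:SI 101) (reg:SI 102)))
                (clobber (reg:SI 103))])

   cse_insn handles the SET; this function looks only at the CLOBBER arm
   and invalidates any hash table entries that mention (reg:SI 103).  */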
7114 \f
7115 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7116 and replace any registers in them with either an equivalent constant
7117 or the canonical form of the register. If we are inside an address,
7118 only do this if the address remains valid.
7119
7120 OBJECT is 0 except when within a MEM in which case it is the MEM.
7121
7122 Return the replacement for X. */
7123
7124 static rtx
7125 cse_process_notes (x, object)
7126 rtx x;
7127 rtx object;
7128 {
7129 enum rtx_code code = GET_CODE (x);
7130 char *fmt = GET_RTX_FORMAT (code);
7131 int qty;
7132 int i;
7133
7134 switch (code)
7135 {
7136 case CONST_INT:
7137 case CONST:
7138 case SYMBOL_REF:
7139 case LABEL_REF:
7140 case CONST_DOUBLE:
7141 case PC:
7142 case CC0:
7143 case LO_SUM:
7144 return x;
7145
7146 case MEM:
7147 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7148 return x;
7149
7150 case EXPR_LIST:
7151 case INSN_LIST:
7152 if (REG_NOTE_KIND (x) == REG_EQUAL)
7153 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7154 if (XEXP (x, 1))
7155 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7156 return x;
7157
7158 case SIGN_EXTEND:
7159 case ZERO_EXTEND:
7160 {
7161 rtx new = cse_process_notes (XEXP (x, 0), object);
7162 /* We don't substitute VOIDmode constants into these rtx,
7163 since they would impede folding. */
7164 if (GET_MODE (new) != VOIDmode)
7165 validate_change (object, &XEXP (x, 0), new, 0);
7166 return x;
7167 }
7168
7169 case REG:
7170 i = reg_qty[REGNO (x)];
7171
7172 /* Return a constant or a constant register. */
7173 if (REGNO_QTY_VALID_P (REGNO (x))
7174 && qty_const[i] != 0
7175 && (CONSTANT_P (qty_const[i])
7176 || GET_CODE (qty_const[i]) == REG))
7177 {
7178 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7179 if (new)
7180 return new;
7181 }
7182
7183 /* Otherwise, canonicalize this register. */
7184 return canon_reg (x, NULL_RTX);
7185 }
7186
7187 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7188 if (fmt[i] == 'e')
7189 validate_change (object, &XEXP (x, i),
7190 cse_process_notes (XEXP (x, i), object), 0);
7191
7192 return x;
7193 }
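
/* The default arm above uses the standard rtx walking idiom: the format
   string from GET_RTX_FORMAT describes each operand, and the `e' positions
   are subexpressions.  A minimal sketch of the same idiom, extended to `E'
   vector operands, as a self-contained hypothetical helper:  */

static int
toy_count_subexprs (x)
     rtx x;
{
  register char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  register int i, j;
  int n = 0;

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      n += 1 + toy_count_subexprs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	n += 1 + toy_count_subexprs (XVECEXP (x, i, j));
  return n;
}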
7194 \f
7195 /* Find common subexpressions between the end test of a loop and the beginning
7196 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7197
7198 Often we have a loop where an expression in the exit test is used
7199 in the body of the loop. For example "while (*p) *q++ = *p++;".
7200 Because of the way we duplicate the loop exit test in front of the loop,
7201 however, we don't detect that common subexpression. This will be caught
7202    when global cse is implemented, but this is quite a common case.
7203
7204 This function handles the most common cases of these common expressions.
7205 It is called after we have processed the basic block ending with the
7206 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7207 jumps to a label used only once. */
7208
7209 static void
7210 cse_around_loop (loop_start)
7211 rtx loop_start;
7212 {
7213 rtx insn;
7214 int i;
7215 struct table_elt *p;
7216
7217   /* Unless LOOP_START is immediately preceded (ignoring line-number notes)
7218      by the NOTE_INSN_LOOP_BEG that opens the loop, we don't do anything.  */
7219 for (insn = PREV_INSN (loop_start);
7220 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7221 insn = PREV_INSN (insn))
7222 ;
7223
7224 if (insn == 0
7225 || GET_CODE (insn) != NOTE
7226 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7227 return;
7228
7229 /* If the last insn of the loop (the end test) was an NE comparison,
7230 we will interpret it as an EQ comparison, since we fell through
7231 the loop. Any equivalences resulting from that comparison are
7232 therefore not valid and must be invalidated. */
7233 if (last_jump_equiv_class)
7234 for (p = last_jump_equiv_class->first_same_value; p;
7235 p = p->next_same_value)
7236 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7237 || GET_CODE (p->exp) == SUBREG)
7238 invalidate (p->exp);
7239
7240 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7241 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7242
7243 The only thing we do with SET_DEST is invalidate entries, so we
7244 can safely process each SET in order. It is slightly less efficient
7245 to do so, but we only want to handle the most common cases. */
7246
7247 for (insn = NEXT_INSN (loop_start);
7248 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7249 && ! (GET_CODE (insn) == NOTE
7250 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7251 insn = NEXT_INSN (insn))
7252 {
7253 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7254 && (GET_CODE (PATTERN (insn)) == SET
7255 || GET_CODE (PATTERN (insn)) == CLOBBER))
7256 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7257 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7258 && GET_CODE (PATTERN (insn)) == PARALLEL)
7259 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7260 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7261 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7262 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7263 loop_start);
7264 }
7265 }
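
/* To make the motivating comment above concrete, an illustrative C-level
   example (hypothetical function).  jump.c copies the exit test "*p" in
   front of the loop, so the load of *p in that copy and the load of *p in
   the body become common subexpressions -- but they land in different
   basic blocks, separated by the loop label, which is why cse_around_loop
   must look across the label.  */

static void
toy_copy_string (q, p)
     register char *q, *p;
{
  while (*p)
    *q++ = *p++;
}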
7266 \f
7267 /* Variable used for communications between the next two routines. */
7268
7269 static struct write_data skipped_writes_memory;
7270
7271 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7272 since they are done elsewhere. This function is called via note_stores. */
7273
7274 static void
7275 invalidate_skipped_set (dest, set)
7276      rtx dest;
7277      rtx set;
7278 {
7279 if (GET_CODE (set) == CLOBBER
7280 #ifdef HAVE_cc0
7281 || dest == cc0_rtx
7282 #endif
7283 || dest == pc_rtx)
7284 return;
7285
7286 if (GET_CODE (dest) == MEM)
7287 note_mem_written (dest, &skipped_writes_memory);
7288
7289 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7290 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7291 invalidate (dest);
7292 }
7293
7294 /* Invalidate everything stored by the insns from START up to the end of the
7295    function or the next label.  This is called when we wish to CSE around a
7296    block that is conditionally executed.  */
7297
7298 static void
7299 invalidate_skipped_block (start)
7300 rtx start;
7301 {
7302 rtx insn;
7303 int i;
7304 static struct write_data init = {0, 0, 0, 0};
7305 static struct write_data everything = {0, 1, 1, 1};
7306
7307 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7308 insn = NEXT_INSN (insn))
7309 {
7310 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7311 continue;
7312
7313 skipped_writes_memory = init;
7314
7315 if (GET_CODE (insn) == CALL_INSN)
7316 {
7317 invalidate_for_call ();
7318 skipped_writes_memory = everything;
7319 }
7320
7321 note_stores (PATTERN (insn), invalidate_skipped_set);
7322 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7323 }
7324 }
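
/* The need for this invalidation is easiest to see at the source level
   (hypothetical function).  When -fcse-skip-blocks follows the branch
   around the conditional store below, any equivalence recorded for X
   before the branch must be discarded, since the skipped block may have
   executed and changed X.  */

static int
toy_skipped_block (cond, x)
     int cond, x;
{
  if (cond)
    x = 42;			/* the block cse skips over */
  return x;			/* old equivalences for x are unsafe here */
}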
7325 \f
7326 /* Used for communication between the following two routines; contains a
7327 value to be checked for modification. */
7328
7329 static rtx cse_check_loop_start_value;
7330
7331 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7332 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7333
7334 static void
7335 cse_check_loop_start (x, set)
7336 rtx x;
7337 rtx set;
7338 {
7339 if (cse_check_loop_start_value == 0
7340 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7341 return;
7342
7343 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7344 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7345 cse_check_loop_start_value = 0;
7346 }
7347
7348 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7349 a loop that starts with the label at LOOP_START.
7350
7351 If X is a SET, we see if its SET_SRC is currently in our hash table.
7352 If so, we see if it has a value equal to some register used only in the
7353 loop exit code (as marked by jump.c).
7354
7355 If those two conditions are true, we search backwards from the start of
7356 the loop to see if that same value was loaded into a register that still
7357 retains its value at the start of the loop.
7358
7359 If so, we insert an insn after the load to copy the destination of that
7360 load into the equivalent register and (try to) replace our SET_SRC with that
7361 register.
7362
7363 In any event, we invalidate whatever this SET or CLOBBER modifies. */
7364
7365 static void
7366 cse_set_around_loop (x, insn, loop_start)
7367 rtx x;
7368 rtx insn;
7369 rtx loop_start;
7370 {
7371 rtx p;
7372 struct table_elt *src_elt;
7373 static struct write_data init = {0, 0, 0, 0};
7374 struct write_data writes_memory;
7375
7376 writes_memory = init;
7377
7378 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7379 are setting PC or CC0 or whose SET_SRC is already a register. */
7380 if (GET_CODE (x) == SET
7381 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7382 && GET_CODE (SET_SRC (x)) != REG)
7383 {
7384 src_elt = lookup (SET_SRC (x),
7385 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7386 GET_MODE (SET_DEST (x)));
7387
7388 if (src_elt)
7389 for (src_elt = src_elt->first_same_value; src_elt;
7390 src_elt = src_elt->next_same_value)
7391 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7392 && COST (src_elt->exp) < COST (SET_SRC (x)))
7393 {
7394 rtx p, set;
7395
7396 /* Look for an insn in front of LOOP_START that sets
7397 something in the desired mode to SET_SRC (x) before we hit
7398 a label or CALL_INSN. */
7399
7400 for (p = prev_nonnote_insn (loop_start);
7401 p && GET_CODE (p) != CALL_INSN
7402 && GET_CODE (p) != CODE_LABEL;
7403 p = prev_nonnote_insn (p))
7404 if ((set = single_set (p)) != 0
7405 && GET_CODE (SET_DEST (set)) == REG
7406 && GET_MODE (SET_DEST (set)) == src_elt->mode
7407 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7408 {
7409 /* We now have to ensure that nothing between P
7410 and LOOP_START modified anything referenced in
7411 SET_SRC (x). We know that nothing within the loop
7412 can modify it, or we would have invalidated it in
7413 the hash table. */
7414 rtx q;
7415
7416 cse_check_loop_start_value = SET_SRC (x);
7417 for (q = p; q != loop_start; q = NEXT_INSN (q))
7418 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7419 note_stores (PATTERN (q), cse_check_loop_start);
7420
7421 /* If nothing was changed and we can replace our
7422 SET_SRC, add an insn after P to copy its destination
7423 to what we will be replacing SET_SRC with. */
7424 if (cse_check_loop_start_value
7425 && validate_change (insn, &SET_SRC (x),
7426 src_elt->exp, 0))
7427 emit_insn_after (gen_move_insn (src_elt->exp,
7428 SET_DEST (set)),
7429 p);
7430 break;
7431 }
7432 }
7433 }
7434
7435 /* Now invalidate anything modified by X. */
7436 note_mem_written (SET_DEST (x), &writes_memory);
7437
7438 if (writes_memory.var)
7439 invalidate_memory (&writes_memory);
7440
7441 /* See comment on similar code in cse_insn for explanation of these tests. */
7442 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7443 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7444 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7445 invalidate (SET_DEST (x));
7446 }
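
/* Sketch of the transformation (hand-written RTL for illustration;
   register numbers invented).  Suppose (reg 55) is a loop-exit-test
   register known equal to a MEM, and we have

     (set (reg 50) (mem:SI (reg 60)))	<- insn P, before the loop
     (code_label 10)			<- LOOP_START
     (set (reg 70) (mem:SI (reg 60)))	<- insn X, inside the loop

   Then we emit (set (reg 55) (reg 50)) right after P and replace the
   SET_SRC of X with (reg 55), turning the load inside the loop into a
   cheap register copy.  */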
7447 \f
7448 /* Find the end of INSN's basic block and return its range,
7449 the total number of SETs in all the insns of the block, the last insn of the
7450 block, and the branch path.
7451
7452 The branch path indicates which branches should be followed. If a non-zero
7453 path size is specified, the block should be rescanned and a different set
7454 of branches will be taken. The branch path is only used if
7455 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7456
7457 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7458 used to describe the block. It is filled in with the information about
7459 the current block. The incoming structure's branch path, if any, is used
7460 to construct the output branch path. */
7461
7462 void
7463 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7464 rtx insn;
7465 struct cse_basic_block_data *data;
7466 int follow_jumps;
7467 int after_loop;
7468 int skip_blocks;
7469 {
7470 rtx p = insn, q;
7471 int nsets = 0;
7472 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7473 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7474 int path_size = data->path_size;
7475 int path_entry = 0;
7476 int i;
7477
7478 /* Update the previous branch path, if any. If the last branch was
7479 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7480 shorten the path by one and look at the previous branch. We know that
7481 at least one branch must have been taken if PATH_SIZE is non-zero. */
7482 while (path_size > 0)
7483 {
7484 if (data->path[path_size - 1].status != NOT_TAKEN)
7485 {
7486 data->path[path_size - 1].status = NOT_TAKEN;
7487 break;
7488 }
7489 else
7490 path_size--;
7491 }
7492
7493 /* Scan to end of this basic block. */
7494 while (p && GET_CODE (p) != CODE_LABEL)
7495 {
7496 /* Don't cse out the end of a loop. This makes a difference
7497 only for the unusual loops that always execute at least once;
7498 all other loops have labels there so we will stop in any case.
7499 Cse'ing out the end of the loop is dangerous because it
7500 might cause an invariant expression inside the loop
7501 to be reused after the end of the loop. This would make it
7502 hard to move the expression out of the loop in loop.c,
7503 especially if it is one of several equivalent expressions
7504 and loop.c would like to eliminate it.
7505
7506 If we are running after loop.c has finished, we can ignore
7507 the NOTE_INSN_LOOP_END. */
7508
7509 if (! after_loop && GET_CODE (p) == NOTE
7510 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7511 break;
7512
7513       /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
7514 the regs restored by the longjmp come from
7515 a later time than the setjmp. */
7516 if (GET_CODE (p) == NOTE
7517 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7518 break;
7519
7520 /* A PARALLEL can have lots of SETs in it,
7521 especially if it is really an ASM_OPERANDS. */
7522 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7523 && GET_CODE (PATTERN (p)) == PARALLEL)
7524 nsets += XVECLEN (PATTERN (p), 0);
7525 else if (GET_CODE (p) != NOTE)
7526 nsets += 1;
7527
7528 /* Ignore insns made by CSE; they cannot affect the boundaries of
7529 the basic block. */
7530
7531 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7532 high_cuid = INSN_CUID (p);
7533 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7534 low_cuid = INSN_CUID (p);
7535
7536 /* See if this insn is in our branch path. If it is and we are to
7537 take it, do so. */
7538 if (path_entry < path_size && data->path[path_entry].branch == p)
7539 {
7540 if (data->path[path_entry].status != NOT_TAKEN)
7541 p = JUMP_LABEL (p);
7542
7543 /* Point to next entry in path, if any. */
7544 path_entry++;
7545 }
7546
7547 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7548 was specified, we haven't reached our maximum path length, there are
7549 insns following the target of the jump, this is the only use of the
7550 jump label, and the target label is preceded by a BARRIER.
7551
7552 Alternatively, we can follow the jump if it branches around a
7553 block of code and there are no other branches into the block.
7554 In this case invalidate_skipped_block will be called to invalidate any
7555 registers set in the block when following the jump. */
7556
7557 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7558 && GET_CODE (p) == JUMP_INSN
7559 && GET_CODE (PATTERN (p)) == SET
7560 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7561 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7562 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7563 {
7564 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7565 if ((GET_CODE (q) != NOTE
7566 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7567 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7568 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7569 break;
7570
7571 /* If we ran into a BARRIER, this code is an extension of the
7572 basic block when the branch is taken. */
7573 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7574 {
7575 	      /* Don't allow ourselves to keep walking around an
7576 always-executed loop. */
7577 if (next_real_insn (q) == next)
7578 {
7579 p = NEXT_INSN (p);
7580 continue;
7581 }
7582
7583 /* Similarly, don't put a branch in our path more than once. */
7584 for (i = 0; i < path_entry; i++)
7585 if (data->path[i].branch == p)
7586 break;
7587
7588 if (i != path_entry)
7589 break;
7590
7591 data->path[path_entry].branch = p;
7592 data->path[path_entry++].status = TAKEN;
7593
7594 /* This branch now ends our path. It was possible that we
7595 didn't see this branch the last time around (when the
7596 insn in front of the target was a JUMP_INSN that was
7597 turned into a no-op). */
7598 path_size = path_entry;
7599
7600 p = JUMP_LABEL (p);
7601 /* Mark block so we won't scan it again later. */
7602 PUT_MODE (NEXT_INSN (p), QImode);
7603 }
7604 /* Detect a branch around a block of code. */
7605 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7606 {
7607 register rtx tmp;
7608
7609 if (next_real_insn (q) == next)
7610 {
7611 p = NEXT_INSN (p);
7612 continue;
7613 }
7614
7615 for (i = 0; i < path_entry; i++)
7616 if (data->path[i].branch == p)
7617 break;
7618
7619 if (i != path_entry)
7620 break;
7621
7622 /* This is no_labels_between_p (p, q) with an added check for
7623 reaching the end of a function (in case Q precedes P). */
7624 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7625 if (GET_CODE (tmp) == CODE_LABEL)
7626 break;
7627
7628 if (tmp == q)
7629 {
7630 data->path[path_entry].branch = p;
7631 data->path[path_entry++].status = AROUND;
7632
7633 path_size = path_entry;
7634
7635 p = JUMP_LABEL (p);
7636 /* Mark block so we won't scan it again later. */
7637 PUT_MODE (NEXT_INSN (p), QImode);
7638 }
7639 }
7640 }
7641 p = NEXT_INSN (p);
7642 }
7643
7644 data->low_cuid = low_cuid;
7645 data->high_cuid = high_cuid;
7646 data->nsets = nsets;
7647 data->last = p;
7648
7649   /* If no jump in the path was taken, set our path length to zero
7650      so that a rescan won't be done.  */
7651 for (i = path_size - 1; i >= 0; i--)
7652 if (data->path[i].status != NOT_TAKEN)
7653 break;
7654
7655 if (i == -1)
7656 data->path_size = 0;
7657 else
7658 data->path_size = path_size;
7659
7660 /* End the current branch path. */
7661 data->path[path_size].branch = 0;
7662 }
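
/* Illustrative sketch (hypothetical helper, not GCC code) of the
   path-update rule at the top of cse_end_of_basic_block: successive
   rescans enumerate branch paths the way binary counting enumerates bit
   strings -- drop trailing NOT_TAKEN entries, then flip the last taken
   branch to NOT_TAKEN.  E.g. a two-branch block is visited as
   [TAKEN, TAKEN], [TAKEN, NOT_TAKEN], [NOT_TAKEN], done.  */

static int
toy_next_path (status, size)
     enum taken *status;
     int size;
{
  while (size > 0)
    {
      if (status[size - 1] != NOT_TAKEN)
	{
	  status[size - 1] = NOT_TAKEN;
	  break;
	}
      else
	size--;
    }
  return size;			/* zero means all paths were tried */
}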
7663 \f
7664 /* Perform cse on the instructions of a function.
7665 F is the first instruction.
7666    NREGS is one plus the highest pseudo-reg number used in the function.
7667
7668 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7669 (only if -frerun-cse-after-loop).
7670
7671 Returns 1 if jump_optimize should be redone due to simplifications
7672 in conditional jump instructions. */
7673
7674 int
7675 cse_main (f, nregs, after_loop, file)
7676 rtx f;
7677 int nregs;
7678 int after_loop;
7679 FILE *file;
7680 {
7681 struct cse_basic_block_data val;
7682 register rtx insn = f;
7683 register int i;
7684
7685 cse_jumps_altered = 0;
7686 constant_pool_entries_cost = 0;
7687 val.path_size = 0;
7688
7689 init_recog ();
7690
7691 max_reg = nregs;
7692
7693 all_minus_one = (int *) alloca (nregs * sizeof (int));
7694 consec_ints = (int *) alloca (nregs * sizeof (int));
7695
7696 for (i = 0; i < nregs; i++)
7697 {
7698 all_minus_one[i] = -1;
7699 consec_ints[i] = i;
7700 }
7701
7702 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7703 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7704 reg_qty = (int *) alloca (nregs * sizeof (int));
7705 reg_in_table = (int *) alloca (nregs * sizeof (int));
7706 reg_tick = (int *) alloca (nregs * sizeof (int));
7707
7708 /* Discard all the free elements of the previous function
7709      since they are allocated in the temporary obstack.  */
7710 bzero (table, sizeof table);
7711 free_element_chain = 0;
7712 n_elements_made = 0;
7713
7714 /* Find the largest uid. */
7715
7716 max_uid = get_max_uid ();
7717 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
7718 bzero (uid_cuid, (max_uid + 1) * sizeof (int));
7719
7720 /* Compute the mapping from uids to cuids.
7721 CUIDs are numbers assigned to insns, like uids,
7722 except that cuids increase monotonically through the code.
7723 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7724 between two insns is not affected by -g. */
7725
7726 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7727 {
7728 if (GET_CODE (insn) != NOTE
7729 || NOTE_LINE_NUMBER (insn) < 0)
7730 INSN_CUID (insn) = ++i;
7731 else
7732 /* Give a line number note the same cuid as preceding insn. */
7733 INSN_CUID (insn) = i;
7734 }
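
  /* For example (hand-worked): the stream

       insn A, line-number note, insn B, NOTE_INSN_LOOP_BEG, insn C

     gets cuids 1, 1, 2, 3, 4.  The line note shares A's cuid, while the
     loop note, whose "line number" is negative, is counted like an
     ordinary insn.  */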
7735
7736 /* Initialize which registers are clobbered by calls. */
7737
7738 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7739
7740 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7741 if ((call_used_regs[i]
7742 /* Used to check !fixed_regs[i] here, but that isn't safe;
7743 fixed regs are still call-clobbered, and sched can get
7744 confused if they can "live across calls".
7745
7746 The frame pointer is always preserved across calls. The arg
7747 pointer is if it is fixed. The stack pointer usually is, unless
7748 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7749 will be present. If we are generating PIC code, the PIC offset
7750 table register is preserved across calls. */
7751
7752 && i != STACK_POINTER_REGNUM
7753 && i != FRAME_POINTER_REGNUM
7754 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7755 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7756 #endif
7757 #ifdef PIC_OFFSET_TABLE_REGNUM
7758 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7759 #endif
7760 )
7761 || global_regs[i])
7762 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7763
7764 /* Loop over basic blocks.
7765 Compute the maximum number of qty's needed for each basic block
7766 (which is 2 for each SET). */
7767 insn = f;
7768 while (insn)
7769 {
7770 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7771 flag_cse_skip_blocks);
7772
7773 /* If this basic block was already processed or has no sets, skip it. */
7774 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7775 {
7776 PUT_MODE (insn, VOIDmode);
7777 insn = (val.last ? NEXT_INSN (val.last) : 0);
7778 val.path_size = 0;
7779 continue;
7780 }
7781
7782 cse_basic_block_start = val.low_cuid;
7783 cse_basic_block_end = val.high_cuid;
7784 max_qty = val.nsets * 2;
7785
7786 if (file)
7787 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
7788 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7789 val.nsets);
7790
7791 /* Make MAX_QTY bigger to give us room to optimize
7792 past the end of this basic block, if that should prove useful. */
7793 if (max_qty < 500)
7794 max_qty = 500;
7795
7796 max_qty += max_reg;
7797
7798 /* If this basic block is being extended by following certain jumps,
7799 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7800 Otherwise, we start after this basic block. */
7801 if (val.path_size > 0)
7802 cse_basic_block (insn, val.last, val.path, 0);
7803 else
7804 {
7805 int old_cse_jumps_altered = cse_jumps_altered;
7806 rtx temp;
7807
7808 /* When cse changes a conditional jump to an unconditional
7809 jump, we want to reprocess the block, since it will give
7810 us a new branch path to investigate. */
7811 cse_jumps_altered = 0;
7812 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7813 if (cse_jumps_altered == 0
7814 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7815 insn = temp;
7816
7817 cse_jumps_altered |= old_cse_jumps_altered;
7818 }
7819
7820 #ifdef USE_C_ALLOCA
7821 alloca (0);
7822 #endif
7823 }
7824
7825 /* Tell refers_to_mem_p that qty_const info is not available. */
7826 qty_const = 0;
7827
7828 if (max_elements_made < n_elements_made)
7829 max_elements_made = n_elements_made;
7830
7831 return cse_jumps_altered;
7832 }
7833
7834 /* Process a single basic block.  FROM and TO are the limits of the basic
7835 block. NEXT_BRANCH points to the branch path when following jumps or
7836 a null path when not following jumps.
7837
7838 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7839 loop. This is true when we are being called for the last time on a
7840 block and this CSE pass is before loop.c. */
7841
7842 static rtx
7843 cse_basic_block (from, to, next_branch, around_loop)
7844 register rtx from, to;
7845 struct branch_path *next_branch;
7846 int around_loop;
7847 {
7848 register rtx insn;
7849 int to_usage = 0;
7850 int in_libcall_block = 0;
7851
7852 /* Each of these arrays is undefined before max_reg, so only allocate
7853 the space actually needed and adjust the start below. */
7854
7855 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7856 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7857   qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
7858 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7859 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7860 qty_comparison_code
7861 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
7862 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7863 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7864
7865 qty_first_reg -= max_reg;
7866 qty_last_reg -= max_reg;
7867 qty_mode -= max_reg;
7868 qty_const -= max_reg;
7869 qty_const_insn -= max_reg;
7870 qty_comparison_code -= max_reg;
7871 qty_comparison_qty -= max_reg;
7872 qty_comparison_const -= max_reg;
7873
7874 new_basic_block ();
7875
7876 /* TO might be a label. If so, protect it from being deleted. */
7877 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7878 ++LABEL_NUSES (to);
7879
7880 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7881 {
7882 register enum rtx_code code;
7883
7884 /* See if this is a branch that is part of the path. If so, and it is
7885 to be taken, do so. */
7886 if (next_branch->branch == insn)
7887 {
7888 enum taken status = next_branch++->status;
7889 if (status != NOT_TAKEN)
7890 {
7891 if (status == TAKEN)
7892 record_jump_equiv (insn, 1);
7893 else
7894 invalidate_skipped_block (NEXT_INSN (insn));
7895
7896 /* Set the last insn as the jump insn; it doesn't affect cc0.
7897 Then follow this branch. */
7898 #ifdef HAVE_cc0
7899 prev_insn_cc0 = 0;
7900 #endif
7901 prev_insn = insn;
7902 insn = JUMP_LABEL (insn);
7903 continue;
7904 }
7905 }
7906
7907 code = GET_CODE (insn);
7908 if (GET_MODE (insn) == QImode)
7909 PUT_MODE (insn, VOIDmode);
7910
7911 if (GET_RTX_CLASS (code) == 'i')
7912 {
7913 /* Process notes first so we have all notes in canonical forms when
7914 looking for duplicate operations. */
7915
7916 if (REG_NOTES (insn))
7917 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7918
7919 	  /* Track when we are inside a LIBCALL block.  Inside such a block,
7920 we do not want to record destinations. The last insn of a
7921 LIBCALL block is not considered to be part of the block, since
7922 its destination is the result of the block and hence should be
7923 recorded. */
7924
7925 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7926 in_libcall_block = 1;
7927 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7928 in_libcall_block = 0;
7929
7930 cse_insn (insn, in_libcall_block);
7931 }
7932
7933 /* If INSN is now an unconditional jump, skip to the end of our
7934 basic block by pretending that we just did the last insn in the
7935 basic block. If we are jumping to the end of our block, show
7936 that we can have one usage of TO. */
7937
7938 if (simplejump_p (insn))
7939 {
7940 if (to == 0)
7941 return 0;
7942
7943 if (JUMP_LABEL (insn) == to)
7944 to_usage = 1;
7945
7946 /* Maybe TO was deleted because the jump is unconditional.
7947 If so, there is nothing left in this basic block. */
7948 /* ??? Perhaps it would be smarter to set TO
7949 to whatever follows this insn,
7950 and pretend the basic block had always ended here. */
7951 if (INSN_DELETED_P (to))
7952 break;
7953
7954 insn = PREV_INSN (to);
7955 }
7956
7957 /* See if it is ok to keep on going past the label
7958 which used to end our basic block. Remember that we incremented
7959 the count of that label, so we decrement it here. If we made
7960 a jump unconditional, TO_USAGE will be one; in that case, we don't
7961 want to count the use in that jump. */
7962
7963 if (to != 0 && NEXT_INSN (insn) == to
7964 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7965 {
7966 struct cse_basic_block_data val;
7967
7968 insn = NEXT_INSN (to);
7969
7970 if (LABEL_NUSES (to) == 0)
7971 delete_insn (to);
7972
7973 /* Find the end of the following block. Note that we won't be
7974 following branches in this case. If TO was the last insn
7975 in the function, we are done. Similarly, if we deleted the
7976 insn after TO, it must have been because it was preceded by
7977 a BARRIER. In that case, we are done with this block because it
7978 has no continuation. */
7979
7980 if (insn == 0 || INSN_DELETED_P (insn))
7981 return 0;
7982
7983 to_usage = 0;
7984 val.path_size = 0;
7985 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7986
7987 /* If the tables we allocated have enough space left
7988 to handle all the SETs in the next basic block,
7989 continue through it. Otherwise, return,
7990 and that block will be scanned individually. */
7991 if (val.nsets * 2 + next_qty > max_qty)
7992 break;
7993
7994 cse_basic_block_start = val.low_cuid;
7995 cse_basic_block_end = val.high_cuid;
7996 to = val.last;
7997
7998 /* Prevent TO from being deleted if it is a label. */
7999 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8000 ++LABEL_NUSES (to);
8001
8002 /* Back up so we process the first insn in the extension. */
8003 insn = PREV_INSN (insn);
8004 }
8005 }
8006
8007 if (next_qty > max_qty)
8008 abort ();
8009
8010 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8011 the previous insn is the only insn that branches to the head of a loop,
8012 we can cse into the loop. Don't do this if we changed the jump
8013 structure of a loop unless we aren't going to be following jumps. */
8014
8015 if ((cse_jumps_altered == 0
8016 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8017 && around_loop && to != 0
8018 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8019 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8020 && JUMP_LABEL (PREV_INSN (to)) != 0
8021 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8022 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8023
8024 return to ? NEXT_INSN (to) : 0;
8025 }
8026 \f
8027 /* Count the number of times registers are used (not set) in X.
8028 COUNTS is an array in which we accumulate the count, INCR is how much
8029 we count each register usage. */
8030
8031 static void
8032 count_reg_usage (x, counts, incr)
8033 rtx x;
8034 int *counts;
8035 int incr;
8036 {
8037 enum rtx_code code = GET_CODE (x);
8038 char *fmt;
8039 int i, j;
8040
8041 switch (code)
8042 {
8043 case REG:
8044 counts[REGNO (x)] += incr;
8045 return;
8046
8047 case PC:
8048 case CC0:
8049 case CONST:
8050 case CONST_INT:
8051 case CONST_DOUBLE:
8052 case SYMBOL_REF:
8053 case LABEL_REF:
8054 case CLOBBER:
8055 return;
8056
8057 case SET:
8058 /* Unless we are setting a REG, count everything in SET_DEST. */
8059 if (GET_CODE (SET_DEST (x)) != REG)
8060 count_reg_usage (SET_DEST (x), counts, incr);
8061 count_reg_usage (SET_SRC (x), counts, incr);
8062 return;
8063
8064 case INSN:
8065 case JUMP_INSN:
8066 case CALL_INSN:
8067 count_reg_usage (PATTERN (x), counts, incr);
8068
8069 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8070 use them. */
8071
8072 if (REG_NOTES (x))
8073 count_reg_usage (REG_NOTES (x), counts, incr);
8074 return;
8075
8076 case EXPR_LIST:
8077 case INSN_LIST:
8078 if (REG_NOTE_KIND (x) == REG_EQUAL)
8079 count_reg_usage (XEXP (x, 0), counts, incr);
8080 if (XEXP (x, 1))
8081 count_reg_usage (XEXP (x, 1), counts, incr);
8082 return;
8083 }
8084
8085 fmt = GET_RTX_FORMAT (code);
8086 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8087 {
8088 if (fmt[i] == 'e')
8089 count_reg_usage (XEXP (x, i), counts, incr);
8090 else if (fmt[i] == 'E')
8091 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8092 count_reg_usage (XVECEXP (x, i, j), counts, incr);
8093 }
8094 }
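
/* For example (hand-worked): for an insn whose pattern is

     (set (reg 5) (plus:SI (reg 5) (reg 6)))

   a call with INCR == 1 adds 1 to counts[5] and counts[6]: the SET_DEST
   (reg 5) is not itself counted as a use, but its appearance inside
   SET_SRC is.  delete_dead_from_cse below relies on the counts being
   exactly retractable by a second call with INCR == -1.  */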
8095 \f
8096 /* Scan all the insns and delete any that are dead; i.e., they store a register
8097 that is never used or they copy a register to itself.
8098
8099 This is used to remove insns made obviously dead by cse. It improves the
8100 heuristics in loop since it won't try to move dead invariants out of loops
8101 or make givs for dead quantities. The remaining passes of the compilation
8102 are also sped up. */
8103
8104 void
8105 delete_dead_from_cse (insns, nreg)
8106 rtx insns;
8107 int nreg;
8108 {
8109 int *counts = (int *) alloca (nreg * sizeof (int));
8110 rtx insn, prev;
8111 rtx tem;
8112 int i;
8113 int in_libcall = 0;
8114
8115 /* First count the number of times each register is used. */
8116 bzero (counts, sizeof (int) * nreg);
8117 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8118 count_reg_usage (insn, counts, 1);
8119
8120 /* Go from the last insn to the first and delete insns that only set unused
8121 registers or copy a register to itself. As we delete an insn, remove
8122 usage counts for registers it uses. */
8123 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8124 {
8125 int live_insn = 0;
8126
8127 prev = prev_real_insn (insn);
8128
8129 /* Don't delete any insns that are part of a libcall block.
8130 Flow or loop might get confused if we did that. Remember
8131 that we are scanning backwards. */
8132 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8133 in_libcall = 1;
8134
8135 if (in_libcall)
8136 live_insn = 1;
8137 else if (GET_CODE (PATTERN (insn)) == SET)
8138 {
8139 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8140 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8141 ;
8142
8143 #ifdef HAVE_cc0
8144 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8145 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8146 && ((tem = next_nonnote_insn (insn)) == 0
8147 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8148 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8149 ;
8150 #endif
8151 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8152 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8153 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8154 || side_effects_p (SET_SRC (PATTERN (insn))))
8155 live_insn = 1;
8156 }
8157 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8158 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8159 {
8160 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8161
8162 if (GET_CODE (elt) == SET)
8163 {
8164 if (GET_CODE (SET_DEST (elt)) == REG
8165 && SET_DEST (elt) == SET_SRC (elt))
8166 ;
8167
8168 #ifdef HAVE_cc0
8169 else if (GET_CODE (SET_DEST (elt)) == CC0
8170 && ! side_effects_p (SET_SRC (elt))
8171 && ((tem = next_nonnote_insn (insn)) == 0
8172 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8173 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8174 ;
8175 #endif
8176 else if (GET_CODE (SET_DEST (elt)) != REG
8177 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8178 || counts[REGNO (SET_DEST (elt))] != 0
8179 || side_effects_p (SET_SRC (elt)))
8180 live_insn = 1;
8181 }
8182 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8183 live_insn = 1;
8184 }
8185 else
8186 live_insn = 1;
8187
8188 /* If this is a dead insn, delete it and show registers in it aren't
8189 being used. */
8190
8191 if (! live_insn)
8192 {
8193 count_reg_usage (insn, counts, -1);
8194 delete_insn (insn);
8195 }
8196
8197 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8198 in_libcall = 0;
8199 }
8200 }
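
/* Two examples of insns this pass deletes (hand-written patterns for
   illustration; register numbers invented): a self-copy

     (set (reg:SI 80) (reg:SI 80))

   and a store to a pseudo-register whose use count is zero

     (set (reg:SI 81) (plus:SI (reg:SI 82) (const_int 4)))

   when counts[81] == 0.  As the tests above show, hard-register
   destinations and sources with side effects always keep an insn live.  */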