1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "regs.h"
24 #include "hard-reg-set.h"
25 #include "flags.h"
26 #include "real.h"
27 #include "insn-config.h"
28 #include "recog.h"
29
30 #include <stdio.h>
31 #include <setjmp.h>
32
33 /* The basic idea of common subexpression elimination is to go
34 through the code, keeping a record of expressions that would
35 have the same value at the current scan point, and replacing
36 expressions encountered with the cheapest equivalent expression.
37
38 It is too complicated to keep track of the different possibilities
39 when control paths merge; so, at each label, we forget all that is
40 known and start fresh. This can be described as processing each
41 basic block separately. Note, however, that these are not quite
42 the same as the basic blocks found by a later pass and used for
43 data flow analysis and register packing. We do not need to start fresh
44 after a conditional jump instruction if there is no label there.
45
46 We use two data structures to record the equivalent expressions:
47 a hash table for most expressions, and several vectors together
48 with "quantity numbers" to record equivalent (pseudo) registers.
49
50 The use of the special data structure for registers is desirable
51 because it is faster. It is possible because register references
52 contain a fairly small number, the register number, taken from
53 a contiguously allocated series, and two register references are
54 identical if they have the same number. General expressions
55 do not have any such thing, so the only way to retrieve the
56 information recorded on an expression other than a register
57 is to keep it in a hash table.
58
59 Registers and "quantity numbers":
60
61 At the start of each basic block, all of the (hardware and pseudo)
62 registers used in the function are given distinct quantity
63 numbers to indicate their contents. During scan, when the code
64 copies one register into another, we copy the quantity number.
65 When a register is loaded in any other way, we allocate a new
66 quantity number to describe the value generated by this operation.
67 `reg_qty' records what quantity a register is currently thought
68 of as containing.
69
70 All real quantity numbers are greater than or equal to `max_reg'.
71 If register N has not been assigned a quantity, reg_qty[N] will equal N.
72
73 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
74 variables should be referenced with an index below `max_reg'.
75
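   For example (an illustrative trace, assuming a hypothetical max_reg
   of 100): scanning (set (reg 65) (reg 64)) where reg 64 has no
   quantity yet first allocates quantity 100 for reg 64 and then copies
   it, so reg_qty[64] == reg_qty[65] == 100.  A later
   (set (reg 65) (mem ...)) would instead allocate a fresh quantity,
   say 101, for reg 65 alone.
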
76 We also maintain a bidirectional chain of registers for each
77 quantity number. `qty_first_reg', `qty_last_reg',
78 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
79
80 The first register in a chain is the one whose lifespan is least local.
81 Among equals, it is the one that was seen first.
82 We replace any equivalent register with that one.
83
84 If two registers have the same quantity number, it must be true that
85 REG expressions with the quantity's `qty_mode' are in the hash table
86 for both registers and are in the same class.
87
88 The converse is not true. Since hard registers may be referenced in
89 any mode, two REG expressions might be equivalent in the hash table
90 but not have the same quantity number if the quantity of one of the
91 registers does not have the same mode as those expressions.
92
93 Constants and quantity numbers
94
95 When a quantity has a known constant value, that value is stored
96 in the appropriate element of qty_const. This is in addition to
97 putting the constant in the hash table as is usual for non-regs.
98
99 Whether a reg or a constant is preferred is determined by the configuration
100 macro CONST_COSTS and will often depend on the constant value. In any
101 event, expressions containing constants can be simplified by fold_rtx.
102
103 When a quantity has a known nearly constant value (such as an address
104 of a stack slot), that value is stored in the appropriate element
105 of qty_const.
106
107 Integer constants don't have a machine mode. However, cse
108 determines the intended machine mode from the destination
109 of the instruction that moves the constant. The machine mode
110 is recorded in the hash table along with the actual RTL
111 constant expression so that different modes are kept separate.
112
113 Other expressions:
114
115 To record known equivalences among expressions in general
116 we use a hash table called `table'. It has a fixed number of buckets
117 that contain chains of `struct table_elt' elements for expressions.
118 These chains connect the elements whose expressions have the same
119 hash codes.
120
121 Other chains through the same elements connect the elements which
122 currently have equivalent values.
123
124 Register references in an expression are canonicalized before hashing
125 the expression. This is done using `reg_qty' and `qty_first_reg'.
126 The hash code of a register reference is computed using the quantity
127 number, not the register number.
128
129 When the value of an expression changes, it is necessary to remove from the
130 hash table not just that expression but all expressions whose values
131 could be different as a result.
132
133 1. If the value changing is in memory, except in special cases
134 ANYTHING referring to memory could be changed. That is because
135 nobody knows where a pointer does not point.
136 The function `invalidate_memory' removes what is necessary.
137
138 The special cases are when the address is constant or is
139 a constant plus a fixed register such as the frame pointer
140 or a static chain pointer. When such addresses are stored in,
141 we can tell exactly which other such addresses must be invalidated
142 due to overlap. `invalidate' does this.
143 All expressions that refer to non-constant
144 memory addresses are also invalidated. `invalidate_memory' does this.
145
146 2. If the value changing is a register, all expressions
147 containing references to that register, and only those,
148 must be removed.
149
150 Because searching the entire hash table for expressions that contain
151 a register is very slow, we try to figure out when it isn't necessary.
152 Precisely, this is necessary only when expressions have been
153 entered in the hash table using this register, and then the value has
154 changed, and then another expression wants to be added to refer to
155 the register's new value. This sequence of circumstances is rare
156 within any one basic block.
157
158 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
159 reg_tick[i] is incremented whenever a value is stored in register i.
160 reg_in_table[i] holds -1 if no references to register i have been
161 entered in the table; otherwise, it contains the value reg_tick[i] had
162 when the references were entered. If we want to enter a reference
163 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
164 Until we want to enter a new entry, the mere fact that the two vectors
165 don't match causes existing entries to be ignored if anyone tries to match them.
166
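   For instance (illustrative): after (plus (reg 64) (const_int 4)) is
   entered, reg_in_table[64] equals reg_tick[64].  A store into reg 64
   increments reg_tick[64], so the stale PLUS no longer matches in
   lookups; it is physically removed only when some later insn wants to
   enter a new expression mentioning reg 64.
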
167 Registers themselves are entered in the hash table as well as in
168 the equivalent-register chains. However, the vectors `reg_tick'
169 and `reg_in_table' do not apply to expressions which are simple
170 register references. These expressions are removed from the table
171 immediately when they become invalid, and this can be done even if
172 we do not immediately search for all the expressions that refer to
173 the register.
174
175 A CLOBBER rtx in an instruction invalidates its operand for further
176 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
177 invalidates everything that resides in memory.
178
179 Related expressions:
180
181 Constant expressions that differ only by an additive integer
182 are called related. When a constant expression is put in
183 the table, the related expression with no constant term
184 is also entered. These are made to point at each other
185 so that it is possible to find out if there exists any
186 register equivalent to an expression related to a given expression. */
187
188 /* One plus largest register number used in this function. */
189
190 static int max_reg;
191
192 /* Length of vectors indexed by quantity number.
193 We know in advance we will not need a quantity number this big. */
194
195 static int max_qty;
196
197 /* Next quantity number to be allocated.
198 This is 1 + the largest number needed so far. */
199
200 static int next_qty;
201
202 /* Indexed by quantity number, gives the first (or last) (pseudo) register
203 in the chain of registers that currently contain this quantity. */
204
205 static int *qty_first_reg;
206 static int *qty_last_reg;
207
208 /* Indexed by quantity number, gives the mode of the quantity. */
209
210 static enum machine_mode *qty_mode;
211
212 /* Indexed by quantity number, gives the rtx of the constant value of the
213 quantity, or zero if it does not have a known value.
214 A sum of the frame pointer (or arg pointer) plus a constant
215 can also be entered here. */
216
217 static rtx *qty_const;
218
219 /* Indexed by qty number, gives the insn that stored the constant value
220 recorded in `qty_const'. */
221
222 static rtx *qty_const_insn;
223
224 /* The next three variables are used to track when a comparison between a
225 quantity and some constant or register has been passed. In that case, we
226 know the results of the comparison in case we see it again. These variables
227 record a comparison that is known to be true. */
228
229 /* Indexed by qty number, gives the rtx code of a comparison with a known
230 result involving this quantity. If none, it is UNKNOWN. */
231 static enum rtx_code *qty_comparison_code;
232
233 /* Indexed by qty number, gives the constant being compared against in a
234 comparison of known result. If no such comparison, it is undefined.
235 If the comparison is not with a constant, it is zero. */
236
237 static rtx *qty_comparison_const;
238
239 /* Indexed by qty number, gives the quantity being compared against in a
240 comparison of known result. If no such comparison, it is undefined.
241 If the comparison is not with a register, it is -1. */
242
243 static int *qty_comparison_qty;
244
245 #ifdef HAVE_cc0
246 /* For machines that have a CC0, we do not record its value in the hash
247 table since its use is guaranteed to be the insn immediately following
248 its definition and any other insn is presumed to invalidate it.
249
250 Instead, we store below the value last assigned to CC0. If it should
251 happen to be a constant, it is stored in preference to the actual
252 assigned value. In case it is a constant, we store the mode in which
253 the constant should be interpreted. */
254
255 static rtx prev_insn_cc0;
256 static enum machine_mode prev_insn_cc0_mode;
257 #endif
258
259 /* Previous actual insn. 0 if at first insn of basic block. */
260
261 static rtx prev_insn;
262
263 /* Insn being scanned. */
264
265 static rtx this_insn;
266
267 /* Indexed by (pseudo) register number, gives the quantity number
268 of the register's current contents. */
269
270 static int *reg_qty;
271
272 /* Indexed by (pseudo) register number, gives the number of the next (or
273 previous) (pseudo) register in the chain of registers sharing the same
274 value.
275
276 Or -1 if this register is at the end of the chain.
277
278 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
279
280 static int *reg_next_eqv;
281 static int *reg_prev_eqv;
282
283 /* Indexed by (pseudo) register number, gives the number of times
284 that register has been altered in the current basic block. */
285
286 static int *reg_tick;
287
288 /* Indexed by (pseudo) register number, gives the reg_tick value at which
289 rtx's containing this register are valid in the hash table.
290 If this does not equal the current reg_tick value, such expressions
291 existing in the hash table are invalid.
292 If this is -1, no expressions containing this register have been
293 entered in the table. */
294
295 static int *reg_in_table;
296
297 /* A HARD_REG_SET containing all the hard registers for which there is
298 currently a REG expression in the hash table. Note the difference
299 from the above variables, which indicate if the REG is mentioned in some
300 expression in the table. */
301
302 static HARD_REG_SET hard_regs_in_table;
303
304 /* A HARD_REG_SET containing all the hard registers that are invalidated
305 by a CALL_INSN. */
306
307 static HARD_REG_SET regs_invalidated_by_call;
308
309 /* Two vectors of ints:
310 one containing max_reg elements, each holding -1; the other containing
311 max_reg + 500 (an approximation for max_qty) elements, where element i contains i.
312 These are used to initialize various other vectors fast. */
313
314 static int *all_minus_one;
315 static int *consec_ints;
316
317 /* CUID of insn that starts the basic block currently being cse-processed. */
318
319 static int cse_basic_block_start;
320
321 /* CUID of insn that ends the basic block currently being cse-processed. */
322
323 static int cse_basic_block_end;
324
325 /* Vector mapping INSN_UIDs to cuids.
326 The cuids are like uids but always increase monotonically.
327 We use them to see whether a reg is used outside a given basic block. */
328
329 static int *uid_cuid;
330
331 /* Highest UID in UID_CUID. */
332 static int max_uid;
333
334 /* Get the cuid of an insn. */
335
336 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
337
338 /* Nonzero if cse has altered conditional jump insns
339 in such a way that jump optimization should be redone. */
340
341 static int cse_jumps_altered;
342
343 /* canon_hash stores 1 in do_not_record
344 if it notices a reference to CC0, PC, or some other volatile
345 subexpression. */
346
347 static int do_not_record;
348
349 /* canon_hash stores 1 in hash_arg_in_memory
350 if it notices a reference to memory within the expression being hashed. */
351
352 static int hash_arg_in_memory;
353
354 /* canon_hash stores 1 in hash_arg_in_struct
355 if it notices a reference to memory that's part of a structure. */
356
357 static int hash_arg_in_struct;
358
359 /* The hash table contains buckets which are chains of `struct table_elt's,
360 each recording one expression's information.
361 That expression is in the `exp' field.
362
363 Those elements with the same hash code are chained in both directions
364 through the `next_same_hash' and `prev_same_hash' fields.
365
366 Each set of expressions with equivalent values
367 are on a two-way chain through the `next_same_value'
368 and `prev_same_value' fields, and all point with
369 the `first_same_value' field at the first element in
370 that chain. The chain is in order of increasing cost.
371 Each element's cost value is in its `cost' field.
372
373 The `in_memory' field is nonzero for elements that
374 involve any reference to memory. These elements are removed
375 whenever a write is done to an unidentified location in memory.
376 To be safe, we assume that a memory address is unidentified unless
377 the address is either a symbol constant or a constant plus
378 the frame pointer or argument pointer.
379
380 The `in_struct' field is nonzero for elements that
381 involve any reference to memory inside a structure or array.
382
383 The `related_value' field is used to connect related expressions
384 (that differ by adding an integer).
385 The related expressions are chained in a circular fashion.
386 `related_value' is zero for expressions for which this
387 chain is not useful.
388
389 The `cost' field stores the cost of this element's expression.
390
391 The `is_const' flag is set if the element is a constant (including
392 a fixed address).
393
394 The `flag' field is used as a temporary during some search routines.
395
396 The `mode' field is usually the same as GET_MODE (`exp'), but
397 if `exp' is a CONST_INT and has no machine mode then the `mode'
398 field is the mode it was being used as. Each constant is
399 recorded separately for each mode it is used with. */
400
401
402 struct table_elt
403 {
404 rtx exp;
405 struct table_elt *next_same_hash;
406 struct table_elt *prev_same_hash;
407 struct table_elt *next_same_value;
408 struct table_elt *prev_same_value;
409 struct table_elt *first_same_value;
410 struct table_elt *related_value;
411 int cost;
412 enum machine_mode mode;
413 char in_memory;
414 char in_struct;
415 char is_const;
416 char flag;
417 };
418
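/* Illustrative picture (editorial): an element for (plus (reg 64)
   (const_int 4)) sits on one doubly-linked chain with whatever else
   hashed into its bucket, and on a second, cost-ordered chain with the
   expressions currently known to compute the same value, such as a
   pseudo register that was loaded from it.  */
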
419 #define HASHBITS 16
420
421 /* We don't want a lot of buckets, because we rarely have very many
422 things stored in the hash table, and a lot of buckets slows
423 down a lot of loops that happen frequently. */
424 #define NBUCKETS 31
425
426 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
427 register (hard registers may require `do_not_record' to be set). */
428
429 #define HASH(X, M) \
430 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
431 ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS) \
432 : canon_hash (X, M) % NBUCKETS)
433
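/* Editorial note: by the definition above, two pseudo registers that
   share a quantity number hash into the same bucket, so recording new
   register equivalences never forces REG entries to move between
   buckets.  */
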
434 /* Determine whether register number N is considered a fixed register for CSE.
435 It is desirable to replace other regs with fixed regs, to reduce the need for
436 non-fixed hard regs.
437 A reg wins if it is either the frame pointer or designated as fixed,
438 but not if it is an overlapping register. */
439 #ifdef OVERLAPPING_REGNO_P
440 #define FIXED_REGNO_P(N) \
441 (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
442 && ! OVERLAPPING_REGNO_P ((N)))
443 #else
444 #define FIXED_REGNO_P(N) \
445 ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
446 #endif
447
448 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
449 hard registers and pointers into the frame are the cheapest with a cost
450 of 0. Next come pseudos with a cost of 1 and other hard registers with
451 a cost of 2. Aside from these special cases, call `rtx_cost'. */
452
453 #define CHEAP_REG(N) \
454 ((N) == FRAME_POINTER_REGNUM || (N) == STACK_POINTER_REGNUM \
455 || (N) == ARG_POINTER_REGNUM \
456 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
457 || ((N) < FIRST_PSEUDO_REGISTER \
458 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
459
460 #define COST(X) \
461 (GET_CODE (X) == REG \
462 ? (CHEAP_REG (REGNO (X)) ? 0 \
463 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
464 : 2) \
465 : rtx_cost (X, SET) * 2)
466
467 /* Determine if the quantity number for register X represents a valid index
468 into the `qty_...' variables. */
469
470 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
471
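/* Editorial sketch, not part of the original pass: a hypothetical
   helper showing how the quantity vectors are meant to be read.  Two
   registers are known to hold the same value exactly when each has a
   valid quantity and the two quantity numbers coincide.  */

static int
regs_known_equal_p (r1, r2)
     int r1, r2;
{
  return (REGNO_QTY_VALID_P (r1) && REGNO_QTY_VALID_P (r2)
	  && reg_qty[r1] == reg_qty[r2]);
}
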
472 static struct table_elt *table[NBUCKETS];
473
474 /* Chain of `struct table_elt's made so far for this function
475 but currently removed from the table. */
476
477 static struct table_elt *free_element_chain;
478
479 /* Number of `struct table_elt' structures made so far for this function. */
480
481 static int n_elements_made;
482
483 /* Maximum value `n_elements_made' has had so far in this compilation
484 for functions previously processed. */
485
486 static int max_elements_made;
487
488 /* Surviving equivalence class when two equivalence classes are merged
489 by recording the effects of a jump in the last insn. Zero if the
490 last insn was not a conditional jump. */
491
492 static struct table_elt *last_jump_equiv_class;
493
494 /* Set to the cost of a constant pool reference if one was found for a
495 symbolic constant. If this was found, it means we should try to
496 convert constants into constant pool entries if they don't fit in
497 the insn. */
498
499 static int constant_pool_entries_cost;
500
501 /* Bits describing what kind of values in memory must be invalidated
502 for a particular instruction. If all three bits are zero,
503 no memory refs need to be invalidated. Each bit is more powerful
504 than the preceding ones, and if a bit is set then the preceding
505 bits are also set.
506
507 Here is how the bits are set:
508 Pushing onto the stack invalidates only the stack pointer,
509 writing at a fixed address invalidates only variable addresses,
510 writing in a structure element at variable address
511 invalidates all but scalar variables,
512 and writing in anything else at variable address invalidates everything. */
513
514 struct write_data
515 {
516 int sp : 1; /* Invalidate stack pointer. */
517 int var : 1; /* Invalidate variable addresses. */
518 int nonscalar : 1; /* Invalidate all but scalar variables. */
519 int all : 1; /* Invalidate all memory refs. */
520 };
521
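/* For instance (editorial): with only `var' set, invalidate_memory
   below removes just the `in_memory' entries whose addresses vary;
   `nonscalar' additionally removes the `in_struct' entries; `all'
   removes every `in_memory' entry.  */
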
522 /* Define maximum length of a branch path. */
523
524 #define PATHLENGTH 10
525
526 /* This data describes a block that will be processed by cse_basic_block. */
527
528 struct cse_basic_block_data {
529 /* Lowest CUID value of insns in block. */
530 int low_cuid;
531 /* Highest CUID value of insns in block. */
532 int high_cuid;
533 /* Total number of SETs in block. */
534 int nsets;
535 /* Last insn in the block. */
536 rtx last;
537 /* Size of current branch path, if any. */
538 int path_size;
539 /* Current branch path, indicating which branches will be taken. */
540 struct branch_path {
541 /* The branch insn. */
542 rtx branch;
543 /* Whether it should be taken or not. AROUND is the same as taken
544 except that it is used when the destination label is not preceded
545 by a BARRIER. */
546 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
547 } path[PATHLENGTH];
548 };
549
550 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
551 virtual regs here because the simplify_*_operation routines are called
552 by integrate.c, which is called before virtual register instantiation. */
553
554 #define FIXED_BASE_PLUS_P(X) \
555 ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx \
556 || (X) == virtual_stack_vars_rtx \
557 || (X) == virtual_incoming_args_rtx \
558 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
559 && (XEXP (X, 0) == frame_pointer_rtx \
560 || XEXP (X, 0) == arg_pointer_rtx \
561 || XEXP (X, 0) == virtual_stack_vars_rtx \
562 || XEXP (X, 0) == virtual_incoming_args_rtx)))
563
564 /* Similar, but also allows reference to the stack pointer.
565
566 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
567 arg_pointer_rtx by itself is nonzero, because on at least one machine,
568 the i960, the arg pointer is zero when it is unused. */
569
570 #define NONZERO_BASE_PLUS_P(X) \
571 ((X) == frame_pointer_rtx \
572 || (X) == virtual_stack_vars_rtx \
573 || (X) == virtual_incoming_args_rtx \
574 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
575 && (XEXP (X, 0) == frame_pointer_rtx \
576 || XEXP (X, 0) == arg_pointer_rtx \
577 || XEXP (X, 0) == virtual_stack_vars_rtx \
578 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
579 || (X) == stack_pointer_rtx \
580 || (X) == virtual_stack_dynamic_rtx \
581 || (X) == virtual_outgoing_args_rtx \
582 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
583 && (XEXP (X, 0) == stack_pointer_rtx \
584 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
585 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
586
587 static void new_basic_block PROTO((void));
588 static void make_new_qty PROTO((int));
589 static void make_regs_eqv PROTO((int, int));
590 static void delete_reg_equiv PROTO((int));
591 static int mention_regs PROTO((rtx));
592 static int insert_regs PROTO((rtx, struct table_elt *, int));
593 static void free_element PROTO((struct table_elt *));
594 static void remove_from_table PROTO((struct table_elt *, int));
595 static struct table_elt *get_element PROTO((void));
596 static struct table_elt *lookup PROTO((rtx, int, enum machine_mode)),
597 *lookup_for_remove PROTO((rtx, int, enum machine_mode));
598 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
599 static struct table_elt *insert PROTO((rtx, struct table_elt *, int,
600 enum machine_mode));
601 static void merge_equiv_classes PROTO((struct table_elt *,
602 struct table_elt *));
603 static void invalidate PROTO((rtx));
604 static void remove_invalid_refs PROTO((int));
605 static void rehash_using_reg PROTO((rtx));
606 static void invalidate_memory PROTO((struct write_data *));
607 static void invalidate_for_call PROTO((void));
608 static rtx use_related_value PROTO((rtx, struct table_elt *));
609 static int canon_hash PROTO((rtx, enum machine_mode));
610 static int safe_hash PROTO((rtx, enum machine_mode));
611 static int exp_equiv_p PROTO((rtx, rtx, int, int));
612 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
613 HOST_WIDE_INT *,
614 HOST_WIDE_INT *));
615 static int refers_to_p PROTO((rtx, rtx));
616 static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
617 HOST_WIDE_INT));
618 static int cse_rtx_addr_varies_p PROTO((rtx));
619 static rtx canon_reg PROTO((rtx, rtx));
620 static void find_best_addr PROTO((rtx, rtx *));
621 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
622 enum machine_mode *,
623 enum machine_mode *));
624 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
625 rtx, rtx));
626 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
627 rtx, rtx));
628 static rtx fold_rtx PROTO((rtx, rtx));
629 static rtx equiv_constant PROTO((rtx));
630 static void record_jump_equiv PROTO((rtx, int));
631 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
632 rtx, rtx, int));
633 static void cse_insn PROTO((rtx, int));
634 static void note_mem_written PROTO((rtx, struct write_data *));
635 static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
636 static rtx cse_process_notes PROTO((rtx, rtx));
637 static void cse_around_loop PROTO((rtx));
638 static void invalidate_skipped_set PROTO((rtx, rtx));
639 static void invalidate_skipped_block PROTO((rtx));
640 static void cse_check_loop_start PROTO((rtx, rtx));
641 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
642 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
643 static void count_reg_usage PROTO((rtx, int *, int));
644 \f
645 /* Return an estimate of the cost of computing rtx X.
646 One use is in cse, to decide which expression to keep in the hash table.
647 Another is in rtl generation, to pick the cheapest way to multiply.
648 Other uses like the latter are expected in the future. */
649
650 /* Return the right cost to give to an operation
651 to make the cost of the corresponding register-to-register instruction
652 N times that of a fast register-to-register instruction. */
653
654 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
655
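/* Illustrative arithmetic (editorial): COSTS_N_INSNS (1) == 2, so one
   simple operation plus two pseudo-register operands (cost 1 each
   under COST) totals 4, and an N-insn sequence totals roughly 4 * N;
   the "- 2" keeps the operands of the outermost operation from being
   counted twice.  */
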
656 int
657 rtx_cost (x, outer_code)
658 rtx x;
659 enum rtx_code outer_code;
660 {
661 register int i, j;
662 register enum rtx_code code;
663 register char *fmt;
664 register int total;
665
666 if (x == 0)
667 return 0;
668
669 /* Compute the default costs of certain things.
670 Note that RTX_COSTS can override the defaults. */
671
672 code = GET_CODE (x);
673 switch (code)
674 {
675 case MULT:
676 /* Count multiplication by 2**n as a shift,
677 because if we are considering it, we would output it as a shift. */
678 if (GET_CODE (XEXP (x, 1)) == CONST_INT
679 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
680 total = 2;
681 else
682 total = COSTS_N_INSNS (5);
683 break;
684 case DIV:
685 case UDIV:
686 case MOD:
687 case UMOD:
688 total = COSTS_N_INSNS (7);
689 break;
690 case USE:
691 /* Used in loop.c and combine.c as a marker. */
692 total = 0;
693 break;
694 case ASM_OPERANDS:
695 /* We don't want these to be used in substitutions because
696 we have no way of validating the resulting insn. So assign
697 anything containing an ASM_OPERANDS a very high cost. */
698 total = 1000;
699 break;
700 default:
701 total = 2;
702 }
703
704 switch (code)
705 {
706 case REG:
707 return ! CHEAP_REG (REGNO (x));
708
709 case SUBREG:
710 /* If we can't tie these modes, make this expensive. The larger
711 the mode, the more expensive it is. */
712 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
713 return COSTS_N_INSNS (2
714 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
715 return 2;
716 #ifdef RTX_COSTS
717 RTX_COSTS (x, code, outer_code);
718 #endif
719 CONST_COSTS (x, code, outer_code);
720 }
721
722 /* Sum the costs of the sub-rtx's, plus cost of this operation,
723 which is already in total. */
724
725 fmt = GET_RTX_FORMAT (code);
726 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
727 if (fmt[i] == 'e')
728 total += rtx_cost (XEXP (x, i), code);
729 else if (fmt[i] == 'E')
730 for (j = 0; j < XVECLEN (x, i); j++)
731 total += rtx_cost (XVECEXP (x, i, j), code);
732
733 return total;
734 }
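
/* Editorial sketch (hypothetical helper, not in the original file):
   callers compare these costs to decide which of two equivalent
   expressions to keep, along these lines.  */

static rtx
cheaper_rtx (x, y)
     rtx x, y;
{
  return COST (x) <= COST (y) ? x : y;
}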
735 \f
736 /* Clear the hash table and initialize each register with its own quantity,
737 for a new basic block. */
738
739 static void
740 new_basic_block ()
741 {
742 register int i;
743
744 next_qty = max_reg;
745
746 bzero (reg_tick, max_reg * sizeof (int));
747
748 bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
749 bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
750 CLEAR_HARD_REG_SET (hard_regs_in_table);
751
752 /* The per-quantity values used to be initialized here, but it is
753 much faster to initialize each as it is made in `make_new_qty'. */
754
755 for (i = 0; i < NBUCKETS; i++)
756 {
757 register struct table_elt *this, *next;
758 for (this = table[i]; this; this = next)
759 {
760 next = this->next_same_hash;
761 free_element (this);
762 }
763 }
764
765 bzero (table, sizeof table);
766
767 prev_insn = 0;
768
769 #ifdef HAVE_cc0
770 prev_insn_cc0 = 0;
771 #endif
772 }
773
774 /* Say that register REG contains a quantity not in any register before
775 and initialize that quantity. */
776
777 static void
778 make_new_qty (reg)
779 register int reg;
780 {
781 register int q;
782
783 if (next_qty >= max_qty)
784 abort ();
785
786 q = reg_qty[reg] = next_qty++;
787 qty_first_reg[q] = reg;
788 qty_last_reg[q] = reg;
789 qty_const[q] = qty_const_insn[q] = 0;
790 qty_comparison_code[q] = UNKNOWN;
791
792 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
793 }
794
795 /* Make reg NEW equivalent to reg OLD.
796 OLD is not changing; NEW is. */
797
798 static void
799 make_regs_eqv (new, old)
800 register int new, old;
801 {
802 register int lastr, firstr;
803 register int q = reg_qty[old];
804
805 /* Nothing should become eqv until it has a "non-invalid" qty number. */
806 if (! REGNO_QTY_VALID_P (old))
807 abort ();
808
809 reg_qty[new] = q;
810 firstr = qty_first_reg[q];
811 lastr = qty_last_reg[q];
812
813 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
814 hard regs. Among pseudos, if NEW will live longer than any other reg
815 of the same qty, and that is beyond the current basic block,
816 make it the new canonical replacement for this qty. */
817 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
818 /* Certain fixed registers might be of the class NO_REGS. This means
819 that not only can they not be allocated by the compiler, but
820 they cannot be used in substitutions or canonicalizations
821 either. */
822 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
823 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
824 || (new >= FIRST_PSEUDO_REGISTER
825 && (firstr < FIRST_PSEUDO_REGISTER
826 || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
827 || (uid_cuid[regno_first_uid[new]]
828 < cse_basic_block_start))
829 && (uid_cuid[regno_last_uid[new]]
830 > uid_cuid[regno_last_uid[firstr]]))))))
831 {
832 reg_prev_eqv[firstr] = new;
833 reg_next_eqv[new] = firstr;
834 reg_prev_eqv[new] = -1;
835 qty_first_reg[q] = new;
836 }
837 else
838 {
839 /* If NEW is a hard reg (known to be non-fixed), insert at end.
840 Otherwise, insert before any non-fixed hard regs that are at the
841 end. Registers of class NO_REGS cannot be used as an
842 equivalent for anything. */
843 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
844 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
845 && new >= FIRST_PSEUDO_REGISTER)
846 lastr = reg_prev_eqv[lastr];
847 reg_next_eqv[new] = reg_next_eqv[lastr];
848 if (reg_next_eqv[lastr] >= 0)
849 reg_prev_eqv[reg_next_eqv[lastr]] = new;
850 else
851 qty_last_reg[q] = new;
852 reg_next_eqv[lastr] = new;
853 reg_prev_eqv[new] = lastr;
854 }
855 }
856
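/* Illustrative trace (editorial): if quantity Q's chain holds only
   pseudo 64 and make_regs_eqv (65, 64) decides that 65 does not
   outlive 64, the else branch appends it, leaving qty_first_reg[Q]
   == 64, qty_last_reg[Q] == 65, reg_next_eqv[64] == 65 and
   reg_prev_eqv[65] == 64.  */
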
857 /* Remove REG from its equivalence class. */
858
859 static void
860 delete_reg_equiv (reg)
861 register int reg;
862 {
863 register int n = reg_next_eqv[reg];
864 register int p = reg_prev_eqv[reg];
865 register int q = reg_qty[reg];
866
867 /* If invalid, do nothing. N and P above are undefined in that case. */
868 if (q == reg)
869 return;
870
871 if (n != -1)
872 reg_prev_eqv[n] = p;
873 else
874 qty_last_reg[q] = p;
875 if (p != -1)
876 reg_next_eqv[p] = n;
877 else
878 qty_first_reg[q] = n;
879
880 reg_qty[reg] = reg;
881 }
882
883 /* Remove any invalid expressions from the hash table
884 that refer to any of the registers contained in expression X.
885
886 Make sure that newly inserted references to those registers
887 as subexpressions will be considered valid.
888
889 mention_regs is not called when a register itself
890 is being stored in the table.
891
892 Return 1 if we have done something that may have changed the hash code
893 of X. */
894
895 static int
896 mention_regs (x)
897 rtx x;
898 {
899 register enum rtx_code code;
900 register int i, j;
901 register char *fmt;
902 register int changed = 0;
903
904 if (x == 0)
905 return 0;
906
907 code = GET_CODE (x);
908 if (code == REG)
909 {
910 register int regno = REGNO (x);
911 register int endregno
912 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
913 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
914 int i;
915
916 for (i = regno; i < endregno; i++)
917 {
918 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
919 remove_invalid_refs (i);
920
921 reg_in_table[i] = reg_tick[i];
922 }
923
924 return 0;
925 }
926
927 /* If X is a comparison or a COMPARE and either operand is a register
928 that does not have a quantity, give it one. This is so that a later
929 call to record_jump_equiv won't cause X to be assigned a different
930 hash code and not found in the table after that call.
931
932 It is not necessary to do this here, since rehash_using_reg can
933 fix up the table later, but doing this here eliminates the need to
934 call that expensive function in the most common case where the only
935 use of the register is in the comparison. */
936
937 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
938 {
939 if (GET_CODE (XEXP (x, 0)) == REG
940 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
941 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
942 {
943 rehash_using_reg (XEXP (x, 0));
944 changed = 1;
945 }
946
947 if (GET_CODE (XEXP (x, 1)) == REG
948 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
949 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
950 {
951 rehash_using_reg (XEXP (x, 1));
952 changed = 1;
953 }
954 }
955
956 fmt = GET_RTX_FORMAT (code);
957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
958 if (fmt[i] == 'e')
959 changed |= mention_regs (XEXP (x, i));
960 else if (fmt[i] == 'E')
961 for (j = 0; j < XVECLEN (x, i); j++)
962 changed |= mention_regs (XVECEXP (x, i, j));
963
964 return changed;
965 }
966
967 /* Update the register quantities for inserting X into the hash table
968 with a value equivalent to CLASSP.
969 (If the class does not contain a REG, it is irrelevant.)
970 If MODIFIED is nonzero, X is a destination; it is being modified.
971 Note that delete_reg_equiv should be called on a register
972 before insert_regs is done on that register with MODIFIED != 0.
973
974 Nonzero value means that elements of reg_qty have changed
975 so X's hash code may be different. */
976
977 static int
978 insert_regs (x, classp, modified)
979 rtx x;
980 struct table_elt *classp;
981 int modified;
982 {
983 if (GET_CODE (x) == REG)
984 {
985 register int regno = REGNO (x);
986
987 /* If REGNO is in the equivalence table already but is of the
988 wrong mode for that equivalence, don't do anything here. */
989
990 if (REGNO_QTY_VALID_P (regno)
991 && qty_mode[reg_qty[regno]] != GET_MODE (x))
992 return 0;
993
994 if (modified || ! REGNO_QTY_VALID_P (regno))
995 {
996 if (classp)
997 for (classp = classp->first_same_value;
998 classp != 0;
999 classp = classp->next_same_value)
1000 if (GET_CODE (classp->exp) == REG
1001 && GET_MODE (classp->exp) == GET_MODE (x))
1002 {
1003 make_regs_eqv (regno, REGNO (classp->exp));
1004 return 1;
1005 }
1006
1007 make_new_qty (regno);
1008 qty_mode[reg_qty[regno]] = GET_MODE (x);
1009 return 1;
1010 }
 /* The register already has a suitable quantity; nothing changed. */
 return 0;
1011 }
1012
1013 /* If X is a SUBREG, we will likely be inserting the inner register in the
1014 table. If that register doesn't have an assigned quantity number at
1015 this point but does later, the insertion that we will be doing now will
1016 not be accessible because its hash code will have changed. So assign
1017 a quantity number now. */
1018
1019 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1020 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1021 {
1022 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1023 mention_regs (SUBREG_REG (x));
1024 return 1;
1025 }
1026 else
1027 return mention_regs (x);
1028 }
1029 \f
1030 /* Look in or update the hash table. */
1031
1032 /* Put the element ELT on the list of free elements. */
1033
1034 static void
1035 free_element (elt)
1036 struct table_elt *elt;
1037 {
1038 elt->next_same_hash = free_element_chain;
1039 free_element_chain = elt;
1040 }
1041
1042 /* Return an element that is free for use. */
1043
1044 static struct table_elt *
1045 get_element ()
1046 {
1047 struct table_elt *elt = free_element_chain;
1048 if (elt)
1049 {
1050 free_element_chain = elt->next_same_hash;
1051 return elt;
1052 }
1053 n_elements_made++;
1054 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1055 }
1056
1057 /* Remove table element ELT from use in the table.
1058 HASH is its hash code, made using the HASH macro.
1059 It's an argument because often that is known in advance
1060 and we save much time not recomputing it. */
1061
1062 static void
1063 remove_from_table (elt, hash)
1064 register struct table_elt *elt;
1065 int hash;
1066 {
1067 if (elt == 0)
1068 return;
1069
1070 /* Mark this element as removed. See cse_insn. */
1071 elt->first_same_value = 0;
1072
1073 /* Remove the table element from its equivalence class. */
1074
1075 {
1076 register struct table_elt *prev = elt->prev_same_value;
1077 register struct table_elt *next = elt->next_same_value;
1078
1079 if (next) next->prev_same_value = prev;
1080
1081 if (prev)
1082 prev->next_same_value = next;
1083 else
1084 {
1085 register struct table_elt *newfirst = next;
1086 while (next)
1087 {
1088 next->first_same_value = newfirst;
1089 next = next->next_same_value;
1090 }
1091 }
1092 }
1093
1094 /* Remove the table element from its hash bucket. */
1095
1096 {
1097 register struct table_elt *prev = elt->prev_same_hash;
1098 register struct table_elt *next = elt->next_same_hash;
1099
1100 if (next) next->prev_same_hash = prev;
1101
1102 if (prev)
1103 prev->next_same_hash = next;
1104 else if (table[hash] == elt)
1105 table[hash] = next;
1106 else
1107 {
1108 /* This entry is not in the proper hash bucket. This can happen
1109 when two classes were merged by `merge_equiv_classes'. Search
1110 for the hash bucket that it heads. This happens only very
1111 rarely, so the cost is acceptable. */
1112 for (hash = 0; hash < NBUCKETS; hash++)
1113 if (table[hash] == elt)
1114 table[hash] = next;
1115 }
1116 }
1117
1118 /* Remove the table element from its related-value circular chain. */
1119
1120 if (elt->related_value != 0 && elt->related_value != elt)
1121 {
1122 register struct table_elt *p = elt->related_value;
1123 while (p->related_value != elt)
1124 p = p->related_value;
1125 p->related_value = elt->related_value;
1126 if (p->related_value == p)
1127 p->related_value = 0;
1128 }
1129
1130 free_element (elt);
1131 }
1132
1133 /* Look up X in the hash table and return its table element,
1134 or 0 if X is not in the table.
1135
1136 MODE is the machine-mode of X, or if X is an integer constant
1137 with VOIDmode then MODE is the mode with which X will be used.
1138
1139 Here we are satisfied to find an expression whose tree structure
1140 looks like X. */
1141
1142 static struct table_elt *
1143 lookup (x, hash, mode)
1144 rtx x;
1145 int hash;
1146 enum machine_mode mode;
1147 {
1148 register struct table_elt *p;
1149
1150 for (p = table[hash]; p; p = p->next_same_hash)
1151 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1152 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1153 return p;
1154
1155 return 0;
1156 }
1157
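/* Editorial sketch (hypothetical helper, not in the original file):
   the usual calling sequence pairs HASH with lookup, clearing and then
   checking `do_not_record', since an expression containing a volatile
   subexpression must be neither found in nor entered into the table.  */

static struct table_elt *
lookup_checked (x, mode)
     rtx x;
     enum machine_mode mode;
{
  int hash;

  do_not_record = 0;
  hash = HASH (x, mode);
  if (do_not_record)
    return 0;
  return lookup (x, hash, mode);
}
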
1158 /* Like `lookup' but don't care whether the table element uses invalid regs.
1159 Also ignore discrepancies in the machine mode of a register. */
1160
1161 static struct table_elt *
1162 lookup_for_remove (x, hash, mode)
1163 rtx x;
1164 int hash;
1165 enum machine_mode mode;
1166 {
1167 register struct table_elt *p;
1168
1169 if (GET_CODE (x) == REG)
1170 {
1171 int regno = REGNO (x);
1172 /* Don't check the machine mode when comparing registers;
1173 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1174 for (p = table[hash]; p; p = p->next_same_hash)
1175 if (GET_CODE (p->exp) == REG
1176 && REGNO (p->exp) == regno)
1177 return p;
1178 }
1179 else
1180 {
1181 for (p = table[hash]; p; p = p->next_same_hash)
1182 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1183 return p;
1184 }
1185
1186 return 0;
1187 }
1188
1189 /* Look for an expression equivalent to X and with code CODE.
1190 If one is found, return that expression. */
1191
1192 static rtx
1193 lookup_as_function (x, code)
1194 rtx x;
1195 enum rtx_code code;
1196 {
1197 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1198 GET_MODE (x));
1199 if (p == 0)
1200 return 0;
1201
1202 for (p = p->first_same_value; p; p = p->next_same_value)
1203 {
1204 if (GET_CODE (p->exp) == code
1205 /* Make sure this is a valid entry in the table. */
1206 && exp_equiv_p (p->exp, p->exp, 1, 0))
1207 return p->exp;
1208 }
1209
1210 return 0;
1211 }
1212
1213 /* Insert X in the hash table, assuming HASH is its hash code
1214 and CLASSP is an element of the class it should go in
1215 (or 0 if a new class should be made).
1216 It is inserted at the proper position to keep the class in
1217 the order cheapest first.
1218
1219 MODE is the machine-mode of X, or if X is an integer constant
1220 with VOIDmode then MODE is the mode with which X will be used.
1221
1222 For elements of equal cheapness, the most recent one
1223 goes in front, except that the first element in the list
1224 remains first unless a cheaper element is added. The order of
1225 pseudo-registers does not matter, as canon_reg will be called to
1226 find the cheapest when a register is retrieved from the table.
1227
1228 The in_memory field in the hash table element is set to 0.
1229 The caller must set it nonzero if appropriate.
1230
1231 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1232 and if insert_regs returns a nonzero value
1233 you must then recompute its hash code before calling here.
1234
1235 If necessary, update table showing constant values of quantities. */
1236
1237 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1238
1239 static struct table_elt *
1240 insert (x, classp, hash, mode)
1241 register rtx x;
1242 register struct table_elt *classp;
1243 int hash;
1244 enum machine_mode mode;
1245 {
1246 register struct table_elt *elt;
1247
1248 /* If X is a register and we haven't made a quantity for it,
1249 something is wrong. */
1250 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1251 abort ();
1252
1253 /* If X is a hard register, show it is being put in the table. */
1254 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1255 {
1256 int regno = REGNO (x);
1257 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1258 int i;
1259
1260 for (i = regno; i < endregno; i++)
1261 SET_HARD_REG_BIT (hard_regs_in_table, i);
1262 }
1263
1264
1265 /* Put an element for X into the right hash bucket. */
1266
1267 elt = get_element ();
1268 elt->exp = x;
1269 elt->cost = COST (x);
1270 elt->next_same_value = 0;
1271 elt->prev_same_value = 0;
1272 elt->next_same_hash = table[hash];
1273 elt->prev_same_hash = 0;
1274 elt->related_value = 0;
1275 elt->in_memory = 0;
1276 elt->mode = mode;
1277 elt->is_const = (CONSTANT_P (x)
1278 /* GNU C++ takes advantage of this for `this'
1279 (and other const values). */
1280 || (RTX_UNCHANGING_P (x)
1281 && GET_CODE (x) == REG
1282 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1283 || FIXED_BASE_PLUS_P (x));
1284
1285 if (table[hash])
1286 table[hash]->prev_same_hash = elt;
1287 table[hash] = elt;
1288
1289 /* Put it into the proper value-class. */
1290 if (classp)
1291 {
1292 classp = classp->first_same_value;
1293 if (CHEAPER (elt, classp))
1294 /* Insert at the head of the class */
1295 {
1296 register struct table_elt *p;
1297 elt->next_same_value = classp;
1298 classp->prev_same_value = elt;
1299 elt->first_same_value = elt;
1300
1301 for (p = classp; p; p = p->next_same_value)
1302 p->first_same_value = elt;
1303 }
1304 else
1305 {
1306 /* Insert not at head of the class. */
1307 /* Put it after the last element cheaper than X. */
1308 register struct table_elt *p, *next;
1309 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1310 p = next);
1311 /* Put it after P and before NEXT. */
1312 elt->next_same_value = next;
1313 if (next)
1314 next->prev_same_value = elt;
1315 elt->prev_same_value = p;
1316 p->next_same_value = elt;
1317 elt->first_same_value = classp;
1318 }
1319 }
1320 else
1321 elt->first_same_value = elt;
1322
1323 /* If this is a constant being set equivalent to a register or a register
1324 being set equivalent to a constant, note the constant equivalence.
1325
1326 If this is a constant, it cannot be equivalent to a different constant,
1327 and a constant is the only thing that can be cheaper than a register. So
1328 we know the register is the head of the class (before the constant was
1329 inserted).
1330
1331 If this is a register that is not already known equivalent to a
1332 constant, we must check the entire class.
1333
1334 If this is a register that is already known equivalent to a constant,
1335 update `qty_const_insn' to show that `this_insn' is the latest
1336 insn making that quantity equivalent to the constant. */
1337
1338 if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
1339 {
1340 qty_const[reg_qty[REGNO (classp->exp)]]
1341 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1342 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1343 }
1344
1345 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
1346 {
1347 register struct table_elt *p;
1348
1349 for (p = classp; p != 0; p = p->next_same_value)
1350 {
1351 if (p->is_const)
1352 {
1353 qty_const[reg_qty[REGNO (x)]]
1354 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1355 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1356 break;
1357 }
1358 }
1359 }
1360
1361 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1362 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1363 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1364
1365 /* If this is a constant with symbolic value,
1366 and it has a term with an explicit integer value,
1367 link it up with related expressions. */
1368 if (GET_CODE (x) == CONST)
1369 {
1370 rtx subexp = get_related_value (x);
1371 int subhash;
1372 struct table_elt *subelt, *subelt_prev;
1373
1374 if (subexp != 0)
1375 {
1376 /* Get the integer-free subexpression in the hash table. */
1377 subhash = safe_hash (subexp, mode) % NBUCKETS;
1378 subelt = lookup (subexp, subhash, mode);
1379 if (subelt == 0)
1380 subelt = insert (subexp, NULL_PTR, subhash, mode);
1381 /* Initialize SUBELT's circular chain if it has none. */
1382 if (subelt->related_value == 0)
1383 subelt->related_value = subelt;
1384 /* Find the element in the circular chain that precedes SUBELT. */
1385 subelt_prev = subelt;
1386 while (subelt_prev->related_value != subelt)
1387 subelt_prev = subelt_prev->related_value;
1388 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1389 This way the element that follows SUBELT is the oldest one. */
1390 elt->related_value = subelt_prev->related_value;
1391 subelt_prev->related_value = elt;
1392 }
1393 }
1394
1395 return elt;
1396 }
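
/* Illustrative example (editorial): on a target whose CONST_COSTS
   makes small integers free, inserting (const_int 4) into a class
   headed by a pseudo register puts the constant at the head (cost 0
   beats cost 1), and the is_const branch above then records the
   equivalence in qty_const for the register's quantity.  */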
1397 \f
1398 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1399 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1400 the two classes equivalent.
1401
1402 CLASS1 will be the surviving class; CLASS2 should not be used after this
1403 call.
1404
1405 Any invalid entries in CLASS2 will not be copied. */
1406
1407 static void
1408 merge_equiv_classes (class1, class2)
1409 struct table_elt *class1, *class2;
1410 {
1411 struct table_elt *elt, *next, *new;
1412
1413 /* Ensure we start with the head of the classes. */
1414 class1 = class1->first_same_value;
1415 class2 = class2->first_same_value;
1416
1417 /* If they were already equal, forget it. */
1418 if (class1 == class2)
1419 return;
1420
1421 for (elt = class2; elt; elt = next)
1422 {
1423 int hash;
1424 rtx exp = elt->exp;
1425 enum machine_mode mode = elt->mode;
1426
1427 next = elt->next_same_value;
1428
1429 /* Remove old entry, make a new one in CLASS1's class.
1430 Don't do this for invalid entries as we cannot find their
1431 hash code (it also isn't necessary). */
1432 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1433 {
1434 hash_arg_in_memory = 0;
1435 hash_arg_in_struct = 0;
1436 hash = HASH (exp, mode);
1437
1438 if (GET_CODE (exp) == REG)
1439 delete_reg_equiv (REGNO (exp));
1440
1441 remove_from_table (elt, hash);
1442
1443 if (insert_regs (exp, class1, 0))
1444 hash = HASH (exp, mode);
1445 new = insert (exp, class1, hash, mode);
1446 new->in_memory = hash_arg_in_memory;
1447 new->in_struct = hash_arg_in_struct;
1448 }
1449 }
1450 }
1451 \f
1452 /* Remove from the hash table, or mark as invalid,
1453 all expressions whose values could be altered by storing in X.
1454 X is a register, a subreg, or a memory reference with nonvarying address
1455 (because, when a memory reference with a varying address is stored in,
1456 all memory references are removed by invalidate_memory
1457 so specific invalidation is superfluous).
1458
1459 A nonvarying address may be just a register or just
1460 a symbol reference, or it may be either of those plus
1461 a numeric offset. */
1462
1463 static void
1464 invalidate (x)
1465 rtx x;
1466 {
1467 register int i;
1468 register struct table_elt *p;
1469 rtx base;
1470 HOST_WIDE_INT start, end;
1471
1472 /* If X is a register, dependencies on its contents
1473 are recorded through the qty number mechanism.
1474 Just change the qty number of the register,
1475 mark it as invalid for expressions that refer to it,
1476 and remove it itself. */
1477
1478 if (GET_CODE (x) == REG)
1479 {
1480 register int regno = REGNO (x);
1481 register int hash = HASH (x, GET_MODE (x));
1482
1483 /* Remove REGNO from any quantity list it might be on and indicate
1484 that its value might have changed. If it is a pseudo, remove its
1485 entry from the hash table.
1486
1487 For a hard register, we do the first two actions above for any
1488 additional hard registers corresponding to X. Then, if any of these
1489 registers are in the table, we must remove any REG entries that
1490 overlap these registers. */
1491
1492 delete_reg_equiv (regno);
1493 reg_tick[regno]++;
1494
1495 if (regno >= FIRST_PSEUDO_REGISTER)
1496 remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
1497 else
1498 {
1499 int in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1500 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1501 int tregno, tendregno;
1502 register struct table_elt *p, *next;
1503
1504 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1505
1506 for (i = regno + 1; i < endregno; i++)
1507 {
1508 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1509 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1510 delete_reg_equiv (i);
1511 reg_tick[i]++;
1512 }
1513
1514 if (in_table)
1515 for (hash = 0; hash < NBUCKETS; hash++)
1516 for (p = table[hash]; p; p = next)
1517 {
1518 next = p->next_same_hash;
1519
1520 if (GET_CODE (p->exp) != REG
1521 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1522 continue;
1523
1524 tregno = REGNO (p->exp);
1525 tendregno
1526 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1527 if (tendregno > regno && tregno < endregno)
1528 remove_from_table (p, hash);
1529 }
1530 }
1531
1532 return;
1533 }
1534
1535 if (GET_CODE (x) == SUBREG)
1536 {
1537 if (GET_CODE (SUBREG_REG (x)) != REG)
1538 abort ();
1539 invalidate (SUBREG_REG (x));
1540 return;
1541 }
1542
1543 /* X is not a register; it must be a memory reference with
1544 a nonvarying address. Remove all hash table elements
1545 that refer to overlapping pieces of memory. */
1546
1547 if (GET_CODE (x) != MEM)
1548 abort ();
1549
1550 set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
1551 &base, &start, &end);
1552
1553 for (i = 0; i < NBUCKETS; i++)
1554 {
1555 register struct table_elt *next;
1556 for (p = table[i]; p; p = next)
1557 {
1558 next = p->next_same_hash;
1559 if (refers_to_mem_p (p->exp, base, start, end))
1560 remove_from_table (p, i);
1561 }
1562 }
1563 }
1564
1565 /* Remove all expressions that refer to register REGNO,
1566 since they are already invalid, and we are about to
1567 mark that register valid again and don't want the old
1568 expressions to reappear as valid. */
1569
1570 static void
1571 remove_invalid_refs (regno)
1572 int regno;
1573 {
1574 register int i;
1575 register struct table_elt *p, *next;
1576
1577 for (i = 0; i < NBUCKETS; i++)
1578 for (p = table[i]; p; p = next)
1579 {
1580 next = p->next_same_hash;
1581 if (GET_CODE (p->exp) != REG
1582 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1583 remove_from_table (p, i);
1584 }
1585 }
1586 \f
1587 /* Recompute the hash codes of any valid entries in the hash table that
1588 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1589
1590 This is called when we make a jump equivalence. */
1591
1592 static void
1593 rehash_using_reg (x)
1594 rtx x;
1595 {
1596 int i;
1597 struct table_elt *p, *next;
1598 int hash;
1599
1600 if (GET_CODE (x) == SUBREG)
1601 x = SUBREG_REG (x);
1602
1603 /* If X is not a register or if the register is known not to be in any
1604 valid entries in the table, we have no work to do. */
1605
1606 if (GET_CODE (x) != REG
1607 || reg_in_table[REGNO (x)] < 0
1608 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1609 return;
1610
1611 /* Scan all hash chains looking for valid entries that mention X.
1612 If we find one and it is in the wrong hash chain, move it. We can skip
1613 objects that are registers, since they are handled specially. */
1614
1615 for (i = 0; i < NBUCKETS; i++)
1616 for (p = table[i]; p; p = next)
1617 {
1618 next = p->next_same_hash;
1619 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1620 && exp_equiv_p (p->exp, p->exp, 1, 0)
1621 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1622 {
1623 if (p->next_same_hash)
1624 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1625
1626 if (p->prev_same_hash)
1627 p->prev_same_hash->next_same_hash = p->next_same_hash;
1628 else
1629 table[i] = p->next_same_hash;
1630
1631 p->next_same_hash = table[hash];
1632 p->prev_same_hash = 0;
1633 if (table[hash])
1634 table[hash]->prev_same_hash = p;
1635 table[hash] = p;
1636 }
1637 }
1638 }
1639 \f
1640 /* Remove from the hash table all expressions that reference memory,
1641 or some of them as specified by *WRITES. */
1642
1643 static void
1644 invalidate_memory (writes)
1645 struct write_data *writes;
1646 {
1647 register int i;
1648 register struct table_elt *p, *next;
1649 int all = writes->all;
1650 int nonscalar = writes->nonscalar;
1651
1652 for (i = 0; i < NBUCKETS; i++)
1653 for (p = table[i]; p; p = next)
1654 {
1655 next = p->next_same_hash;
1656 if (p->in_memory
1657 && (all
1658 || (nonscalar && p->in_struct)
1659 || cse_rtx_addr_varies_p (p->exp)))
1660 remove_from_table (p, i);
1661 }
1662 }
1663 \f
1664 /* Remove from the hash table any expressions that are call-clobbered
1665 registers. Also update their TICK values. */
1666
1667 static void
1668 invalidate_for_call ()
1669 {
1670 int regno, endregno;
1671 int i;
1672 int hash;
1673 struct table_elt *p, *next;
1674 int in_table = 0;
1675
1676 /* Go through all the hard registers. For each that is clobbered in
1677 a CALL_INSN, remove the register from quantity chains and update
1678 reg_tick if defined. Also see if any of these registers is currently
1679 in the table. */
1680
1681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1682 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1683 {
1684 delete_reg_equiv (regno);
1685 if (reg_tick[regno] >= 0)
1686 reg_tick[regno]++;
1687
1688 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1689 }
1690
1691 /* In the case where we have no call-clobbered hard registers in the
1692 table, we are done. Otherwise, scan the table and remove any
1693 entry that overlaps a call-clobbered register. */
1694
1695 if (in_table)
1696 for (hash = 0; hash < NBUCKETS; hash++)
1697 for (p = table[hash]; p; p = next)
1698 {
1699 next = p->next_same_hash;
1700
1701 if (GET_CODE (p->exp) != REG
1702 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1703 continue;
1704
1705 regno = REGNO (p->exp);
1706 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1707
1708 for (i = regno; i < endregno; i++)
1709 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1710 {
1711 remove_from_table (p, hash);
1712 break;
1713 }
1714 }
1715 }
1716 \f
1717 /* Given an expression X of type CONST,
1718 and ELT which is its table entry (or 0 if it
1719 is not in the hash table),
1720 return an alternate expression for X as a register plus integer.
1721 If none can be found, return 0. */
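
/* A sketch of the intended use, with an invented register and symbol:
   if X is (const (plus (symbol_ref "a") (const_int 8))) and the table
   records that (symbol_ref "a") currently lives in (reg 70), we return
   (plus (reg 70) (const_int 8)), which is usually cheaper to compute than
   materializing the whole constant address from scratch. */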
1722
1723 static rtx
1724 use_related_value (x, elt)
1725 rtx x;
1726 struct table_elt *elt;
1727 {
1728 register struct table_elt *relt = 0;
1729 register struct table_elt *p, *q;
1730 HOST_WIDE_INT offset;
1731
1732 /* First, is there anything related known?
1733 If we have a table element, we can tell from that.
1734 Otherwise, must look it up. */
1735
1736 if (elt != 0 && elt->related_value != 0)
1737 relt = elt;
1738 else if (elt == 0 && GET_CODE (x) == CONST)
1739 {
1740 rtx subexp = get_related_value (x);
1741 if (subexp != 0)
1742 relt = lookup (subexp,
1743 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1744 GET_MODE (subexp));
1745 }
1746
1747 if (relt == 0)
1748 return 0;
1749
1750 /* Search all related table entries for one that has an
1751 equivalent register. */
1752
1753 p = relt;
1754 while (1)
1755 {
1756 /* This loop is strange in that it is executed in two different cases.
1757 The first is when X is already in the table. Then it is searching
1758 the RELATED_VALUE list of X's class (RELT). The second case is when
1759 X is not in the table. Then RELT points to a class for the related
1760 value.
1761
1762 Ensure that, whatever case we are in, we ignore classes that have
1763 the same value as X. */
1764
1765 if (rtx_equal_p (x, p->exp))
1766 q = 0;
1767 else
1768 for (q = p->first_same_value; q; q = q->next_same_value)
1769 if (GET_CODE (q->exp) == REG)
1770 break;
1771
1772 if (q)
1773 break;
1774
1775 p = p->related_value;
1776
1777 /* We went all the way around, so there is nothing to be found.
1778 Alternatively, perhaps RELT was in the table for some other reason
1779 and it has no related values recorded. */
1780 if (p == relt || p == 0)
1781 break;
1782 }
1783
1784 if (q == 0)
1785 return 0;
1786
1787 offset = (get_integer_term (x) - get_integer_term (p->exp));
1788 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1789 return plus_constant (q->exp, offset);
1790 }
1791 \f
1792 /* Hash an rtx. We are careful to make sure the value is never negative.
1793 Equivalent registers hash identically.
1794 MODE is used in hashing for CONST_INTs only;
1795 otherwise the mode of X is used.
1796
1797 Store 1 in do_not_record if any subexpression is volatile.
1798
1799 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1800 which does not have the RTX_UNCHANGING_P bit set.
1801 In this case, also store 1 in hash_arg_in_struct
1802 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1803
1804 Note that cse_insn knows that the hash code of a MEM expression
1805 is just (int) MEM plus the hash code of the address. */
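
/* A hypothetical example of how the pieces combine: for
   (plus:SI (reg 65) (const_int 4)) we add (int) PLUS and (int) SImode,
   then hash each operand; the REG contributes reg_qty[65], so equivalent
   registers hash identically, and the CONST_INT is folded with HASHBITS
   so the running total stays within range. */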
1806
1807 static int
1808 canon_hash (x, mode)
1809 rtx x;
1810 enum machine_mode mode;
1811 {
1812 register int i, j;
1813 register int hash = 0;
1814 register enum rtx_code code;
1815 register char *fmt;
1816
1817 /* repeat is used to turn tail-recursion into iteration. */
1818 repeat:
1819 if (x == 0)
1820 return hash;
1821
1822 code = GET_CODE (x);
1823 switch (code)
1824 {
1825 case REG:
1826 {
1827 register int regno = REGNO (x);
1828
1829 /* On some machines, we can't record any non-fixed hard register,
1830 because extending its life will cause reload problems. We
1831 consider ap, fp, and sp to be fixed for this purpose.
1832 On all machines, we can't record any global registers. */
1833
1834 if (regno < FIRST_PSEUDO_REGISTER
1835 && (global_regs[regno]
1836 #ifdef SMALL_REGISTER_CLASSES
1837 || (! fixed_regs[regno]
1838 && regno != FRAME_POINTER_REGNUM
1839 && regno != ARG_POINTER_REGNUM
1840 && regno != STACK_POINTER_REGNUM)
1841 #endif
1842 ))
1843 {
1844 do_not_record = 1;
1845 return 0;
1846 }
1847 return hash + ((int) REG << 7) + reg_qty[regno];
1848 }
1849
1850 case CONST_INT:
1851 hash += ((int) mode + ((int) CONST_INT << 7)
1852 + INTVAL (x) + (INTVAL (x) >> HASHBITS));
1853 return ((1 << HASHBITS) - 1) & hash;
1854
1855 case CONST_DOUBLE:
1856 /* This is like the general case, except that it only counts
1857 the integers representing the constant. */
1858 hash += (int) code + (int) GET_MODE (x);
1859 {
1860 int i;
1861 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1862 {
1863 int tem = XINT (x, i);
1864 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1865 }
1866 }
1867 return hash;
1868
1869 /* Assume there is only one rtx object for any given label. */
1870 case LABEL_REF:
1871 /* Use `and' to ensure a positive number. */
1872 return (hash + ((HOST_WIDE_INT) LABEL_REF << 7)
1873 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1874
1875 case SYMBOL_REF:
1876 return (hash + ((HOST_WIDE_INT) SYMBOL_REF << 7)
1877 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1878
1879 case MEM:
1880 if (MEM_VOLATILE_P (x))
1881 {
1882 do_not_record = 1;
1883 return 0;
1884 }
1885 if (! RTX_UNCHANGING_P (x))
1886 {
1887 hash_arg_in_memory = 1;
1888 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1889 }
1890 /* Now that we have already found this special case,
1891 might as well speed it up as much as possible. */
1892 hash += (int) MEM;
1893 x = XEXP (x, 0);
1894 goto repeat;
1895
1896 case PRE_DEC:
1897 case PRE_INC:
1898 case POST_DEC:
1899 case POST_INC:
1900 case PC:
1901 case CC0:
1902 case CALL:
1903 case UNSPEC_VOLATILE:
1904 do_not_record = 1;
1905 return 0;
1906
1907 case ASM_OPERANDS:
1908 if (MEM_VOLATILE_P (x))
1909 {
1910 do_not_record = 1;
1911 return 0;
1912 }
1913 }
1914
1915 i = GET_RTX_LENGTH (code) - 1;
1916 hash += (int) code + (int) GET_MODE (x);
1917 fmt = GET_RTX_FORMAT (code);
1918 for (; i >= 0; i--)
1919 {
1920 if (fmt[i] == 'e')
1921 {
1922 rtx tem = XEXP (x, i);
1923 rtx tem1;
1924
1925 /* If the operand is a REG that is equivalent to a constant, hash
1926 as if we were hashing the constant, since we will be comparing
1927 that way. */
1928 if (tem != 0 && GET_CODE (tem) == REG
1929 && REGNO_QTY_VALID_P (REGNO (tem))
1930 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1931 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1932 && CONSTANT_P (tem1))
1933 tem = tem1;
1934
1935 /* If we are about to do the last recursive call
1936 needed at this level, change it into iteration.
1937 This function is called enough to be worth it. */
1938 if (i == 0)
1939 {
1940 x = tem;
1941 goto repeat;
1942 }
1943 hash += canon_hash (tem, 0);
1944 }
1945 else if (fmt[i] == 'E')
1946 for (j = 0; j < XVECLEN (x, i); j++)
1947 hash += canon_hash (XVECEXP (x, i, j), 0);
1948 else if (fmt[i] == 's')
1949 {
1950 register char *p = XSTR (x, i);
1951 if (p)
1952 while (*p)
1953 {
1954 register int tem = *p++;
1955 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1956 }
1957 }
1958 else if (fmt[i] == 'i')
1959 {
1960 register int tem = XINT (x, i);
1961 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1962 }
1963 else
1964 abort ();
1965 }
1966 return hash;
1967 }
1968
1969 /* Like canon_hash but with no side effects. */
1970
1971 static int
1972 safe_hash (x, mode)
1973 rtx x;
1974 enum machine_mode mode;
1975 {
1976 int save_do_not_record = do_not_record;
1977 int save_hash_arg_in_memory = hash_arg_in_memory;
1978 int save_hash_arg_in_struct = hash_arg_in_struct;
1979 int hash = canon_hash (x, mode);
1980 hash_arg_in_memory = save_hash_arg_in_memory;
1981 hash_arg_in_struct = save_hash_arg_in_struct;
1982 do_not_record = save_do_not_record;
1983 return hash;
1984 }
1985 \f
1986 /* Return 1 iff X and Y would canonicalize into the same thing,
1987 without actually constructing the canonicalization of either one.
1988 If VALIDATE is nonzero,
1989 we assume X is an expression being processed from the rtl
1990 and Y was found in the hash table. We check register refs
1991 in Y for being marked as valid.
1992
1993 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1994 that is known to be in the register. Ordinarily, we don't allow them
1995 to match, because letting them match would cause unpredictable results
1996 in all the places that search a hash table chain for an equivalent
1997 for a given value. A possible equivalent that has different structure
1998 has its hash code computed from different data. Whether the hash code
1999 is the same as that of the given value is pure luck. */
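
/* For instance (register number invented): with EQUAL_VALUES nonzero,
   (reg:SI 65) may match (const_int 4) when qty_const records that
   quantity as holding (const_int 4) in SImode; with EQUAL_VALUES zero,
   the differing codes make the comparison fail immediately. */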
2000
2001 static int
2002 exp_equiv_p (x, y, validate, equal_values)
2003 rtx x, y;
2004 int validate;
2005 int equal_values;
2006 {
2007 register int i, j;
2008 register enum rtx_code code;
2009 register char *fmt;
2010
2011 /* Note: it is incorrect to assume an expression is equivalent to itself
2012 if VALIDATE is nonzero. */
2013 if (x == y && !validate)
2014 return 1;
2015 if (x == 0 || y == 0)
2016 return x == y;
2017
2018 code = GET_CODE (x);
2019 if (code != GET_CODE (y))
2020 {
2021 if (!equal_values)
2022 return 0;
2023
2024 /* If X is a constant and Y is a register or vice versa, they may be
2025 equivalent. We only have to validate if Y is a register. */
2026 if (CONSTANT_P (x) && GET_CODE (y) == REG
2027 && REGNO_QTY_VALID_P (REGNO (y))
2028 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2029 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2030 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2031 return 1;
2032
2033 if (CONSTANT_P (y) && code == REG
2034 && REGNO_QTY_VALID_P (REGNO (x))
2035 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2036 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2037 return 1;
2038
2039 return 0;
2040 }
2041
2042 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2043 if (GET_MODE (x) != GET_MODE (y))
2044 return 0;
2045
2046 switch (code)
2047 {
2048 case PC:
2049 case CC0:
2050 return x == y;
2051
2052 case CONST_INT:
2053 return INTVAL (x) == INTVAL (y);
2054
2055 case LABEL_REF:
2056 case SYMBOL_REF:
2057 return XEXP (x, 0) == XEXP (y, 0);
2058
2059 case REG:
2060 {
2061 int regno = REGNO (y);
2062 int endregno
2063 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2064 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2065 int i;
2066
2067 /* If the quantities are not the same, the expressions are not
2068 equivalent. If they are the same and we are not to validate, they
2069 are equivalent. Otherwise, ensure all regs are up-to-date. */
2070
2071 if (reg_qty[REGNO (x)] != reg_qty[regno])
2072 return 0;
2073
2074 if (! validate)
2075 return 1;
2076
2077 for (i = regno; i < endregno; i++)
2078 if (reg_in_table[i] != reg_tick[i])
2079 return 0;
2080
2081 return 1;
2082 }
2083
2084 /* For commutative operations, check both orders. */
2085 case PLUS:
2086 case MULT:
2087 case AND:
2088 case IOR:
2089 case XOR:
2090 case NE:
2091 case EQ:
2092 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2093 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2094 validate, equal_values))
2095 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2096 validate, equal_values)
2097 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2098 validate, equal_values)));
2099 }
2100
2101 /* Compare the elements. If any pair of corresponding elements
2102 fails to match, return 0 for the whole thing. */
2103
2104 fmt = GET_RTX_FORMAT (code);
2105 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2106 {
2107 switch (fmt[i])
2108 {
2109 case 'e':
2110 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2111 return 0;
2112 break;
2113
2114 case 'E':
2115 if (XVECLEN (x, i) != XVECLEN (y, i))
2116 return 0;
2117 for (j = 0; j < XVECLEN (x, i); j++)
2118 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2119 validate, equal_values))
2120 return 0;
2121 break;
2122
2123 case 's':
2124 if (strcmp (XSTR (x, i), XSTR (y, i)))
2125 return 0;
2126 break;
2127
2128 case 'i':
2129 if (XINT (x, i) != XINT (y, i))
2130 return 0;
2131 break;
2132
2133 case 'w':
2134 if (XWINT (x, i) != XWINT (y, i))
2135 return 0;
2136 break;
2137
2138 case '0':
2139 break;
2140
2141 default:
2142 abort ();
2143 }
2144 }
2145
2146 return 1;
2147 }
2148 \f
2149 /* Return 1 iff any subexpression of X matches Y.
2150 Here we do not require that X or Y be valid (with respect to the
2151 registers they refer to) for being in the hash table. */
2152
2153 static int
2154 refers_to_p (x, y)
2155 rtx x, y;
2156 {
2157 register int i;
2158 register enum rtx_code code;
2159 register char *fmt;
2160
2161 repeat:
2162 if (x == y)
2163 return 1;
2164 if (x == 0 || y == 0)
2165 return 0;
2166
2167 code = GET_CODE (x);
2168 /* If X as a whole has the same code as Y, they may match.
2169 If so, return 1. */
2170 if (code == GET_CODE (y))
2171 {
2172 if (exp_equiv_p (x, y, 0, 1))
2173 return 1;
2174 }
2175
2176 /* X does not match, so try its subexpressions. */
2177
2178 fmt = GET_RTX_FORMAT (code);
2179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2180 if (fmt[i] == 'e')
2181 {
2182 if (i == 0)
2183 {
2184 x = XEXP (x, 0);
2185 goto repeat;
2186 }
2187 else
2188 if (refers_to_p (XEXP (x, i), y))
2189 return 1;
2190 }
2191 else if (fmt[i] == 'E')
2192 {
2193 int j;
2194 for (j = 0; j < XVECLEN (x, i); j++)
2195 if (refers_to_p (XVECEXP (x, i, j), y))
2196 return 1;
2197 }
2198
2199 return 0;
2200 }
2201 \f
2202 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2203 set PBASE, PSTART, and PEND which correspond to the base of the address,
2204 the starting offset, and ending offset respectively.
2205
2206 ADDR is known to be a nonvarying address.
2207
2208 cse_rtx_addr_varies_p returns zero for nonvarying addresses. */
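
/* Worked example (register number and symbol invented): given
   ADDR == (plus (reg 65) (const_int 12)) and SIZE == 4, where qty_const
   records (reg 65) as (const (plus (symbol_ref "buf") (const_int 4))),
   we substitute the constant equivalent, strip the CONST, and accumulate
   the offsets, yielding *PBASE == (symbol_ref "buf"), *PSTART == 16,
   and *PEND == 20. */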
2209
2210 static void
2211 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2212 rtx addr;
2213 int size;
2214 rtx *pbase;
2215 HOST_WIDE_INT *pstart, *pend;
2216 {
2217 rtx base;
2218 HOST_WIDE_INT start, end;
2219
2220 base = addr;
2221 start = 0;
2222 end = 0;
2223
2224 /* Registers with nonvarying addresses usually have constant equivalents;
2225 but the frame pointer register is also possible. */
2226 if (GET_CODE (base) == REG
2227 && qty_const != 0
2228 && REGNO_QTY_VALID_P (REGNO (base))
2229 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2230 && qty_const[reg_qty[REGNO (base)]] != 0)
2231 base = qty_const[reg_qty[REGNO (base)]];
2232 else if (GET_CODE (base) == PLUS
2233 && GET_CODE (XEXP (base, 1)) == CONST_INT
2234 && GET_CODE (XEXP (base, 0)) == REG
2235 && qty_const != 0
2236 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2237 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2238 == GET_MODE (XEXP (base, 0)))
2239 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2240 {
2241 start = INTVAL (XEXP (base, 1));
2242 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2243 }
2244
2245 /* By definition, operand1 of a LO_SUM is the associated constant
2246 address. Use the associated constant address as the base instead. */
2247 if (GET_CODE (base) == LO_SUM)
2248 base = XEXP (base, 1);
2249
2250 /* Strip off CONST. */
2251 if (GET_CODE (base) == CONST)
2252 base = XEXP (base, 0);
2253
2254 if (GET_CODE (base) == PLUS
2255 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2256 {
2257 start += INTVAL (XEXP (base, 1));
2258 base = XEXP (base, 0);
2259 }
2260
2261 end = start + size;
2262
2263 /* Set the return values. */
2264 *pbase = base;
2265 *pstart = start;
2266 *pend = end;
2267 }
2268
2269 /* Return 1 iff any subexpression of X refers to memory
2270 at an address of BASE plus some offset
2271 such that any of the bytes' offsets fall between START (inclusive)
2272 and END (exclusive).
2273
2274 The value is undefined if X is a varying address (as determined by
2275 cse_rtx_addr_varies_p). This function is not used in such cases.
2276
2277 When used in the cse pass, `qty_const' is nonzero, and it is used
2278 to treat an address that is a register with a known constant value
2279 as if it were that constant value.
2280 In the loop pass, `qty_const' is zero, so this is not done. */
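
/* The overlap test below is the usual half-open interval check:
   [MYSTART, MYEND) intersects [START, END) iff MYEND > START and
   MYSTART < END. For example, a 4-byte reference at offset 16,
   i.e. [16, 20), overlaps [18, 22) but not [20, 24). */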
2281
2282 static int
2283 refers_to_mem_p (x, base, start, end)
2284 rtx x, base;
2285 HOST_WIDE_INT start, end;
2286 {
2287 register HOST_WIDE_INT i;
2288 register enum rtx_code code;
2289 register char *fmt;
2290
2291 if (GET_CODE (base) == CONST_INT)
2292 {
2293 start += INTVAL (base);
2294 end += INTVAL (base);
2295 base = const0_rtx;
2296 }
2297
2298 repeat:
2299 if (x == 0)
2300 return 0;
2301
2302 code = GET_CODE (x);
2303 if (code == MEM)
2304 {
2305 register rtx addr = XEXP (x, 0); /* Get the address. */
2306 rtx mybase;
2307 HOST_WIDE_INT mystart, myend;
2308
2309 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2310 &mybase, &mystart, &myend);
2311
2312
2313 /* refers_to_mem_p is never called with varying addresses.
2314 If the base addresses are not equal, there is no chance
2315 of the memory addresses conflicting. */
2316 if (! rtx_equal_p (mybase, base))
2317 return 0;
2318
2319 return myend > start && mystart < end;
2320 }
2321
2322 /* X does not match, so try its subexpressions. */
2323
2324 fmt = GET_RTX_FORMAT (code);
2325 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2326 if (fmt[i] == 'e')
2327 {
2328 if (i == 0)
2329 {
2330 x = XEXP (x, 0);
2331 goto repeat;
2332 }
2333 else
2334 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2335 return 1;
2336 }
2337 else if (fmt[i] == 'E')
2338 {
2339 int j;
2340 for (j = 0; j < XVECLEN (x, i); j++)
2341 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2342 return 1;
2343 }
2344
2345 return 0;
2346 }
2347
2348 /* Nonzero if X refers to memory at a varying address;
2349 except that a register that currently has a known constant value
2350 isn't considered varying. */
2351
2352 static int
2353 cse_rtx_addr_varies_p (x)
2354 rtx x;
2355 {
2356 /* We need not check for X and the equivalence class being of the same
2357 mode because if X is equivalent to a constant in some mode, it
2358 doesn't vary in any mode. */
2359
2360 if (GET_CODE (x) == MEM
2361 && GET_CODE (XEXP (x, 0)) == REG
2362 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2363 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2364 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2365 return 0;
2366
2367 if (GET_CODE (x) == MEM
2368 && GET_CODE (XEXP (x, 0)) == PLUS
2369 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2370 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2371 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2372 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2373 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2374 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2375 return 0;
2376
2377 return rtx_addr_varies_p (x);
2378 }
2379 \f
2380 /* Canonicalize an expression:
2381 replace each register reference inside it
2382 with the "oldest" equivalent register.
2383
2384 If INSN is non-zero and we are replacing a pseudo with a hard register
2385 or vice versa, validate_change is used to ensure that INSN remains valid
2386 after we make our substitution. The calls are made with IN_GROUP non-zero
2387 so apply_change_group must be called upon the outermost return from this
2388 function (unless INSN is zero). The result of apply_change_group can
2389 generally be discarded since the changes we are making are optional. */
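
/* For example (register numbers invented): if (reg 70) and (reg 65)
   share a quantity and (reg 65) is the oldest member of its chain,
   (plus:SI (reg 70) (const_int 4)) is rewritten as
   (plus:SI (reg 65) (const_int 4)), so equivalent expressions become
   textually identical and are recognized as common subexpressions. */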
2390
2391 static rtx
2392 canon_reg (x, insn)
2393 rtx x;
2394 rtx insn;
2395 {
2396 register int i;
2397 register enum rtx_code code;
2398 register char *fmt;
2399
2400 if (x == 0)
2401 return x;
2402
2403 code = GET_CODE (x);
2404 switch (code)
2405 {
2406 case PC:
2407 case CC0:
2408 case CONST:
2409 case CONST_INT:
2410 case CONST_DOUBLE:
2411 case SYMBOL_REF:
2412 case LABEL_REF:
2413 case ADDR_VEC:
2414 case ADDR_DIFF_VEC:
2415 return x;
2416
2417 case REG:
2418 {
2419 register int first;
2420
2421 /* Never replace a hard reg, because hard regs can appear
2422 in more than one machine mode, and we must preserve the mode
2423 of each occurrence. Also, some hard regs appear in
2424 MEMs that are shared and mustn't be altered. Don't try to
2425 replace any reg that maps to a reg of class NO_REGS. */
2426 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2427 || ! REGNO_QTY_VALID_P (REGNO (x)))
2428 return x;
2429
2430 first = qty_first_reg[reg_qty[REGNO (x)]];
2431 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2432 : REGNO_REG_CLASS (first) == NO_REGS ? x
2433 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2434 }
2435 }
2436
2437 fmt = GET_RTX_FORMAT (code);
2438 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2439 {
2440 register int j;
2441
2442 if (fmt[i] == 'e')
2443 {
2444 rtx new = canon_reg (XEXP (x, i), insn);
2445
2446 /* If replacing pseudo with hard reg or vice versa, ensure the
2447 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2448 if (insn != 0 && new != 0
2449 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2450 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2451 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2452 || insn_n_dups[recog_memoized (insn)] > 0))
2453 validate_change (insn, &XEXP (x, i), new, 1);
2454 else
2455 XEXP (x, i) = new;
2456 }
2457 else if (fmt[i] == 'E')
2458 for (j = 0; j < XVECLEN (x, i); j++)
2459 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2460 }
2461
2462 return x;
2463 }
2464 \f
2465 /* LOC is a location with INSN that is an operand address (the contents of
2466 a MEM). Find the best equivalent address to use that is valid for this
2467 insn.
2468
2469 On most CISC machines, complicated address modes are costly, and rtx_cost
2470 is a good approximation for that cost. However, most RISC machines have
2471 only a few (usually only one) memory reference formats. If an address is
2472 valid at all, it is often just as cheap as any other address. Hence, for
2473 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2474 costs of various addresses. For two addresses of equal cost, choose the one
2475 with the highest `rtx_cost' value as that has the potential of eliminating
2476 the most insns. For equal costs, we choose the first in the equivalence
2477 class. Note that we ignore the fact that pseudo registers are cheaper
2478 than hard registers here because we would also prefer the pseudo registers.
2479 */
2480
2481 static void
2482 find_best_addr (insn, loc)
2483 rtx insn;
2484 rtx *loc;
2485 {
2486 struct table_elt *elt, *p;
2487 rtx addr = *loc;
2488 int our_cost;
2489 int found_better = 1;
2490 int save_do_not_record = do_not_record;
2491 int save_hash_arg_in_memory = hash_arg_in_memory;
2492 int save_hash_arg_in_struct = hash_arg_in_struct;
2493 int hash_code;
2494 int addr_volatile;
2495 int regno;
2496
2497 /* Do not try to replace constant addresses or addresses of local and
2498 argument slots. These MEM expressions are made only once and inserted
2499 in many instructions, as well as being used to control symbol table
2500 output. It is not safe to clobber them.
2501
2502 There are some uncommon cases where the address is already in a register
2503 for some reason, but we cannot take advantage of that because we have
2504 no easy way to unshare the MEM. In addition, looking up all stack
2505 addresses is costly. */
2506 if ((GET_CODE (addr) == PLUS
2507 && GET_CODE (XEXP (addr, 0)) == REG
2508 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2509 && (regno = REGNO (XEXP (addr, 0)),
2510 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2511 || (GET_CODE (addr) == REG
2512 && (regno = REGNO (addr),
2513 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2514 || CONSTANT_ADDRESS_P (addr))
2515 return;
2516
2517 /* If this address is not simply a register, try to fold it. This will
2518 sometimes simplify the expression. Many simplifications
2519 will not be valid, but some, usually applying the associative rule, will
2520 be valid and produce better code. */
2521 if (GET_CODE (addr) != REG
2522 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2523 addr = *loc;
2524
2525 /* If this address is not in the hash table, we can't look for equivalences
2526 of the whole address. Also, ignore if volatile. */
2527
2528 do_not_record = 0;
2529 hash_code = HASH (addr, Pmode);
2530 addr_volatile = do_not_record;
2531 do_not_record = save_do_not_record;
2532 hash_arg_in_memory = save_hash_arg_in_memory;
2533 hash_arg_in_struct = save_hash_arg_in_struct;
2534
2535 if (addr_volatile)
2536 return;
2537
2538 elt = lookup (addr, hash_code, Pmode);
2539
2540 #ifndef ADDRESS_COST
2541 if (elt)
2542 {
2543 our_cost = elt->cost;
2544
2545 /* Find the lowest cost below ours that works. */
2546 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2547 if (elt->cost < our_cost
2548 && (GET_CODE (elt->exp) == REG
2549 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2550 && validate_change (insn, loc,
2551 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2552 return;
2553 }
2554 #else
2555
2556 if (elt)
2557 {
2558 /* We need to find the best (under the criteria documented above) entry
2559 in the class that is valid. We use the `flag' field to indicate
2560 choices that were invalid and iterate until we can't find a better
2561 one that hasn't already been tried. */
2562
2563 for (p = elt->first_same_value; p; p = p->next_same_value)
2564 p->flag = 0;
2565
2566 while (found_better)
2567 {
2568 int best_addr_cost = ADDRESS_COST (*loc);
2569 int best_rtx_cost = (elt->cost + 1) >> 1;
2570 struct table_elt *best_elt = elt;
2571
2572 found_better = 0;
2573 for (p = elt->first_same_value; p; p = p->next_same_value)
2574 if (! p->flag
2575 && (GET_CODE (p->exp) == REG
2576 || exp_equiv_p (p->exp, p->exp, 1, 0))
2577 && (ADDRESS_COST (p->exp) < best_addr_cost
2578 || (ADDRESS_COST (p->exp) == best_addr_cost
2579 && (p->cost + 1) >> 1 > best_rtx_cost)))
2580 {
2581 found_better = 1;
2582 best_addr_cost = ADDRESS_COST (p->exp);
2583 best_rtx_cost = (p->cost + 1) >> 1;
2584 best_elt = p;
2585 }
2586
2587 if (found_better)
2588 {
2589 if (validate_change (insn, loc,
2590 canon_reg (copy_rtx (best_elt->exp),
2591 NULL_RTX), 0))
2592 return;
2593 else
2594 best_elt->flag = 1;
2595 }
2596 }
2597 }
2598
2599 /* If the address is a binary operation with the first operand a register
2600 and the second a constant, do the same as above, but looking for
2601 equivalences of the register. Then try to simplify before checking for
2602 the best address to use. This catches a few cases: First is when we
2603 have REG+const and the register is equivalent to another REG+const. We can often merge
2604 the constants and eliminate one insn and one register. It may also be
2605 that a machine has a cheap REG+REG+const. Finally, this improves the
2606 code on the Alpha for unaligned byte stores. */
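
/* A hypothetical instance of the REG+const merge described above: for
   *LOC == (plus (reg 70) (const_int 4)), if the class of (reg 70)
   contains (plus (reg 65) (const_int 8)), cse_gen_binary builds the
   candidate (plus (reg 65) (const_int 12)), which may win on
   ADDRESS_COST and may allow the insn that computes (reg 70) to be
   deleted later. */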
2607
2608 if (flag_expensive_optimizations
2609 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2610 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2611 && GET_CODE (XEXP (*loc, 0)) == REG
2612 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2613 {
2614 rtx c = XEXP (*loc, 1);
2615
2616 do_not_record = 0;
2617 hash_code = HASH (XEXP (*loc, 0), Pmode);
2618 do_not_record = save_do_not_record;
2619 hash_arg_in_memory = save_hash_arg_in_memory;
2620 hash_arg_in_struct = save_hash_arg_in_struct;
2621
2622 elt = lookup (XEXP (*loc, 0), hash_code, Pmode);
2623 if (elt == 0)
2624 return;
2625
2626 /* We need to find the best (under the criteria documented above) entry
2627 in the class that is valid. We use the `flag' field to indicate
2628 choices that were invalid and iterate until we can't find a better
2629 one that hasn't already been tried. */
2630
2631 for (p = elt->first_same_value; p; p = p->next_same_value)
2632 p->flag = 0;
2633
2634 while (found_better)
2635 {
2636 int best_addr_cost = ADDRESS_COST (*loc);
2637 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2638 struct table_elt *best_elt = elt;
2639 rtx best_rtx = *loc;
2640
2641 found_better = 0;
2642 for (p = elt->first_same_value; p; p = p->next_same_value)
2643 if (! p->flag
2644 && (GET_CODE (p->exp) == REG
2645 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2646 {
2647 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2648
2649 if ((ADDRESS_COST (new) < best_addr_cost
2650 || (ADDRESS_COST (new) == best_addr_cost
2651 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2652 {
2653 found_better = 1;
2654 best_addr_cost = ADDRESS_COST (new);
2655 best_rtx_cost = (COST (new) + 1) >> 1;
2656 best_elt = p;
2657 best_rtx = new;
2658 }
2659 }
2660
2661 if (found_better)
2662 {
2663 if (validate_change (insn, loc,
2664 canon_reg (copy_rtx (best_rtx),
2665 NULL_RTX), 0))
2666 return;
2667 else
2668 best_elt->flag = 1;
2669 }
2670 }
2671 }
2672 #endif
2673 }
2674 \f
2675 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2676 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2677 find what values are actually being compared.
2678
2679 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2680 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2681 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2682 compared to produce cc0.
2683
2684 The return value is the comparison operator and is either CODE itself
2685 or the code corresponding to the inverse of the comparison. */
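
/* For example (registers invented): if CODE is NE, *PARG1 is a register
   known to be equivalent to (lt:SI (reg 65) (reg 66)), and *PARG2 is
   const0_rtx, we return LT with *PARG1 == (reg 65) and
   *PARG2 == (reg 66); had CODE been EQ, the result would have been the
   reversed condition, GE. */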
2686
2687 static enum rtx_code
2688 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2689 enum rtx_code code;
2690 rtx *parg1, *parg2;
2691 enum machine_mode *pmode1, *pmode2;
2692 {
2693 rtx arg1, arg2;
2694
2695 arg1 = *parg1, arg2 = *parg2;
2696
2697 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2698
2699 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2700 {
2701 /* Set non-zero when we find something of interest. */
2702 rtx x = 0;
2703 int reverse_code = 0;
2704 struct table_elt *p = 0;
2705
2706 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2707 On machines with CC0, this is the only case that can occur, since
2708 fold_rtx will return the COMPARE or item being compared with zero
2709 when given CC0. */
2710
2711 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2712 x = arg1;
2713
2714 /* If ARG1 is a comparison operator and CODE is testing for
2715 STORE_FLAG_VALUE, get the inner arguments. */
2716
2717 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2718 {
2719 if (code == NE
2720 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2721 && code == LT && STORE_FLAG_VALUE == -1)
2722 #ifdef FLOAT_STORE_FLAG_VALUE
2723 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2724 && FLOAT_STORE_FLAG_VALUE < 0)
2725 #endif
2726 )
2727 x = arg1;
2728 else if (code == EQ
2729 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2730 && code == GE && STORE_FLAG_VALUE == -1)
2731 #ifdef FLOAT_STORE_FLAG_VALUE
2732 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2733 && FLOAT_STORE_FLAG_VALUE < 0)
2734 #endif
2735 )
2736 x = arg1, reverse_code = 1;
2737 }
2738
2739 /* ??? We could also check for
2740
2741 (ne (and (eq (...) (const_int 1))) (const_int 0))
2742
2743 and related forms, but let's wait until we see them occurring. */
2744
2745 if (x == 0)
2746 /* Look up ARG1 in the hash table and see if it has an equivalence
2747 that lets us see what is being compared. */
2748 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2749 GET_MODE (arg1));
2750 if (p) p = p->first_same_value;
2751
2752 for (; p; p = p->next_same_value)
2753 {
2754 enum machine_mode inner_mode = GET_MODE (p->exp);
2755
2756 /* If the entry isn't valid, skip it. */
2757 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2758 continue;
2759
2760 if (GET_CODE (p->exp) == COMPARE
2761 /* Another possibility is that this machine has a compare insn
2762 that includes the comparison code. In that case, ARG1 would
2763 be equivalent to a comparison operation that would set ARG1 to
2764 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2765 ORIG_CODE is the actual comparison being done; if it is an EQ,
2766 we must reverse ORIG_CODE. On machines with a negative value
2767 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2768 || ((code == NE
2769 || (code == LT
2770 && GET_MODE_CLASS (inner_mode) == MODE_INT
2771 && (GET_MODE_BITSIZE (inner_mode)
2772 <= HOST_BITS_PER_WIDE_INT)
2773 && (STORE_FLAG_VALUE
2774 & ((HOST_WIDE_INT) 1
2775 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2776 #ifdef FLOAT_STORE_FLAG_VALUE
2777 || (code == LT
2778 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2779 && FLOAT_STORE_FLAG_VALUE < 0)
2780 #endif
2781 )
2782 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2783 {
2784 x = p->exp;
2785 break;
2786 }
2787 else if ((code == EQ
2788 || (code == GE
2789 && GET_MODE_CLASS (inner_mode) == MODE_INT
2790 && (GET_MODE_BITSIZE (inner_mode)
2791 <= HOST_BITS_PER_WIDE_INT)
2792 && (STORE_FLAG_VALUE
2793 & ((HOST_WIDE_INT) 1
2794 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2795 #ifdef FLOAT_STORE_FLAG_VALUE
2796 || (code == GE
2797 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2798 && FLOAT_STORE_FLAG_VALUE < 0)
2799 #endif
2800 )
2801 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2802 {
2803 reverse_code = 1;
2804 x = p->exp;
2805 break;
2806 }
2807
2808 /* If this is fp + constant, the equivalent is a better operand since
2809 it may let us predict the value of the comparison. */
2810 else if (NONZERO_BASE_PLUS_P (p->exp))
2811 {
2812 arg1 = p->exp;
2813 continue;
2814 }
2815 }
2816
2817 /* If we didn't find a useful equivalence for ARG1, we are done.
2818 Otherwise, set up for the next iteration. */
2819 if (x == 0)
2820 break;
2821
2822 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2823 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2824 code = GET_CODE (x);
2825
2826 if (reverse_code)
2827 code = reverse_condition (code);
2828 }
2829
2830 /* Return our results. Return the modes from before fold_rtx
2831 because fold_rtx might produce a CONST_INT, whose mode is VOIDmode. */
2832 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2833 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2834
2835 return code;
2836 }
2837 \f
2838 /* Try to simplify a unary operation CODE whose output mode is to be
2839 MODE with input operand OP whose mode was originally OP_MODE.
2840 Return zero if no simplification can be made. */
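
/* Typical folds performed here: (neg:SI (const_int 5)) becomes
   (const_int -5); (zero_extend:SI (const_int -1)) with a QImode operand
   becomes (const_int 255); and even with a nonconstant operand,
   (not:SI (not:SI X)) collapses to X. */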
2841
2842 rtx
2843 simplify_unary_operation (code, mode, op, op_mode)
2844 enum rtx_code code;
2845 enum machine_mode mode;
2846 rtx op;
2847 enum machine_mode op_mode;
2848 {
2849 register int width = GET_MODE_BITSIZE (mode);
2850
2851 /* The order of these tests is critical so that, for example, we don't
2852 check the wrong mode (input vs. output) for a conversion operation,
2853 such as FIX. At some point, this should be simplified. */
2854
2855 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2856 if (code == FLOAT && GET_CODE (op) == CONST_INT)
2857 {
2858 REAL_VALUE_TYPE d;
2859
2860 #ifdef REAL_ARITHMETIC
2861 REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
2862 #else
2863 d = (double) INTVAL (op);
2864 #endif
2865 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2866 }
2867 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
2868 {
2869 REAL_VALUE_TYPE d;
2870
2871 #ifdef REAL_ARITHMETIC
2872 REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
2873 #else
2874 d = (double) (unsigned int) INTVAL (op);
2875 #endif
2876 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2877 }
2878
2879 else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
2880 && GET_MODE (op) == VOIDmode)
2881 {
2882 REAL_VALUE_TYPE d;
2883
2884 #ifdef REAL_ARITHMETIC
2885 REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
2886 #else
2887 if (CONST_DOUBLE_HIGH (op) < 0)
2888 {
2889 d = (double) (~ CONST_DOUBLE_HIGH (op));
2890 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2891 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2892 d += (double) (unsigned HOST_WIDE_INT) (~ CONST_DOUBLE_LOW (op));
2893 d = (- d - 1.0);
2894 }
2895 else
2896 {
2897 d = (double) CONST_DOUBLE_HIGH (op);
2898 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2899 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2900 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2901 }
2902 #endif /* REAL_ARITHMETIC */
2903 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2904 }
2905 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
2906 && GET_MODE (op) == VOIDmode)
2907 {
2908 REAL_VALUE_TYPE d;
2909
2910 #ifdef REAL_ARITHMETIC
2911 REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
2912 CONST_DOUBLE_HIGH (op));
2913 #else
2914 d = (double) CONST_DOUBLE_HIGH (op);
2915 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2916 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2917 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2918 #endif /* REAL_ARITHMETIC */
2919 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2920 }
2921 #endif
2922
2923 if (GET_CODE (op) == CONST_INT
2924 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2925 {
2926 register HOST_WIDE_INT arg0 = INTVAL (op);
2927 register HOST_WIDE_INT val;
2928
2929 switch (code)
2930 {
2931 case NOT:
2932 val = ~ arg0;
2933 break;
2934
2935 case NEG:
2936 val = - arg0;
2937 break;
2938
2939 case ABS:
2940 val = (arg0 >= 0 ? arg0 : - arg0);
2941 break;
2942
2943 case FFS:
2944 /* Don't use ffs here. Instead, get low order bit and then its
2945 number. If arg0 is zero, this will return 0, as desired. */
2946 arg0 &= GET_MODE_MASK (mode);
2947 val = exact_log2 (arg0 & (- arg0)) + 1;
2948 break;
2949
2950 case TRUNCATE:
2951 val = arg0;
2952 break;
2953
2954 case ZERO_EXTEND:
2955 if (op_mode == VOIDmode)
2956 op_mode = mode;
2957 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2958 {
2959 /* If we were really extending the mode,
2960 we would have to distinguish between zero-extension
2961 and sign-extension. */
2962 if (width != GET_MODE_BITSIZE (op_mode))
2963 abort ();
2964 val = arg0;
2965 }
2966 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2967 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2968 else
2969 return 0;
2970 break;
2971
2972 case SIGN_EXTEND:
2973 if (op_mode == VOIDmode)
2974 op_mode = mode;
2975 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2976 {
2977 /* If we were really extending the mode,
2978 we would have to distinguish between zero-extension
2979 and sign-extension. */
2980 if (width != GET_MODE_BITSIZE (op_mode))
2981 abort ();
2982 val = arg0;
2983 }
2984 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2985 {
2986 val
2987 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2988 if (val
2989 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
2990 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
2991 }
2992 else
2993 return 0;
2994 break;
2995
2996 case SQRT:
2997 return 0;
2998
2999 default:
3000 abort ();
3001 }
3002
3003 /* Clear the bits that don't belong in our mode,
3004 unless they and our sign bit are all one.
3005 So we get either a reasonable negative value or a reasonable
3006 unsigned value for this mode. */
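/* For example, with width == 8: 0x17f does not have all the bits above
   its sign bit set, so it is masked down to 0x7f, while a fully
   sign-extended -128 (...ff80) already satisfies the test and is left
   alone. */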
3007 if (width < HOST_BITS_PER_WIDE_INT
3008 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3009 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3010 val &= (1 << width) - 1;
3011
3012 return GEN_INT (val);
3013 }
3014
3015 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3016 for a DImode operation on a CONST_INT. */
3017 else if (GET_MODE (op) == VOIDmode
3018 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3019 {
3020 HOST_WIDE_INT l1, h1, lv, hv;
3021
3022 if (GET_CODE (op) == CONST_DOUBLE)
3023 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3024 else
3025 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3026
3027 switch (code)
3028 {
3029 case NOT:
3030 lv = ~ l1;
3031 hv = ~ h1;
3032 break;
3033
3034 case NEG:
3035 neg_double (l1, h1, &lv, &hv);
3036 break;
3037
3038 case ABS:
3039 if (h1 < 0)
3040 neg_double (l1, h1, &lv, &hv);
3041 else
3042 lv = l1, hv = h1;
3043 break;
3044
3045 case FFS:
3046 hv = 0;
3047 if (l1 == 0)
3048 lv = h1 == 0 ? 0 : HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3049 else
3050 lv = exact_log2 (l1 & (-l1)) + 1;
3051 break;
3052
3053 case TRUNCATE:
3054 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3055 return GEN_INT (l1 & GET_MODE_MASK (mode));
3056 else
3057 return 0;
3058 break;
3059
3060 case ZERO_EXTEND:
3061 if (op_mode == VOIDmode
3062 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3063 return 0;
3064
3065 hv = 0;
3066 lv = l1 & GET_MODE_MASK (op_mode);
3067 break;
3068
3069 case SIGN_EXTEND:
3070 if (op_mode == VOIDmode
3071 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3072 return 0;
3073 else
3074 {
3075 lv = l1 & GET_MODE_MASK (op_mode);
3076 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3077 && (lv & ((HOST_WIDE_INT) 1
3078 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3079 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3080
3081 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3082 }
3083 break;
3084
3085 case SQRT:
3086 return 0;
3087
3088 default:
3089 return 0;
3090 }
3091
3092 return immed_double_const (lv, hv, mode);
3093 }
3094
3095 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3096 else if (GET_CODE (op) == CONST_DOUBLE
3097 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3098 {
3099 REAL_VALUE_TYPE d;
3100 jmp_buf handler;
3101 rtx x;
3102
3103 if (setjmp (handler))
3104 /* There used to be a warning here, but that is inadvisable.
3105 People may want to cause traps, and the natural way
3106 to do it should not get a warning. */
3107 return 0;
3108
3109 set_float_handler (handler);
3110
3111 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3112
3113 switch (code)
3114 {
3115 case NEG:
3116 d = REAL_VALUE_NEGATE (d);
3117 break;
3118
3119 case ABS:
3120 if (REAL_VALUE_NEGATIVE (d))
3121 d = REAL_VALUE_NEGATE (d);
3122 break;
3123
3124 case FLOAT_TRUNCATE:
3125 d = real_value_truncate (mode, d);
3126 break;
3127
3128 case FLOAT_EXTEND:
3129 /* All this does is change the mode. */
3130 break;
3131
3132 case FIX:
3133 d = REAL_VALUE_RNDZINT (d);
3134 break;
3135
3136 case UNSIGNED_FIX:
3137 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3138 break;
3139
3140 case SQRT:
3141 return 0;
3142
3143 default:
3144 abort ();
3145 }
3146
3147 x = immed_real_const_1 (d, mode);
3148 set_float_handler (NULL_PTR);
3149 return x;
3150 }
3151 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
3152 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3153 {
3154 REAL_VALUE_TYPE d;
3155 jmp_buf handler;
3156 rtx x;
3157 HOST_WIDE_INT val;
3158
3159 if (setjmp (handler))
3160 return 0;
3161
3162 set_float_handler (handler);
3163
3164 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3165
3166 switch (code)
3167 {
3168 case FIX:
3169 val = REAL_VALUE_FIX (d);
3170 break;
3171
3172 case UNSIGNED_FIX:
3173 val = REAL_VALUE_UNSIGNED_FIX (d);
3174 break;
3175
3176 default:
3177 abort ();
3178 }
3179
3180 set_float_handler (NULL_PTR);
3181
3182 /* Clear the bits that don't belong in our mode,
3183 unless they and our sign bit are all one.
3184 So we get either a reasonable negative value or a reasonable
3185 unsigned value for this mode. */
3186 if (width < HOST_BITS_PER_WIDE_INT
3187 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3188 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3189 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3190
3191 return GEN_INT (val);
3192 }
3193 #endif
3194 /* This was formerly used only for non-IEEE float.
3195 eggert@twinsun.com says it is safe for IEEE also. */
3196 else
3197 {
3198 /* There are some simplifications we can do even if the operands
3199 aren't constant. */
3200 switch (code)
3201 {
3202 case NEG:
3203 case NOT:
3204 /* (not (not X)) == X, similarly for NEG. */
3205 if (GET_CODE (op) == code)
3206 return XEXP (op, 0);
3207 break;
3208
3209 case SIGN_EXTEND:
3210 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3211 becomes just the MINUS if its mode is MODE. This allows
3212 folding switch statements on machines using casesi (such as
3213 the Vax). */
3214 if (GET_CODE (op) == TRUNCATE
3215 && GET_MODE (XEXP (op, 0)) == mode
3216 && GET_CODE (XEXP (op, 0)) == MINUS
3217 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3218 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3219 return XEXP (op, 0);
3220 break;
3221 }
3222
3223 return 0;
3224 }
3225 }
3226 \f
3227 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3228 and OP1. Return 0 if no simplification is possible.
3229
3230 Don't use this for relational operations such as EQ or LT.
3231 Use simplify_relational_operation instead. */
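
/* Representative folds: (plus:SI (const_int 3) (const_int 4)) becomes
   (const_int 7); (mult:SI X (const_int 8)) becomes
   (ashift:SI X (const_int 3)); and (minus:SI X X) becomes (const_int 0)
   in integer modes when X has no side effects. */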
3232
3233 rtx
3234 simplify_binary_operation (code, mode, op0, op1)
3235 enum rtx_code code;
3236 enum machine_mode mode;
3237 rtx op0, op1;
3238 {
3239 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3240 HOST_WIDE_INT val;
3241 int width = GET_MODE_BITSIZE (mode);
3242 rtx tem;
3243
3244 /* Relational operations don't work here. We must know the mode
3245 of the operands in order to do the comparison correctly.
3246 Assuming a full word can give incorrect results.
3247 Consider comparing 128 with -128 in QImode. */
3248
3249 if (GET_RTX_CLASS (code) == '<')
3250 abort ();
3251
3252 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3253 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3254 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3255 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3256 {
3257 REAL_VALUE_TYPE f0, f1, value;
3258 jmp_buf handler;
3259
3260 if (setjmp (handler))
3261 return 0;
3262
3263 set_float_handler (handler);
3264
3265 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3266 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3267 f0 = real_value_truncate (mode, f0);
3268 f1 = real_value_truncate (mode, f1);
3269
3270 #ifdef REAL_ARITHMETIC
3271 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3272 #else
3273 switch (code)
3274 {
3275 case PLUS:
3276 value = f0 + f1;
3277 break;
3278 case MINUS:
3279 value = f0 - f1;
3280 break;
3281 case MULT:
3282 value = f0 * f1;
3283 break;
3284 case DIV:
3285 #ifndef REAL_INFINITY
3286 if (f1 == 0)
3287 return 0;
3288 #endif
3289 value = f0 / f1;
3290 break;
3291 case SMIN:
3292 value = MIN (f0, f1);
3293 break;
3294 case SMAX:
3295 value = MAX (f0, f1);
3296 break;
3297 default:
3298 abort ();
3299 }
3300 #endif
3301
3302 set_float_handler (NULL_PTR);
3303 value = real_value_truncate (mode, value);
3304 return immed_real_const_1 (value, mode);
3305 }
3306 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3307
3308 /* We can fold some multi-word operations. */
3309 if (GET_MODE_CLASS (mode) == MODE_INT
3310 && GET_CODE (op0) == CONST_DOUBLE
3311 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3312 {
3313 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3314
3315 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3316
3317 if (GET_CODE (op1) == CONST_DOUBLE)
3318 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3319 else
3320 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3321
3322 switch (code)
3323 {
3324 case MINUS:
3325 /* A - B == A + (-B). */
3326 neg_double (l2, h2, &lv, &hv);
3327 l2 = lv, h2 = hv;
3328
3329 /* ... fall through ... */
3330
3331 case PLUS:
3332 add_double (l1, h1, l2, h2, &lv, &hv);
3333 break;
3334
3335 case MULT:
3336 mul_double (l1, h1, l2, h2, &lv, &hv);
3337 break;
3338
3339 case DIV: case MOD: case UDIV: case UMOD:
3340 /* We'd need to include tree.h to do this and it doesn't seem worth
3341 it. */
3342 return 0;
3343
3344 case AND:
3345 lv = l1 & l2, hv = h1 & h2;
3346 break;
3347
3348 case IOR:
3349 lv = l1 | l2, hv = h1 | h2;
3350 break;
3351
3352 case XOR:
3353 lv = l1 ^ l2, hv = h1 ^ h2;
3354 break;
3355
3356 case SMIN:
3357 if (h1 < h2
3358 || (h1 == h2
3359 && ((unsigned HOST_WIDE_INT) l1
3360 < (unsigned HOST_WIDE_INT) l2)))
3361 lv = l1, hv = h1;
3362 else
3363 lv = l2, hv = h2;
3364 break;
3365
3366 case SMAX:
3367 if (h1 > h2
3368 || (h1 == h2
3369 && ((unsigned HOST_WIDE_INT) l1
3370 > (unsigned HOST_WIDE_INT) l2)))
3371 lv = l1, hv = h1;
3372 else
3373 lv = l2, hv = h2;
3374 break;
3375
3376 case UMIN:
3377 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3378 || (h1 == h2
3379 && ((unsigned HOST_WIDE_INT) l1
3380 < (unsigned HOST_WIDE_INT) l2)))
3381 lv = l1, hv = h1;
3382 else
3383 lv = l2, hv = h2;
3384 break;
3385
3386 case UMAX:
3387 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3388 || (h1 == h2
3389 && ((unsigned HOST_WIDE_INT) l1
3390 > (unsigned HOST_WIDE_INT) l2)))
3391 lv = l1, hv = h1;
3392 else
3393 lv = l2, hv = h2;
3394 break;
3395
3396 case LSHIFTRT: case ASHIFTRT:
3397 case ASHIFT: case LSHIFT:
3398 case ROTATE: case ROTATERT:
3399 #ifdef SHIFT_COUNT_TRUNCATED
3400 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3401 #endif
3402
3403 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3404 return 0;
3405
3406 if (code == LSHIFTRT || code == ASHIFTRT)
3407 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3408 code == ASHIFTRT);
3409 else if (code == ASHIFT || code == LSHIFT)
3410 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3411 code == ASHIFT);
3412 else if (code == ROTATE)
3413 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3414 else /* code == ROTATERT */
3415 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3416 break;
3417
3418 default:
3419 return 0;
3420 }
3421
3422 return immed_double_const (lv, hv, mode);
3423 }
3424
3425 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3426 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3427 {
3428 /* Even if we can't compute a constant result,
3429 there are some cases worth simplifying. */
3430
3431 switch (code)
3432 {
3433 case PLUS:
3434 /* In IEEE floating point, x+0 is not the same as x. Similarly
3435 for the other optimizations below. */
3436 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3437 && GET_MODE_CLASS (mode) != MODE_INT)
3438 break;
3439
3440 if (op1 == CONST0_RTX (mode))
3441 return op0;
3442
3443 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3444 if (GET_CODE (op0) == NEG)
3445 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3446 else if (GET_CODE (op1) == NEG)
3447 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3448
3449 /* Handle both-operands-constant cases. We can only add
3450 CONST_INTs to constants since the sum of relocatable symbols
3451 can't be handled by most assemblers. */
3452
3453 if (CONSTANT_P (op0) && GET_CODE (op1) == CONST_INT)
3454 return plus_constant (op0, INTVAL (op1));
3455 else if (CONSTANT_P (op1) && GET_CODE (op0) == CONST_INT)
3456 return plus_constant (op1, INTVAL (op0));
3457
3458 /* If one of the operands is a PLUS or a MINUS, see if we can
3459 simplify this by the associative law.
3460 Don't use the associative law for floating point.
3461 The inaccuracy makes it nonassociative,
3462 and subtle programs can break if operations are associated. */
3463
3464 if ((GET_MODE_CLASS (mode) == MODE_INT
3465 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3466 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3467 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3468 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3469 return tem;
3470 break;
3471
3472 case COMPARE:
3473 #ifdef HAVE_cc0
3474 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3475 using cc0, in which case we want to leave it as a COMPARE
3476 so we can distinguish it from a register-register-copy.
3477
3478 In IEEE floating point, x-0 is not the same as x. */
3479
3480 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3481 || GET_MODE_CLASS (mode) == MODE_INT)
3482 && op1 == CONST0_RTX (mode))
3483 return op0;
3484 #else
3485 /* Do nothing here. */
3486 #endif
3487 break;
3488
3489 case MINUS:
3490 /* None of these optimizations can be done for IEEE
3491 floating point. */
3492 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3493 && GET_MODE_CLASS (mode) != MODE_INT
3494 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
3495 break;
3496
3497 /* We can't assume x-x is 0 even with non-IEEE floating point. */
3498 if (rtx_equal_p (op0, op1)
3499 && ! side_effects_p (op0)
3500 && GET_MODE_CLASS (mode) != MODE_FLOAT
3501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
3502 return const0_rtx;
3503
3504 /* Change subtraction from zero into negation. */
3505 if (op0 == CONST0_RTX (mode))
3506 return gen_rtx (NEG, mode, op1);
3507
3508 /* (-1 - a) is ~a. */
3509 if (op0 == constm1_rtx)
3510 return gen_rtx (NOT, mode, op1);
3511
3512 /* Subtracting 0 has no effect. */
3513 if (op1 == CONST0_RTX (mode))
3514 return op0;
3515
3516 /* (a - (-b)) -> (a + b). */
3517 if (GET_CODE (op1) == NEG)
3518 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3519
3520 /* If one of the operands is a PLUS or a MINUS, see if we can
3521 simplify this by the associative law.
3522 Don't use the associative law for floating point.
3523 The inaccuracy makes it nonassociative,
3524 and subtle programs can break if operations are associated. */
3525
3526 if ((GET_MODE_CLASS (mode) == MODE_INT
3527 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3528 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3529 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3530 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3531 return tem;
3532
3533 /* Don't let a relocatable value get a negative coeff. */
3534 if (GET_CODE (op1) == CONST_INT)
3535 return plus_constant (op0, - INTVAL (op1));
3536 break;
3537
3538 case MULT:
3539 if (op1 == constm1_rtx)
3540 {
3541 tem = simplify_unary_operation (NEG, mode, op0, mode);
3542
3543 return tem ? tem : gen_rtx (NEG, mode, op0);
3544 }
3545
3546 /* In IEEE floating point, x*0 is not always 0. */
3547 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3548 || GET_MODE_CLASS (mode) == MODE_INT)
3549 && op1 == CONST0_RTX (mode)
3550 && ! side_effects_p (op0))
3551 return op1;
3552
3553 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3554 However, ANSI says we can drop signals,
3555 so we can do this anyway. */
3556 if (op1 == CONST1_RTX (mode))
3557 return op0;
3558
3559 /* Convert multiply by constant power of two into shift. */
3560 if (GET_CODE (op1) == CONST_INT
3561 && (val = exact_log2 (INTVAL (op1))) >= 0)
3562 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3563
3564 if (GET_CODE (op1) == CONST_DOUBLE
3565 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3566 {
3567 REAL_VALUE_TYPE d;
3568 jmp_buf handler;
3569 int op1is2, op1ism1;
3570
3571 if (setjmp (handler))
3572 return 0;
3573
3574 set_float_handler (handler);
3575 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3576 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3577 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3578 set_float_handler (NULL_PTR);
3579
3580 /* x*2 is x+x and x*(-1) is -x */
3581 if (op1is2 && GET_MODE (op0) == mode)
3582 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3583
3584 else if (op1ism1 && GET_MODE (op0) == mode)
3585 return gen_rtx (NEG, mode, op0);
3586 }
3587 break;
3588
3589 case IOR:
3590 if (op1 == const0_rtx)
3591 return op0;
3592 if (GET_CODE (op1) == CONST_INT
3593 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3594 return op1;
3595 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3596 return op0;
3597 /* A | (~A) -> -1 */
3598 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3599 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3600 && ! side_effects_p (op0))
3601 return constm1_rtx;
3602 break;
3603
3604 case XOR:
3605 if (op1 == const0_rtx)
3606 return op0;
3607 if (GET_CODE (op1) == CONST_INT
3608 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3609 return gen_rtx (NOT, mode, op0);
3610 if (op0 == op1 && ! side_effects_p (op0))
3611 return const0_rtx;
3612 break;
3613
3614 case AND:
3615 if (op1 == const0_rtx && ! side_effects_p (op0))
3616 return const0_rtx;
3617 if (GET_CODE (op1) == CONST_INT
3618 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3619 return op0;
3620 if (op0 == op1 && ! side_effects_p (op0))
3621 return op0;
3622 /* A & (~A) -> 0 */
3623 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3624 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3625 && ! side_effects_p (op0))
3626 return const0_rtx;
3627 break;
3628
3629 case UDIV:
3630 /* Convert divide by power of two into shift (divide by 1 handled
3631 below). */
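/* For example, (udiv x (const_int 16)) becomes (lshiftrt x (const_int 4)).
The logical right shift matches unsigned division only; that is why the
signed DIV case below does not take this shortcut. */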
3632 if (GET_CODE (op1) == CONST_INT
3633 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3634 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3635
3636 /* ... fall through ... */
3637
3638 case DIV:
3639 if (op1 == CONST1_RTX (mode))
3640 return op0;
3641
3642 /* In IEEE floating point, 0/x is not always 0. */
3643 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3644 || GET_MODE_CLASS (mode) == MODE_INT)
3645 && op0 == CONST0_RTX (mode)
3646 && ! side_effects_p (op1))
3647 return op0;
3648
3649 #if 0 /* Turned off till an expert says this is a safe thing to do. */
3650 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3651 /* Change division by a constant into multiplication. */
3652 else if (GET_CODE (op1) == CONST_DOUBLE
3653 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3654 && op1 != CONST0_RTX (mode))
3655 {
3656 REAL_VALUE_TYPE d;
3657 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3658 if (REAL_VALUES_EQUAL (d, dconst0))
3659 abort ();
3660 #if defined (REAL_ARITHMETIC)
3661 REAL_ARITHMETIC (d, (int) RDIV_EXPR, dconst1, d);
3662 return gen_rtx (MULT, mode, op0,
3663 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3664 #else
3665 return gen_rtx (MULT, mode, op0,
3666 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3667 #endif
3668 }
3669 #endif
3670 #endif
3671 break;
3672
3673 case UMOD:
3674 /* Handle modulus by power of two (mod with 1 handled below). */
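/* For example, (umod x (const_int 8)) becomes (and x (const_int 7)):
for unsigned values, the remainder modulo a power of two is just the
low-order bits. */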
3675 if (GET_CODE (op1) == CONST_INT
3676 && exact_log2 (INTVAL (op1)) > 0)
3677 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3678
3679 /* ... fall through ... */
3680
3681 case MOD:
3682 if ((op0 == const0_rtx || op1 == const1_rtx)
3683 && ! side_effects_p (op0) && ! side_effects_p (op1))
3684 return const0_rtx;
3685 break;
3686
3687 case ROTATERT:
3688 case ROTATE:
3689 /* Rotating ~0 always results in ~0. */
3690 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3691 && INTVAL (op0) == GET_MODE_MASK (mode)
3692 && ! side_effects_p (op1))
3693 return op0;
3694
3695 /* ... fall through ... */
3696
3697 case LSHIFT:
3698 case ASHIFT:
3699 case ASHIFTRT:
3700 case LSHIFTRT:
3701 if (op1 == const0_rtx)
3702 return op0;
3703 if (op0 == const0_rtx && ! side_effects_p (op1))
3704 return op0;
3705 break;
3706
3707 case SMIN:
3708 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3709 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3710 && ! side_effects_p (op0))
3711 return op1;
3712 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3713 return op0;
3714 break;
3715
3716 case SMAX:
3717 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3718 && INTVAL (op1) == (unsigned) GET_MODE_MASK (mode) >> 1
3719 && ! side_effects_p (op0))
3720 return op1;
3721 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3722 return op0;
3723 break;
3724
3725 case UMIN:
3726 if (op1 == const0_rtx && ! side_effects_p (op0))
3727 return op1;
3728 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3729 return op0;
3730 break;
3731
3732 case UMAX:
3733 if (op1 == constm1_rtx && ! side_effects_p (op0))
3734 return op1;
3735 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3736 return op0;
3737 break;
3738
3739 default:
3740 abort ();
3741 }
3742
3743 return 0;
3744 }
3745
3746 /* Get the integer argument values in two forms:
3747 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3748
3749 arg0 = INTVAL (op0);
3750 arg1 = INTVAL (op1);
3751
3752 if (width < HOST_BITS_PER_WIDE_INT)
3753 {
3754 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3755 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3756
3757 arg0s = arg0;
3758 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3759 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3760
3761 arg1s = arg1;
3762 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3763 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3764 }
3765 else
3766 {
3767 arg0s = arg0;
3768 arg1s = arg1;
3769 }
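/* Worked example: for width == 8 and op0 == (const_int 0xff), arg0 is
0xff == 255 after masking, and since bit 7 is set arg0s becomes -1;
the signed cases below see -1 while the unsigned cases see 255. */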
3770
3771 /* Compute the value of the arithmetic. */
3772
3773 switch (code)
3774 {
3775 case PLUS:
3776 val = arg0s + arg1s;
3777 break;
3778
3779 case MINUS:
3780 val = arg0s - arg1s;
3781 break;
3782
3783 case MULT:
3784 val = arg0s * arg1s;
3785 break;
3786
3787 case DIV:
3788 if (arg1s == 0)
3789 return 0;
3790 val = arg0s / arg1s;
3791 break;
3792
3793 case MOD:
3794 if (arg1s == 0)
3795 return 0;
3796 val = arg0s % arg1s;
3797 break;
3798
3799 case UDIV:
3800 if (arg1 == 0)
3801 return 0;
3802 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3803 break;
3804
3805 case UMOD:
3806 if (arg1 == 0)
3807 return 0;
3808 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3809 break;
3810
3811 case AND:
3812 val = arg0 & arg1;
3813 break;
3814
3815 case IOR:
3816 val = arg0 | arg1;
3817 break;
3818
3819 case XOR:
3820 val = arg0 ^ arg1;
3821 break;
3822
3823 case LSHIFTRT:
3824 /* If shift count is undefined, don't fold it; let the machine do
3825 what it wants. But truncate it if the machine will do that. */
3826 if (arg1 < 0)
3827 return 0;
3828
3829 #ifdef SHIFT_COUNT_TRUNCATED
3830 arg1 &= (BITS_PER_WORD - 1);
3831 #endif
3832
3833 if (arg1 >= width)
3834 return 0;
3835
3836 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
3837 break;
3838
3839 case ASHIFT:
3840 case LSHIFT:
3841 if (arg1 < 0)
3842 return 0;
3843
3844 #ifdef SHIFT_COUNT_TRUNCATED
3845 arg1 &= (BITS_PER_WORD - 1);
3846 #endif
3847
3848 if (arg1 >= width)
3849 return 0;
3850
3851 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
3852 break;
3853
3854 case ASHIFTRT:
3855 if (arg1 < 0)
3856 return 0;
3857
3858 #ifdef SHIFT_COUNT_TRUNCATED
3859 arg1 &= (BITS_PER_WORD - 1);
3860 #endif
3861
3862 if (arg1 >= width)
3863 return 0;
3864
3865 val = arg0s >> arg1;
3866
3867 /* Bootstrap compiler may not have sign extended the right shift.
3868 Manually extend the sign to ensure bootstrap cc matches gcc. */
3869 if (arg0s < 0 && arg1 > 0)
3870 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
3871
3872 break;
3873
3874 case ROTATERT:
3875 if (arg1 < 0)
3876 return 0;
3877
if ((arg1 %= width) == 0)
val = arg0; /* shifting by the full width below would be undefined */
else
val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
| (((unsigned HOST_WIDE_INT) arg0) >> arg1));
3881 break;
3882
3883 case ROTATE:
3884 if (arg1 < 0)
3885 return 0;
3886
if ((arg1 %= width) == 0)
val = arg0; /* shifting by the full width below would be undefined */
else
val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
| (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
3890 break;
3891
3892 case COMPARE:
3893 /* Do nothing here. */
3894 return 0;
3895
3896 case SMIN:
3897 val = arg0s <= arg1s ? arg0s : arg1s;
3898 break;
3899
3900 case UMIN:
3901 val = ((unsigned HOST_WIDE_INT) arg0
3902 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3903 break;
3904
3905 case SMAX:
3906 val = arg0s > arg1s ? arg0s : arg1s;
3907 break;
3908
3909 case UMAX:
3910 val = ((unsigned HOST_WIDE_INT) arg0
3911 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3912 break;
3913
3914 default:
3915 abort ();
3916 }
3917
3918 /* Clear the bits that don't belong in our mode, unless they and our sign
3919 bit are all one. So we get either a reasonable negative value or a
3920 reasonable unsigned value for this mode. */
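/* E.g. for width == 8: val == -1 has the sign bit and all higher bits
set, so it is left alone, while val == 256 (from folding 16 * 16)
is masked down to 0, the correct product modulo 2**8. */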
3921 if (width < HOST_BITS_PER_WIDE_INT
3922 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3923 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3924 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3925
3926 return GEN_INT (val);
3927 }
3928 \f
3929 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
3930 PLUS or MINUS.
3931
3932 Rather than test for specific cases, we do this by a brute-force method
3933 and do all possible simplifications until no more changes occur. Then
3934 we rebuild the operation. */
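/* For example, (minus (plus x (const_int 3)) (minus y (const_int 2)))
expands to the operand list x, 3, -y, 2; the two constants combine
into 5, and the result is rebuilt as (plus (minus x y) (const_int 5)). */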
3935
3936 static rtx
3937 simplify_plus_minus (code, mode, op0, op1)
3938 enum rtx_code code;
3939 enum machine_mode mode;
3940 rtx op0, op1;
3941 {
3942 rtx ops[8];
3943 int negs[8];
3944 rtx result, tem;
3945 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
3946 int first = 1, negate = 0, changed;
3947 int i, j;
3948
3949 bzero (ops, sizeof ops);
3950
3951 /* Set up the two operands and then expand them until nothing has been
3952 changed. If we run out of room in our array, give up; this should
3953 almost never happen. */
3954
3955 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
3956
3957 changed = 1;
3958 while (changed)
3959 {
3960 changed = 0;
3961
3962 for (i = 0; i < n_ops; i++)
3963 switch (GET_CODE (ops[i]))
3964 {
3965 case PLUS:
3966 case MINUS:
3967 if (n_ops == 7)
3968 return 0;
3969
3970 ops[n_ops] = XEXP (ops[i], 1);
3971 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
3972 ops[i] = XEXP (ops[i], 0);
3973 input_ops++;
3974 changed = 1;
3975 break;
3976
3977 case NEG:
3978 ops[i] = XEXP (ops[i], 0);
3979 negs[i] = ! negs[i];
3980 changed = 1;
3981 break;
3982
3983 case CONST:
3984 ops[i] = XEXP (ops[i], 0);
3985 input_consts++;
3986 changed = 1;
3987 break;
3988
3989 case NOT:
3990 /* ~a -> (-a - 1) */
3991 if (n_ops != 7)
3992 {
3993 ops[n_ops] = constm1_rtx;
3994 negs[n_ops++] = negs[i];
3995 ops[i] = XEXP (ops[i], 0);
3996 negs[i] = ! negs[i];
3997 changed = 1;
3998 }
3999 break;
4000
4001 case CONST_INT:
4002 if (negs[i])
4003 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4004 break;
4005 }
4006 }
4007
4008 /* If we only have two operands, we can't do anything. */
4009 if (n_ops <= 2)
4010 return 0;
4011
4012 /* Now simplify each pair of operands until nothing changes. The first
4013 time through just simplify constants against each other. */
4014
4015 changed = 1;
4016 while (changed)
4017 {
4018 changed = first;
4019
4020 for (i = 0; i < n_ops - 1; i++)
4021 for (j = i + 1; j < n_ops; j++)
4022 if (ops[i] != 0 && ops[j] != 0
4023 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4024 {
4025 rtx lhs = ops[i], rhs = ops[j];
4026 enum rtx_code ncode = PLUS;
4027
4028 if (negs[i] && ! negs[j])
4029 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4030 else if (! negs[i] && negs[j])
4031 ncode = MINUS;
4032
4033 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4034 if (tem)
4035 {
4036 ops[i] = tem, ops[j] = 0;
4037 negs[i] = negs[i] && negs[j];
4038 if (GET_CODE (tem) == NEG)
4039 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4040
4041 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4042 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4043 changed = 1;
4044 }
4045 }
4046
4047 first = 0;
4048 }
4049
4050 /* Pack all the operands to the lower-numbered entries and give up if
4051 we didn't reduce the number of operands we had. Make sure we
4052 count a CONST as two operands. If we have the same number of
4053 operands, but have made more CONSTs than we had, this is also
4054 an improvement, so accept it. */
4055
4056 for (i = 0, j = 0; j < n_ops; j++)
4057 if (ops[j] != 0)
4058 {
4059 ops[i] = ops[j], negs[i++] = negs[j];
4060 if (GET_CODE (ops[j]) == CONST)
4061 n_consts++;
4062 }
4063
4064 if (i + n_consts > input_ops
4065 || (i + n_consts == input_ops && n_consts <= input_consts))
4066 return 0;
4067
4068 n_ops = i;
4069
4070 /* If we have a CONST_INT, put it last. */
4071 for (i = 0; i < n_ops - 1; i++)
4072 if (GET_CODE (ops[i]) == CONST_INT)
4073 {
4074 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4075 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4076 }
4077
4078 /* Put a non-negated operand first. If there aren't any, make all
4079 operands positive and negate the whole thing later. */
4080 for (i = 0; i < n_ops && negs[i]; i++)
4081 ;
4082
4083 if (i == n_ops)
4084 {
4085 for (i = 0; i < n_ops; i++)
4086 negs[i] = 0;
4087 negate = 1;
4088 }
4089 else if (i != 0)
4090 {
4091 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4092 j = negs[0], negs[0] = negs[i], negs[i] = j;
4093 }
4094
4095 /* Now make the result by performing the requested operations. */
4096 result = ops[0];
4097 for (i = 1; i < n_ops; i++)
4098 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4099
4100 return negate ? gen_rtx (NEG, mode, result) : result;
4101 }
4102 \f
4103 /* Make a binary operation by properly ordering the operands and
4104 seeing if the expression folds. */
4105
4106 static rtx
4107 cse_gen_binary (code, mode, op0, op1)
4108 enum rtx_code code;
4109 enum machine_mode mode;
4110 rtx op0, op1;
4111 {
4112 rtx tem;
4113
4114 /* Put complex operands first and constants second if commutative. */
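/* For example, (plus (const_int 4) (reg)) is rewritten here as
(plus (reg) (const_int 4)), giving equivalent expressions one
canonical shape; a sole CONST_INT ends up where the PLUS and MINUS
special cases below expect it. */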
4115 if (GET_RTX_CLASS (code) == 'c'
4116 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4117 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4118 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4119 || (GET_CODE (op0) == SUBREG
4120 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4121 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4122 tem = op0, op0 = op1, op1 = tem;
4123
4124 /* If this simplifies, do it. */
4125 tem = simplify_binary_operation (code, mode, op0, op1);
4126
4127 if (tem)
4128 return tem;
4129
4130 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4131 just form the operation. */
4132
4133 if (code == PLUS && GET_CODE (op1) == CONST_INT
4134 && GET_MODE (op0) != VOIDmode)
4135 return plus_constant (op0, INTVAL (op1));
4136 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4137 && GET_MODE (op0) != VOIDmode)
4138 return plus_constant (op0, - INTVAL (op1));
4139 else
4140 return gen_rtx (code, mode, op0, op1);
4141 }
4142 \f
4143 /* Like simplify_binary_operation except used for relational operators.
4144 MODE is the mode of the operands, not that of the result. */
4145
4146 rtx
4147 simplify_relational_operation (code, mode, op0, op1)
4148 enum rtx_code code;
4149 enum machine_mode mode;
4150 rtx op0, op1;
4151 {
4152 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
4153 HOST_WIDE_INT val;
4154 int width = GET_MODE_BITSIZE (mode);
4155
4156 /* If op0 is a compare, extract the comparison arguments from it. */
4157 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4158 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4159
4160 /* Unlike the arithmetic operations, we can do the comparison whether
4161 or not WIDTH is larger than HOST_BITS_PER_WIDE_INT because the
4162 CONST_INTs are to be understood as being infinite precision as
4163 is the comparison. So there is no question of overflow. */
4164
4165 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT || width == 0)
4166 {
4167 /* Even if we can't compute a constant result,
4168 there are some cases worth simplifying. */
4169
4170 /* For non-IEEE floating-point, if the two operands are equal, we know
4171 the result. */
4172 if (rtx_equal_p (op0, op1)
4173 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4174 || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
4175 return (code == EQ || code == GE || code == LE || code == LEU
4176 || code == GEU) ? const_true_rtx : const0_rtx;
4177
4178 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4179 else if (GET_CODE (op0) == CONST_DOUBLE
4180 && GET_CODE (op1) == CONST_DOUBLE
4181 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4182 {
4183 REAL_VALUE_TYPE d0, d1;
4184 jmp_buf handler;
4185 int op0lt, op1lt, equal;
4186
4187 if (setjmp (handler))
4188 return 0;
4189
4190 set_float_handler (handler);
4191 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4192 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4193 equal = REAL_VALUES_EQUAL (d0, d1);
4194 op0lt = REAL_VALUES_LESS (d0, d1);
4195 op1lt = REAL_VALUES_LESS (d1, d0);
4196 set_float_handler (NULL_PTR);
4197
4198 switch (code)
4199 {
4200 case EQ:
4201 return equal ? const_true_rtx : const0_rtx;
4202 case NE:
4203 return !equal ? const_true_rtx : const0_rtx;
4204 case LE:
4205 return equal || op0lt ? const_true_rtx : const0_rtx;
4206 case LT:
4207 return op0lt ? const_true_rtx : const0_rtx;
4208 case GE:
4209 return equal || op1lt ? const_true_rtx : const0_rtx;
4210 case GT:
4211 return op1lt ? const_true_rtx : const0_rtx;
4212 }
4213 }
4214 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4215
4216 else if (GET_MODE_CLASS (mode) == MODE_INT
4217 && width > HOST_BITS_PER_WIDE_INT
4218 && (GET_CODE (op0) == CONST_DOUBLE
4219 || GET_CODE (op0) == CONST_INT)
4220 && (GET_CODE (op1) == CONST_DOUBLE
4221 || GET_CODE (op1) == CONST_INT))
4222 {
4223 HOST_WIDE_INT h0, l0, h1, l1;
4224 unsigned HOST_WIDE_INT uh0, ul0, uh1, ul1;
4225 int op0lt, op0ltu, equal;
4226
4227 if (GET_CODE (op0) == CONST_DOUBLE)
4228 l0 = CONST_DOUBLE_LOW (op0), h0 = CONST_DOUBLE_HIGH (op0);
4229 else
4230 l0 = INTVAL (op0), h0 = l0 < 0 ? -1 : 0;
4231
4232 if (GET_CODE (op1) == CONST_DOUBLE)
4233 l1 = CONST_DOUBLE_LOW (op1), h1 = CONST_DOUBLE_HIGH (op1);
4234 else
4235 l1 = INTVAL (op1), h1 = l1 < 0 ? -1 : 0;
4236
4237 uh0 = h0, ul0 = l0, uh1 = h1, ul1 = l1;
4238
4239 equal = (h0 == h1 && l0 == l1);
4240 op0lt = (h0 < h1 || (h0 == h1 && ul0 < ul1));
4241 op0ltu = (uh0 < uh1 || (uh0 == uh1 && ul0 < ul1));
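/* The low words always compare unsigned, even for the signed test:
e.g. with a 32-bit HOST_WIDE_INT the 64-bit value -1 is the pair
(-1, -1); signed it is less than (0, 0), so op0lt holds, while as
an unsigned pair it is the maximum value, so op0ltu does not. */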
4242
4243 switch (code)
4244 {
4245 case EQ:
4246 return equal ? const_true_rtx : const0_rtx;
4247 case NE:
4248 return !equal ? const_true_rtx : const0_rtx;
4249 case LE:
4250 return equal || op0lt ? const_true_rtx : const0_rtx;
4251 case LT:
4252 return op0lt ? const_true_rtx : const0_rtx;
4253 case GE:
4254 return !op0lt ? const_true_rtx : const0_rtx;
4255 case GT:
4256 return !equal && !op0lt ? const_true_rtx : const0_rtx;
4257 case LEU:
4258 return equal || op0ltu ? const_true_rtx : const0_rtx;
4259 case LTU:
4260 return op0ltu ? const_true_rtx : const0_rtx;
4261 case GEU:
4262 return !op0ltu ? const_true_rtx : const0_rtx;
4263 case GTU:
4264 return !equal && !op0ltu ? const_true_rtx : const0_rtx;
4265 }
4266 }
4267
4268 switch (code)
4269 {
4270 case EQ:
4271 {
4272 #if 0
4273 /* We can't make this assumption due to #pragma weak */
4274 if (CONSTANT_P (op0) && op1 == const0_rtx)
4275 return const0_rtx;
4276 #endif
4277 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4278 /* On some machines, the ap reg can be 0 sometimes. */
4279 && op0 != arg_pointer_rtx)
4280 return const0_rtx;
4281 break;
4282 }
4283
4284 case NE:
4285 #if 0
4286 /* We can't make this assumption due to #pragma weak */
4287 if (CONSTANT_P (op0) && op1 == const0_rtx)
4288 return const_true_rtx;
4289 #endif
4290 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4291 /* On some machines, the ap reg can be 0 sometimes. */
4292 && op0 != arg_pointer_rtx)
4293 return const_true_rtx;
4294 break;
4295
4296 case GEU:
4297 /* Unsigned values are never negative, but we must be sure we are
4298 actually comparing a value, not a CC operand. */
4299 if (op1 == const0_rtx
4300 && GET_MODE_CLASS (mode) == MODE_INT)
4301 return const_true_rtx;
4302 break;
4303
4304 case LTU:
4305 if (op1 == const0_rtx
4306 && GET_MODE_CLASS (mode) == MODE_INT)
4307 return const0_rtx;
4308 break;
4309
4310 case LEU:
4311 /* Unsigned values are never greater than the largest
4312 unsigned value. */
4313 if (GET_CODE (op1) == CONST_INT
4314 && INTVAL (op1) == GET_MODE_MASK (mode)
4315 && GET_MODE_CLASS (mode) == MODE_INT)
4316 return const_true_rtx;
4317 break;
4318
4319 case GTU:
4320 if (GET_CODE (op1) == CONST_INT
4321 && INTVAL (op1) == GET_MODE_MASK (mode)
4322 && GET_MODE_CLASS (mode) == MODE_INT)
4323 return const0_rtx;
4324 break;
4325 }
4326
4327 return 0;
4328 }
4329
4330 /* Get the integer argument values in two forms:
4331 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4332
4333 arg0 = INTVAL (op0);
4334 arg1 = INTVAL (op1);
4335
4336 if (width < HOST_BITS_PER_WIDE_INT)
4337 {
4338 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4339 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4340
4341 arg0s = arg0;
4342 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4343 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4344
4345 arg1s = arg1;
4346 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4347 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4348 }
4349 else
4350 {
4351 arg0s = arg0;
4352 arg1s = arg1;
4353 }
4354
4355 /* Compute the value of the arithmetic. */
4356
4357 switch (code)
4358 {
4359 case NE:
4360 val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
4361 break;
4362
4363 case EQ:
4364 val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
4365 break;
4366
4367 case LE:
4368 val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
4369 break;
4370
4371 case LT:
4372 val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
4373 break;
4374
4375 case GE:
4376 val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
4377 break;
4378
4379 case GT:
4380 val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
4381 break;
4382
4383 case LEU:
4384 val = (((unsigned HOST_WIDE_INT) arg0)
4385 <= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4386 break;
4387
4388 case LTU:
4389 val = (((unsigned HOST_WIDE_INT) arg0)
4390 < ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4391 break;
4392
4393 case GEU:
4394 val = (((unsigned HOST_WIDE_INT) arg0)
4395 >= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4396 break;
4397
4398 case GTU:
4399 val = (((unsigned HOST_WIDE_INT) arg0)
4400 > ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4401 break;
4402
4403 default:
4404 abort ();
4405 }
4406
4407 /* Clear the bits that don't belong in our mode, unless they and our sign
4408 bit are all one. So we get either a reasonable negative value or a
4409 reasonable unsigned value for this mode. */
4410 if (width < HOST_BITS_PER_WIDE_INT
4411 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4412 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4413 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4414
4415 return GEN_INT (val);
4416 }
4417 \f
4418 /* Simplify CODE, an operation with result mode MODE and three operands,
4419 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4420 a constant. Return 0 if no simplification is possible. */
4421
4422 rtx
4423 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4424 enum rtx_code code;
4425 enum machine_mode mode, op0_mode;
4426 rtx op0, op1, op2;
4427 {
4428 int width = GET_MODE_BITSIZE (mode);
4429
4430 /* VOIDmode means "infinite" precision. */
4431 if (width == 0)
4432 width = HOST_BITS_PER_WIDE_INT;
4433
4434 switch (code)
4435 {
4436 case SIGN_EXTRACT:
4437 case ZERO_EXTRACT:
4438 if (GET_CODE (op0) == CONST_INT
4439 && GET_CODE (op1) == CONST_INT
4440 && GET_CODE (op2) == CONST_INT
4441 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4442 && width <= HOST_BITS_PER_WIDE_INT)
4443 {
4444 /* Extracting a bit-field from a constant */
4445 HOST_WIDE_INT val = INTVAL (op0);
4446
4447 #if BITS_BIG_ENDIAN
4448 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4449 #else
4450 val >>= INTVAL (op2);
4451 #endif
4452 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4453 {
4454 /* First zero-extend. */
4455 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4456 /* If desired, propagate sign bit. */
4457 if (code == SIGN_EXTRACT
4458 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4459 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4460 }
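/* Worked example (with BITS_BIG_ENDIAN clear): extracting a 4-bit
field at bit position 4 from (const_int 0xab) shifts right to get
0xa; ZERO_EXTRACT masks that to 0xa == 10, while SIGN_EXTRACT sees
bit 3 set and sign-propagates to -6. */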
4461
4462 /* Clear the bits that don't belong in our mode,
4463 unless they and our sign bit are all one.
4464 So we get either a reasonable negative value or a reasonable
4465 unsigned value for this mode. */
4466 if (width < HOST_BITS_PER_WIDE_INT
4467 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4468 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4469 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4470
4471 return GEN_INT (val);
4472 }
4473 break;
4474
4475 case IF_THEN_ELSE:
4476 if (GET_CODE (op0) == CONST_INT)
4477 return op0 != const0_rtx ? op1 : op2;
4478 break;
4479
4480 default:
4481 abort ();
4482 }
4483
4484 return 0;
4485 }
4486 \f
4487 /* If X is a nontrivial arithmetic operation on an argument
4488 for which a constant value can be determined, return
4489 the result of operating on that value, as a constant.
4490 Otherwise, return X, possibly with one or more operands
4491 modified by recursive calls to this function.
4492
4493 If X is a register whose contents are known, we do NOT
4494 return those contents here. equiv_constant is called to
4495 perform that task.
4496
4497 INSN is the insn that we may be modifying. If it is 0, make a copy
4498 of X before modifying it. */
4499
4500 static rtx
4501 fold_rtx (x, insn)
4502 rtx x;
4503 rtx insn;
4504 {
4505 register enum rtx_code code;
4506 register enum machine_mode mode;
4507 register char *fmt;
4508 register int i;
4509 rtx new = 0;
4510 int copied = 0;
4511 int must_swap = 0;
4512
4513 /* Folded equivalents of first two operands of X. */
4514 rtx folded_arg0;
4515 rtx folded_arg1;
4516
4517 /* Constant equivalents of first three operands of X;
4518 0 when no such equivalent is known. */
4519 rtx const_arg0;
4520 rtx const_arg1;
4521 rtx const_arg2;
4522
4523 /* The mode of the first operand of X. We need this for sign and zero
4524 extends. */
4525 enum machine_mode mode_arg0;
4526
4527 if (x == 0)
4528 return x;
4529
4530 mode = GET_MODE (x);
4531 code = GET_CODE (x);
4532 switch (code)
4533 {
4534 case CONST:
4535 case CONST_INT:
4536 case CONST_DOUBLE:
4537 case SYMBOL_REF:
4538 case LABEL_REF:
4539 case REG:
4540 /* No use simplifying an EXPR_LIST
4541 since they are used only for lists of args
4542 in a function call's REG_EQUAL note. */
4543 case EXPR_LIST:
4544 return x;
4545
4546 #ifdef HAVE_cc0
4547 case CC0:
4548 return prev_insn_cc0;
4549 #endif
4550
4551 case PC:
4552 /* If the next insn is a CODE_LABEL followed by a jump table,
4553 PC's value is a LABEL_REF pointing to that label. That
4554 lets us fold switch statements on the Vax. */
4555 if (insn && GET_CODE (insn) == JUMP_INSN)
4556 {
4557 rtx next = next_nonnote_insn (insn);
4558
4559 if (next && GET_CODE (next) == CODE_LABEL
4560 && NEXT_INSN (next) != 0
4561 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4562 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4563 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4564 return gen_rtx (LABEL_REF, Pmode, next);
4565 }
4566 break;
4567
4568 case SUBREG:
4569 /* See if we previously assigned a constant value to this SUBREG. */
4570 if ((new = lookup_as_function (x, CONST_INT)) != 0
4571 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4572 return new;
4573
4574 /* If this is a paradoxical SUBREG, we have no idea what value the
4575 extra bits would have. However, if the operand is equivalent
4576 to a SUBREG whose operand is the same as our mode, and all the
4577 modes are within a word, we can just use the inner operand
4578 because these SUBREGs just say how to treat the register. */
4579
4580 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4581 {
4582 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4583 struct table_elt *elt;
4584
4585 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4586 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4587 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4588 imode)) != 0)
4589 {
4590 for (elt = elt->first_same_value;
4591 elt; elt = elt->next_same_value)
4592 if (GET_CODE (elt->exp) == SUBREG
4593 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4594 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4595 return copy_rtx (SUBREG_REG (elt->exp));
4596 }
4597
4598 return x;
4599 }
4600
4601 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4602 We might be able to if the SUBREG is extracting a single word in an
4603 integral mode or extracting the low part. */
4604
4605 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4606 const_arg0 = equiv_constant (folded_arg0);
4607 if (const_arg0)
4608 folded_arg0 = const_arg0;
4609
4610 if (folded_arg0 != SUBREG_REG (x))
4611 {
4612 new = 0;
4613
4614 if (GET_MODE_CLASS (mode) == MODE_INT
4615 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4616 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4617 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4618 GET_MODE (SUBREG_REG (x)));
4619 if (new == 0 && subreg_lowpart_p (x))
4620 new = gen_lowpart_if_possible (mode, folded_arg0);
4621 if (new)
4622 return new;
4623 }
4624
4625 /* If this is a narrowing SUBREG and our operand is a REG, see if
4626 we can find an equivalence for REG that is an arithmetic operation
4627 in a wider mode where both operands are paradoxical SUBREGs
4628 from objects of our result mode. In that case, we couldn't report
4629 an equivalent value for that operation, since we don't know what the
4630 extra bits will be. But we can find an equivalence for this SUBREG
4631 by folding that operation in the narrow mode. This allows us to
4632 fold arithmetic in narrow modes when the machine only supports
4633 word-sized arithmetic.
4634
4635 Also look for a case where we have a SUBREG whose operand is the
4636 same as our result. If both modes are smaller than a word, we
4637 are simply interpreting a register in different modes and we
4638 can use the inner value. */
4639
4640 if (GET_CODE (folded_arg0) == REG
4641 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4642 && subreg_lowpart_p (x))
4643 {
4644 struct table_elt *elt;
4645
4646 /* We can use HASH here since we know that canon_hash won't be
4647 called. */
4648 elt = lookup (folded_arg0,
4649 HASH (folded_arg0, GET_MODE (folded_arg0)),
4650 GET_MODE (folded_arg0));
4651
4652 if (elt)
4653 elt = elt->first_same_value;
4654
4655 for (; elt; elt = elt->next_same_value)
4656 {
4657 enum rtx_code eltcode = GET_CODE (elt->exp);
4658
4659 /* Just check for unary and binary operations. */
4660 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4661 && GET_CODE (elt->exp) != SIGN_EXTEND
4662 && GET_CODE (elt->exp) != ZERO_EXTEND
4663 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4664 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4665 {
4666 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4667
4668 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4669 op0 = fold_rtx (op0, NULL_RTX);
4670
4671 op0 = equiv_constant (op0);
4672 if (op0)
4673 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4674 op0, mode);
4675 }
4676 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4677 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4678 && eltcode != DIV && eltcode != MOD
4679 && eltcode != UDIV && eltcode != UMOD
4680 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4681 && eltcode != ROTATE && eltcode != ROTATERT
4682 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4683 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4684 == mode))
4685 || CONSTANT_P (XEXP (elt->exp, 0)))
4686 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4687 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4688 == mode))
4689 || CONSTANT_P (XEXP (elt->exp, 1))))
4690 {
4691 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4692 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4693
4694 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4695 op0 = fold_rtx (op0, NULL_RTX);
4696
4697 if (op0)
4698 op0 = equiv_constant (op0);
4699
4700 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4701 op1 = fold_rtx (op1, NULL_RTX);
4702
4703 if (op1)
4704 op1 = equiv_constant (op1);
4705
4706 if (op0 && op1)
4707 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4708 op0, op1);
4709 }
4710
4711 else if (GET_CODE (elt->exp) == SUBREG
4712 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4713 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4714 <= UNITS_PER_WORD)
4715 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4716 new = copy_rtx (SUBREG_REG (elt->exp));
4717
4718 if (new)
4719 return new;
4720 }
4721 }
4722
4723 return x;
4724
4725 case NOT:
4726 case NEG:
4727 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4728 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4729 new = lookup_as_function (XEXP (x, 0), code);
4730 if (new)
4731 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4732 break;
4733
4734 case MEM:
4735 /* If we are not actually processing an insn, don't try to find the
4736 best address. Not only don't we care, but we could modify the
4737 MEM in an invalid way since we have no insn to validate against. */
4738 if (insn != 0)
4739 find_best_addr (insn, &XEXP (x, 0));
4740
4741 {
4742 /* Even if we don't fold in the insn itself,
4743 we can safely do so here, in hopes of getting a constant. */
4744 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4745 rtx base = 0;
4746 HOST_WIDE_INT offset = 0;
4747
4748 if (GET_CODE (addr) == REG
4749 && REGNO_QTY_VALID_P (REGNO (addr))
4750 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4751 && qty_const[reg_qty[REGNO (addr)]] != 0)
4752 addr = qty_const[reg_qty[REGNO (addr)]];
4753
4754 /* If address is constant, split it into a base and integer offset. */
4755 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4756 base = addr;
4757 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4758 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4759 {
4760 base = XEXP (XEXP (addr, 0), 0);
4761 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4762 }
4763 else if (GET_CODE (addr) == LO_SUM
4764 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4765 base = XEXP (addr, 1);
4766
4767 /* If this is a constant pool reference, we can fold it into its
4768 constant to allow better value tracking. */
4769 if (base && GET_CODE (base) == SYMBOL_REF
4770 && CONSTANT_POOL_ADDRESS_P (base))
4771 {
4772 rtx constant = get_pool_constant (base);
4773 enum machine_mode const_mode = get_pool_mode (base);
4774 rtx new;
4775
4776 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4777 constant_pool_entries_cost = COST (constant);
4778
4779 /* If we are loading the full constant, we have an equivalence. */
4780 if (offset == 0 && mode == const_mode)
4781 return constant;
4782
4783 /* If this actually isn't a constant (weird!), we can't do
4784 anything. Otherwise, handle the two most common cases:
4785 extracting a word from a multi-word constant, and extracting
4786 the low-order bits. Other cases don't seem common enough to
4787 worry about. */
4788 if (! CONSTANT_P (constant))
4789 return x;
4790
4791 if (GET_MODE_CLASS (mode) == MODE_INT
4792 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4793 && offset % UNITS_PER_WORD == 0
4794 && (new = operand_subword (constant,
4795 offset / UNITS_PER_WORD,
4796 0, const_mode)) != 0)
4797 return new;
4798
4799 if (((BYTES_BIG_ENDIAN
4800 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4801 || (! BYTES_BIG_ENDIAN && offset == 0))
4802 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4803 return new;
4804 }
4805
4806 /* If this is a reference to a label at a known position in a jump
4807 table, we also know its value. */
4808 if (base && GET_CODE (base) == LABEL_REF)
4809 {
4810 rtx label = XEXP (base, 0);
4811 rtx table_insn = NEXT_INSN (label);
4812
4813 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4814 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4815 {
4816 rtx table = PATTERN (table_insn);
4817
4818 if (offset >= 0
4819 && (offset / GET_MODE_SIZE (GET_MODE (table))
4820 < XVECLEN (table, 0)))
4821 return XVECEXP (table, 0,
4822 offset / GET_MODE_SIZE (GET_MODE (table)));
4823 }
4824 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4825 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4826 {
4827 rtx table = PATTERN (table_insn);
4828
4829 if (offset >= 0
4830 && (offset / GET_MODE_SIZE (GET_MODE (table))
4831 < XVECLEN (table, 1)))
4832 {
4833 offset /= GET_MODE_SIZE (GET_MODE (table));
4834 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4835 XEXP (table, 0));
4836
4837 if (GET_MODE (table) != Pmode)
4838 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4839
4840 return new;
4841 }
4842 }
4843 }
4844
4845 return x;
4846 }
4847 }
4848
4849 const_arg0 = 0;
4850 const_arg1 = 0;
4851 const_arg2 = 0;
4852 mode_arg0 = VOIDmode;
4853
4854 /* Try folding our operands.
4855 Then see which ones have constant values known. */
4856
4857 fmt = GET_RTX_FORMAT (code);
4858 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4859 if (fmt[i] == 'e')
4860 {
4861 rtx arg = XEXP (x, i);
4862 rtx folded_arg = arg, const_arg = 0;
4863 enum machine_mode mode_arg = GET_MODE (arg);
4864 rtx cheap_arg, expensive_arg;
4865 rtx replacements[2];
4866 int j;
4867
4868 /* Most arguments are cheap, so handle them specially. */
4869 switch (GET_CODE (arg))
4870 {
4871 case REG:
4872 /* This is the same as calling equiv_constant; it is duplicated
4873 here for speed. */
4874 if (REGNO_QTY_VALID_P (REGNO (arg))
4875 && qty_const[reg_qty[REGNO (arg)]] != 0
4876 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
4877 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
4878 const_arg
4879 = gen_lowpart_if_possible (GET_MODE (arg),
4880 qty_const[reg_qty[REGNO (arg)]]);
4881 break;
4882
4883 case CONST:
4884 case CONST_INT:
4885 case SYMBOL_REF:
4886 case LABEL_REF:
4887 case CONST_DOUBLE:
4888 const_arg = arg;
4889 break;
4890
4891 #ifdef HAVE_cc0
4892 case CC0:
4893 folded_arg = prev_insn_cc0;
4894 mode_arg = prev_insn_cc0_mode;
4895 const_arg = equiv_constant (folded_arg);
4896 break;
4897 #endif
4898
4899 default:
4900 folded_arg = fold_rtx (arg, insn);
4901 const_arg = equiv_constant (folded_arg);
4902 }
4903
4904 /* For the first three operands, see if the operand
4905 is constant or equivalent to a constant. */
4906 switch (i)
4907 {
4908 case 0:
4909 folded_arg0 = folded_arg;
4910 const_arg0 = const_arg;
4911 mode_arg0 = mode_arg;
4912 break;
4913 case 1:
4914 folded_arg1 = folded_arg;
4915 const_arg1 = const_arg;
4916 break;
4917 case 2:
4918 const_arg2 = const_arg;
4919 break;
4920 }
4921
4922 /* Pick the least expensive of the folded argument and an
4923 equivalent constant argument. */
4924 if (const_arg == 0 || const_arg == folded_arg
4925 || COST (const_arg) > COST (folded_arg))
4926 cheap_arg = folded_arg, expensive_arg = const_arg;
4927 else
4928 cheap_arg = const_arg, expensive_arg = folded_arg;
4929
4930 /* Try to replace the operand with the cheapest of the two
4931 possibilities. If it doesn't work and this is either of the first
4932 two operands of a commutative operation, try swapping them.
4933 If THAT fails, try the more expensive, provided it is cheaper
4934 than what is already there. */
4935
4936 if (cheap_arg == XEXP (x, i))
4937 continue;
4938
4939 if (insn == 0 && ! copied)
4940 {
4941 x = copy_rtx (x);
4942 copied = 1;
4943 }
4944
4945 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
4946 for (j = 0;
4947 j < 2 && replacements[j]
4948 && COST (replacements[j]) < COST (XEXP (x, i));
4949 j++)
4950 {
4951 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
4952 break;
4953
4954 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
4955 {
4956 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
4957 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
4958
4959 if (apply_change_group ())
4960 {
4961 /* The swapped form was valid; swap back to the invalid order so
4962 this loop can continue, and flag them to be swapped back later. */
4963 rtx tem;
4964
4965 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
4966 XEXP (x, 1) = tem;
4967 must_swap = 1;
4968 break;
4969 }
4970 }
4971 }
4972 }
4973
4974 else if (fmt[i] == 'E')
4975 /* Don't try to fold inside of a vector of expressions.
4976 Doing nothing is harmless. */
4977 ;
4978
4979 /* If a commutative operation, place a constant integer as the second
4980 operand unless the first operand is also a constant integer. Otherwise,
4981 place any constant second unless the first operand is also a constant. */
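/* For instance, (plus (const_int 2) (reg)) becomes (plus (reg) (const_int 2)).
When an actual insn is being modified, validate_change undoes the swap
if the rewritten insn no longer matches its pattern. */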
4982
4983 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4984 {
4985 if (must_swap || (const_arg0
4986 && (const_arg1 == 0
4987 || (GET_CODE (const_arg0) == CONST_INT
4988 && GET_CODE (const_arg1) != CONST_INT))))
4989 {
4990 register rtx tem = XEXP (x, 0);
4991
4992 if (insn == 0 && ! copied)
4993 {
4994 x = copy_rtx (x);
4995 copied = 1;
4996 }
4997
4998 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
4999 validate_change (insn, &XEXP (x, 1), tem, 1);
5000 if (apply_change_group ())
5001 {
5002 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5003 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5004 }
5005 }
5006 }
5007
5008 /* If X is an arithmetic operation, see if we can simplify it. */
5009
5010 switch (GET_RTX_CLASS (code))
5011 {
5012 case '1':
5013 /* We can't simplify extension ops unless we know the original mode. */
5014 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5015 && mode_arg0 == VOIDmode)
5016 break;
5017 new = simplify_unary_operation (code, mode,
5018 const_arg0 ? const_arg0 : folded_arg0,
5019 mode_arg0);
5020 break;
5021
5022 case '<':
5023 /* See what items are actually being compared and set FOLDED_ARG[01]
5024 to those values and CODE to the actual comparison code. If any are
5025 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5026 do anything if both operands are already known to be constant. */
5027
5028 if (const_arg0 == 0 || const_arg1 == 0)
5029 {
5030 struct table_elt *p0, *p1;
5031 rtx true = const_true_rtx, false = const0_rtx;
5032 enum machine_mode mode_arg1;
5033
5034 #ifdef FLOAT_STORE_FLAG_VALUE
5035 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5036 {
5037 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5038 false = CONST0_RTX (mode);
5039 }
5040 #endif
5041
5042 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5043 &mode_arg0, &mode_arg1);
5044 const_arg0 = equiv_constant (folded_arg0);
5045 const_arg1 = equiv_constant (folded_arg1);
5046
5047 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5048 what kinds of things are being compared, so we can't do
5049 anything with this comparison. */
5050
5051 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5052 break;
5053
5054 /* If we do not now have two constants being compared, see if we
5055 can nevertheless deduce some things about the comparison. */
5056 if (const_arg0 == 0 || const_arg1 == 0)
5057 {
5058 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5059 constant? These aren't zero, but we don't know their sign. */
5060 if (const_arg1 == const0_rtx
5061 && (NONZERO_BASE_PLUS_P (folded_arg0)
5062 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5063 come out as 0. */
5064 || GET_CODE (folded_arg0) == SYMBOL_REF
5065 #endif
5066 || GET_CODE (folded_arg0) == LABEL_REF
5067 || GET_CODE (folded_arg0) == CONST))
5068 {
5069 if (code == EQ)
5070 return false;
5071 else if (code == NE)
5072 return true;
5073 }
5074
5075 /* See if the two operands are the same. We don't do this
5076 for IEEE floating-point since we can't assume x == x
5077 since x might be a NaN. */
5078
5079 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5080 || GET_MODE_CLASS (mode_arg0) != MODE_FLOAT)
5081 && (folded_arg0 == folded_arg1
5082 || (GET_CODE (folded_arg0) == REG
5083 && GET_CODE (folded_arg1) == REG
5084 && (reg_qty[REGNO (folded_arg0)]
5085 == reg_qty[REGNO (folded_arg1)]))
5086 || ((p0 = lookup (folded_arg0,
5087 (safe_hash (folded_arg0, mode_arg0)
5088 % NBUCKETS), mode_arg0))
5089 && (p1 = lookup (folded_arg1,
5090 (safe_hash (folded_arg1, mode_arg0)
5091 % NBUCKETS), mode_arg0))
5092 && p0->first_same_value == p1->first_same_value)))
5093 return ((code == EQ || code == LE || code == GE
5094 || code == LEU || code == GEU)
5095 ? true : false);
5096
5097 /* If FOLDED_ARG0 is a register, see if the comparison we are
5098 doing now is either the same as we did before or the reverse
5099 (we only check the reverse if not floating-point). */
5100 else if (GET_CODE (folded_arg0) == REG)
5101 {
5102 int qty = reg_qty[REGNO (folded_arg0)];
5103
5104 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5105 && (comparison_dominates_p (qty_comparison_code[qty], code)
5106 || (comparison_dominates_p (qty_comparison_code[qty],
5107 reverse_condition (code))
5108 && GET_MODE_CLASS (mode_arg0) == MODE_INT))
5109 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5110 || (const_arg1
5111 && rtx_equal_p (qty_comparison_const[qty],
5112 const_arg1))
5113 || (GET_CODE (folded_arg1) == REG
5114 && (reg_qty[REGNO (folded_arg1)]
5115 == qty_comparison_qty[qty]))))
5116 return (comparison_dominates_p (qty_comparison_code[qty],
5117 code)
5118 ? true : false);
5119 }
5120 }
5121 }
5122
5123 /* If we are comparing against zero, see if the first operand is
5124 equivalent to an IOR with a constant. If so, we may be able to
5125 determine the result of this comparison. */
5126
5127 if (const_arg1 == const0_rtx)
5128 {
5129 rtx y = lookup_as_function (folded_arg0, IOR);
5130 rtx inner_const;
5131
5132 if (y != 0
5133 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5134 && GET_CODE (inner_const) == CONST_INT
5135 && INTVAL (inner_const) != 0)
5136 {
5137 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5138 int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
5139 && (INTVAL (inner_const)
5140 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5141 rtx true = const_true_rtx, false = const0_rtx;
5142
5143 #ifdef FLOAT_STORE_FLAG_VALUE
5144 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5145 {
5146 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5147 false = CONST0_RTX (mode);
5148 }
5149 #endif
5150
5151 switch (code)
5152 {
5153 case EQ:
5154 return false;
5155 case NE:
5156 return true;
5157 case LT: case LE:
5158 if (has_sign)
5159 return true;
5160 break;
5161 case GT: case GE:
5162 if (has_sign)
5163 return false;
5164 break;
5165 }
5166 }
5167 }
5168
5169 new = simplify_relational_operation (code, mode_arg0,
5170 const_arg0 ? const_arg0 : folded_arg0,
5171 const_arg1 ? const_arg1 : folded_arg1);
5172 #ifdef FLOAT_STORE_FLAG_VALUE
5173 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5174 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5175 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode));
5176 #endif
5177 break;
5178
5179 case '2':
5180 case 'c':
5181 switch (code)
5182 {
5183 case PLUS:
5184 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5185 with that LABEL_REF as its second operand. If so, the result is
5186 the first operand of that MINUS. This handles switches with an
5187 ADDR_DIFF_VEC table. */
5188 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5189 {
5190 rtx y = lookup_as_function (folded_arg0, MINUS);
5191
5192 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5193 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5194 return XEXP (y, 0);
5195 }
5196 goto from_plus;
5197
5198 case MINUS:
5199 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5200 If so, produce (PLUS Z C2-C). */
5201 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5202 {
5203 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5204 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5205 return fold_rtx (plus_constant (y, -INTVAL (const_arg1)),
5206 NULL_RTX);
5207 }
5208
5209 /* ... fall through ... */
5210
5211 from_plus:
5212 case SMIN: case SMAX: case UMIN: case UMAX:
5213 case IOR: case AND: case XOR:
5214 case MULT: case DIV: case UDIV:
5215 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5216 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5217 is known to be of similar form, we may be able to replace the
5218 operation with a combined operation. This may eliminate the
5219 intermediate operation if every use is simplified in this way.
5220 Note that the similar optimization done by combine.c only works
5221 if the intermediate operation's result has only one reference. */
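/* For example, if register R is known to be (ashift S (const_int 2))
and we are folding (ashift R (const_int 3)), the two shift counts
combine (ASSOCIATE_CODE below is PLUS for shifts) and the result is
(ashift S (const_int 5)), bypassing R entirely. */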
5222
5223 if (GET_CODE (folded_arg0) == REG
5224 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5225 {
5226 int is_shift
5227 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5228 rtx y = lookup_as_function (folded_arg0, code);
5229 rtx inner_const;
5230 enum rtx_code associate_code;
5231 rtx new_const;
5232
5233 if (y == 0
5234 || 0 == (inner_const
5235 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5236 || GET_CODE (inner_const) != CONST_INT
5237 /* If we have compiled a statement like
5238 "if (x == (x & mask1))", and now are looking at
5239 "x & mask2", we will have a case where the first operand
5240 of Y is the same as our first operand. Unless we detect
5241 this case, an infinite loop will result. */
5242 || XEXP (y, 0) == folded_arg0)
5243 break;
5244
5245 /* Don't associate these operations if they are a PLUS with the
5246 same constant and it is a power of two. These might be doable
5247 with a pre- or post-increment. Similarly for two subtracts of
5248 identical powers of two with post decrement. */
5249
5250 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5251 && (0
5252 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5253 || exact_log2 (INTVAL (const_arg1)) >= 0
5254 #endif
5255 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5256 || exact_log2 (- INTVAL (const_arg1)) >= 0
5257 #endif
5258 ))
5259 break;
5260
5261 /* Compute the code used to compose the constants. For example,
5262 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5263
5264 associate_code
5265 = (code == MULT || code == DIV || code == UDIV ? MULT
5266 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5267
5268 new_const = simplify_binary_operation (associate_code, mode,
5269 const_arg1, inner_const);
5270
5271 if (new_const == 0)
5272 break;
5273
5274 /* If we are associating shift operations, don't let this
5275 produce a shift larger than the object. This could
5276 occur when we follow a sign-extend with a right shift on
5277 a machine that does a sign-extend as a pair of shifts. */
5278
5279 if (is_shift && GET_CODE (new_const) == CONST_INT
5280 && INTVAL (new_const) > GET_MODE_BITSIZE (mode))
5281 break;
5282
5283 y = copy_rtx (XEXP (y, 0));
5284
5285 /* If Y contains our first operand (the most common way this
5286 can happen is if Y is a MEM), we would go into an infinite
5287 loop if we tried to fold it. So don't in that case. */
5288
5289 if (! reg_mentioned_p (folded_arg0, y))
5290 y = fold_rtx (y, insn);
5291
5292 return cse_gen_binary (code, mode, y, new_const);
5293 }
5294 }
5295
5296 new = simplify_binary_operation (code, mode,
5297 const_arg0 ? const_arg0 : folded_arg0,
5298 const_arg1 ? const_arg1 : folded_arg1);
5299 break;
5300
5301 case 'o':
5302 /* (lo_sum (high X) X) is simply X. */
5303 if (code == LO_SUM && const_arg0 != 0
5304 && GET_CODE (const_arg0) == HIGH
5305 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5306 return const_arg1;
5307 break;
5308
5309 case '3':
5310 case 'b':
5311 new = simplify_ternary_operation (code, mode, mode_arg0,
5312 const_arg0 ? const_arg0 : folded_arg0,
5313 const_arg1 ? const_arg1 : folded_arg1,
5314 const_arg2 ? const_arg2 : XEXP (x, 2));
5315 break;
5316 }
5317
5318 return new ? new : x;
5319 }
5320 \f
5321 /* Return a constant value currently equivalent to X.
5322 Return 0 if we don't know one. */
5323
5324 static rtx
5325 equiv_constant (x)
5326 rtx x;
5327 {
5328 if (GET_CODE (x) == REG
5329 && REGNO_QTY_VALID_P (REGNO (x))
5330 && qty_const[reg_qty[REGNO (x)]])
5331 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5332
5333 if (x != 0 && CONSTANT_P (x))
5334 return x;
5335
5336 /* If X is a MEM, try to fold it outside the context of any insn to see if
5337 it might be equivalent to a constant. That handles the case where it
5338 is a constant-pool reference. Then try to look it up in the hash table
5339 in case it is something whose value we have seen before. */
5340
5341 if (GET_CODE (x) == MEM)
5342 {
5343 struct table_elt *elt;
5344
5345 x = fold_rtx (x, NULL_RTX);
5346 if (CONSTANT_P (x))
5347 return x;
5348
5349 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5350 if (elt == 0)
5351 return 0;
5352
5353 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5354 if (elt->is_const && CONSTANT_P (elt->exp))
5355 return elt->exp;
5356 }
5357
5358 return 0;
5359 }
5360 \f
5361 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5362 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5363 least-significant part of X.
5364 MODE specifies how big a part of X to return.
5365
5366 If the requested operation cannot be done, 0 is returned.
5367
5368 This is similar to gen_lowpart in emit-rtl.c. */
5369
5370 rtx
5371 gen_lowpart_if_possible (mode, x)
5372 enum machine_mode mode;
5373 register rtx x;
5374 {
5375 rtx result = gen_lowpart_common (mode, x);
5376
5377 if (result)
5378 return result;
5379 else if (GET_CODE (x) == MEM)
5380 {
5381 /* This is the only other case we handle. */
5382 register int offset = 0;
5383 rtx new;
5384
5385 #if WORDS_BIG_ENDIAN
5386 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5387 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5388 #endif
5389 #if BYTES_BIG_ENDIAN
5390 /* Adjust the address so that the address-after-the-data
5391 is unchanged. */
5392 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5393 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5394 #endif
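/* Illustrative example (assuming a 4-byte UNITS_PER_WORD): taking the
QImode lowpart of an SImode MEM with both WORDS_BIG_ENDIAN and
BYTES_BIG_ENDIAN set gives 0 from the word adjustment, then
offset -= 1 - 4, so offset == 3: the least significant byte. */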
5395 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5396 if (! memory_address_p (mode, XEXP (new, 0)))
5397 return 0;
5398 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5399 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5400 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5401 return new;
5402 }
5403 else
5404 return 0;
5405 }
5406 \f
5407 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5408 branch. It will be zero if not.
5409
5410 In certain cases, this can cause us to add an equivalence. For example,
5411 if we are following the taken case of
5412 if (i == 2)
5413 we can add the fact that `i' and `2' are now equivalent.
5414
5415 In any case, we can record that this comparison was passed. If the same
5416 comparison is seen later, we will know its value. */
5417
5418 static void
5419 record_jump_equiv (insn, taken)
5420 rtx insn;
5421 int taken;
5422 {
5423 int cond_known_true;
5424 rtx op0, op1;
5425 enum machine_mode mode, mode0, mode1;
5426 int reversed_nonequality = 0;
5427 enum rtx_code code;
5428
5429 /* Ensure this is the right kind of insn. */
5430 if (! condjump_p (insn) || simplejump_p (insn))
5431 return;
5432
5433 /* See if this jump condition is known true or false. */
5434 if (taken)
5435 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5436 else
5437 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5438
5439 /* Get the type of comparison being done and the operands being compared.
5440 If we had to reverse a non-equality condition, record that fact so we
5441 know that it isn't valid for floating-point. */
5442 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5443 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5444 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5445
5446 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5447 if (! cond_known_true)
5448 {
5449 reversed_nonequality = (code != EQ && code != NE);
5450 code = reverse_condition (code);
5451 }
5452
5453 /* The mode is the mode of the non-constant. */
5454 mode = mode0;
5455 if (mode1 != VOIDmode)
5456 mode = mode1;
5457
5458 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5459 }
5460
5461 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5462 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5463 Make any useful entries we can with that information. Called from
5464 above function and called recursively. */
5465
5466 static void
5467 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5468 enum rtx_code code;
5469 enum machine_mode mode;
5470 rtx op0, op1;
5471 int reversed_nonequality;
5472 {
5473 int op0_hash_code, op1_hash_code;
5474 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5475 struct table_elt *op0_elt, *op1_elt;
5476
5477 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5478 we know that they are also equal in the smaller mode (this is also
5479 true for all smaller modes whether or not there is a SUBREG, but
5480 is not worth testing for when there is no SUBREG.) */
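
/* For example (hypothetical pseudo-registers): if the paradoxical
(subreg:DI (reg:SI 70) 0) is known equal to (reg:DI 71), the
recursive call below records that (reg:SI 70) equals the SImode
low part of (reg:DI 71). */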
5481
5482 if (code == EQ && GET_CODE (op0) == SUBREG
5483 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5484 {
5485 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5486 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5487
5488 record_jump_cond (code, mode, SUBREG_REG (op0),
5489 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5490 reversed_nonequality);
5491 }
5492
5493 if (code == EQ && GET_CODE (op1) == SUBREG
5494 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5495 {
5496 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5497 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5498
5499 record_jump_cond (code, mode, SUBREG_REG (op1),
5500 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5501 reversed_nonequality);
5502 }
5503
5504 /* Similarly, if this is an NE comparison, and either is a SUBREG
5505 making a smaller mode, we know the whole thing is also NE. */
5506
5507 if (code == NE && GET_CODE (op0) == SUBREG
5508 && subreg_lowpart_p (op0)
5509 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5510 {
5511 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5512 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5513
5514 record_jump_cond (code, mode, SUBREG_REG (op0),
5515 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5516 reversed_nonequality);
5517 }
5518
5519 if (code == NE && GET_CODE (op1) == SUBREG
5520 && subreg_lowpart_p (op1)
5521 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5522 {
5523 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5524 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5525
5526 record_jump_cond (code, mode, SUBREG_REG (op1),
5527 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5528 reversed_nonequality);
5529 }
5530
5531 /* Hash both operands. */
5532
5533 do_not_record = 0;
5534 hash_arg_in_memory = 0;
5535 hash_arg_in_struct = 0;
5536 op0_hash_code = HASH (op0, mode);
5537 op0_in_memory = hash_arg_in_memory;
5538 op0_in_struct = hash_arg_in_struct;
5539
5540 if (do_not_record)
5541 return;
5542
5543 do_not_record = 0;
5544 hash_arg_in_memory = 0;
5545 hash_arg_in_struct = 0;
5546 op1_hash_code = HASH (op1, mode);
5547 op1_in_memory = hash_arg_in_memory;
5548 op1_in_struct = hash_arg_in_struct;
5549
5550 if (do_not_record)
5551 return;
5552
5553 /* Look up both operands. */
5554 op0_elt = lookup (op0, op0_hash_code, mode);
5555 op1_elt = lookup (op1, op1_hash_code, mode);
5556
5557 /* If we aren't setting two things equal, all we can do is save this
5558 comparison. Similarly if this is floating-point. In the latter
5559 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5560 If we record the equality, we might inadvertently delete code
5561 whose intent was to change -0 to +0. */
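
/* For instance, the source sequence `if (d == 0.0) d = 0.0;' relies
on the store to turn a possible -0.0 into +0.0; recording the
equivalence could let that store be deleted. */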
5562
5563 if (code != EQ || GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
5564 {
5565 /* If we reversed a floating-point comparison, if OP0 is not a
5566 register, or if OP1 is neither a register nor a constant, we can't
5567 do anything. */
5568
5569 if (GET_CODE (op1) != REG)
5570 op1 = equiv_constant (op1);
5571
5572 if ((reversed_nonequality && GET_MODE_CLASS (mode) != MODE_INT)
5573 || GET_CODE (op0) != REG || op1 == 0)
5574 return;
5575
5576 /* Put OP0 in the hash table if it isn't already. This gives it a
5577 new quantity number. */
5578 if (op0_elt == 0)
5579 {
5580 if (insert_regs (op0, NULL_PTR, 0))
5581 {
5582 rehash_using_reg (op0);
5583 op0_hash_code = HASH (op0, mode);
5584 }
5585
5586 op0_elt = insert (op0, NULL_PTR, op0_hash_code, mode);
5587 op0_elt->in_memory = op0_in_memory;
5588 op0_elt->in_struct = op0_in_struct;
5589 }
5590
5591 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5592 if (GET_CODE (op1) == REG)
5593 {
5594 /* Put OP1 in the hash table so it gets a new quantity number. */
5595 if (op1_elt == 0)
5596 {
5597 if (insert_regs (op1, NULL_PTR, 0))
5598 {
5599 rehash_using_reg (op1);
5600 op1_hash_code = HASH (op1, mode);
5601 }
5602
5603 op1_elt = insert (op1, NULL_PTR, op1_hash_code, mode);
5604 op1_elt->in_memory = op1_in_memory;
5605 op1_elt->in_struct = op1_in_struct;
5606 }
5607
5608 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5609 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5610 }
5611 else
5612 {
5613 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5614 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5615 }
5616
5617 return;
5618 }
5619
5620 /* If both are equivalent, merge the two classes. Save this class for
5621 `cse_set_around_loop'. */
5622 if (op0_elt && op1_elt)
5623 {
5624 merge_equiv_classes (op0_elt, op1_elt);
5625 last_jump_equiv_class = op0_elt;
5626 }
5627
5628 /* For whichever side doesn't have an equivalence, make one. */
5629 if (op0_elt == 0)
5630 {
5631 if (insert_regs (op0, op1_elt, 0))
5632 {
5633 rehash_using_reg (op0);
5634 op0_hash_code = HASH (op0, mode);
5635 }
5636
5637 op0_elt = insert (op0, op1_elt, op0_hash_code, mode);
5638 op0_elt->in_memory = op0_in_memory;
5639 op0_elt->in_struct = op0_in_struct;
5640 last_jump_equiv_class = op0_elt;
5641 }
5642
5643 if (op1_elt == 0)
5644 {
5645 if (insert_regs (op1, op0_elt, 0))
5646 {
5647 rehash_using_reg (op1);
5648 op1_hash_code = HASH (op1, mode);
5649 }
5650
5651 op1_elt = insert (op1, op0_elt, op1_hash_code, mode);
5652 op1_elt->in_memory = op1_in_memory;
5653 op1_elt->in_struct = op1_in_struct;
5654 last_jump_equiv_class = op1_elt;
5655 }
5656 }
5657 \f
5658 /* CSE processing for one instruction.
5659 First simplify sources and addresses of all assignments
5660 in the instruction, using previously-computed equivalent values.
5661 Then install the new sources and destinations in the table
5662 of available values.
5663
5664 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5665 the insn. */
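
/* For illustration (hypothetical pseudo-registers): if an earlier insn
established that (reg:SI 101) holds (const_int 8), then in
(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
the source folds to (const_int 12), which replaces the PLUS if that
substitution is valid for the insn and no more costly. */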
5666
5667 /* Data on one SET contained in the instruction. */
5668
5669 struct set
5670 {
5671 /* The SET rtx itself. */
5672 rtx rtl;
5673 /* The SET_SRC of the rtx (the original value, if it is changing). */
5674 rtx src;
5675 /* The hash-table element for the SET_SRC of the SET. */
5676 struct table_elt *src_elt;
5677 /* Hash code for the SET_SRC. */
5678 int src_hash_code;
5679 /* Hash code for the SET_DEST. */
5680 int dest_hash_code;
5681 /* The SET_DEST, with SUBREG, etc., stripped. */
5682 rtx inner_dest;
5683 /* Place where the pointer to the INNER_DEST was found. */
5684 rtx *inner_dest_loc;
5685 /* Nonzero if the SET_SRC is in memory. */
5686 char src_in_memory;
5687 /* Nonzero if the SET_SRC is in a structure. */
5688 char src_in_struct;
5689 /* Nonzero if the SET_SRC contains something
5690 whose value cannot be predicted and understood. */
5691 char src_volatile;
5692 /* Original machine mode, in case it becomes a CONST_INT. */
5693 enum machine_mode mode;
5694 /* A constant equivalent for SET_SRC, if any. */
5695 rtx src_const;
5696 /* Hash code of constant equivalent for SET_SRC. */
5697 int src_const_hash_code;
5698 /* Table entry for constant equivalent for SET_SRC, if any. */
5699 struct table_elt *src_const_elt;
5700 };
5701
5702 static void
5703 cse_insn (insn, in_libcall_block)
5704 rtx insn;
5705 int in_libcall_block;
5706 {
5707 register rtx x = PATTERN (insn);
5708 rtx tem;
5709 register int i;
5710 register int n_sets = 0;
5711
5712 /* Records what this insn does to set CC0. */
5713 rtx this_insn_cc0 = 0;
5714 enum machine_mode this_insn_cc0_mode;
5715 struct write_data writes_memory;
5716 static struct write_data init = {0, 0, 0, 0};
5717
5718 rtx src_eqv = 0;
5719 struct table_elt *src_eqv_elt = 0;
5720 int src_eqv_volatile;
5721 int src_eqv_in_memory;
5722 int src_eqv_in_struct;
5723 int src_eqv_hash_code;
5724
5725 struct set *sets;
5726
5727 this_insn = insn;
5728 writes_memory = init;
5729
5730 /* Find all the SETs and CLOBBERs in this instruction.
5731 Record all the SETs in the array `sets' and count them.
5732 Also determine whether there is a CLOBBER that invalidates
5733 all memory references, or all references at varying addresses. */
5734
5735 if (GET_CODE (x) == SET)
5736 {
5737 sets = (struct set *) alloca (sizeof (struct set));
5738 sets[0].rtl = x;
5739
5740 /* Ignore SETs that are unconditional jumps.
5741 They never need cse processing, so this does not hurt.
5742 The reason is not efficiency but rather
5743 so that we can test at the end for instructions
5744 that have been simplified to unconditional jumps
5745 and not be misled by unchanged instructions
5746 that were unconditional jumps to begin with. */
5747 if (SET_DEST (x) == pc_rtx
5748 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5749 ;
5750
5751 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5752 The hard function value register is used only once, to copy to
5753 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5754 Ensure we invalidate the destination register. On the 80386 no
5755 other code would invalidate it since it is a fixed_reg.
5756 We need not check the return of apply_change_group; see canon_reg. */
5757
5758 else if (GET_CODE (SET_SRC (x)) == CALL)
5759 {
5760 canon_reg (SET_SRC (x), insn);
5761 apply_change_group ();
5762 fold_rtx (SET_SRC (x), insn);
5763 invalidate (SET_DEST (x));
5764 }
5765 else
5766 n_sets = 1;
5767 }
5768 else if (GET_CODE (x) == PARALLEL)
5769 {
5770 register int lim = XVECLEN (x, 0);
5771
5772 sets = (struct set *) alloca (lim * sizeof (struct set));
5773
5774 /* Find all regs explicitly clobbered in this insn,
5775 and ensure they are not replaced with any other regs
5776 elsewhere in this insn.
5777 When a reg that is clobbered is also used for input,
5778 we should presume that that is for a reason,
5779 and we should not substitute some other register
5780 which is not supposed to be clobbered.
5781 Therefore, this loop cannot be merged into the one below
5782 because a CALL may precede a CLOBBER and refer to the
5783 value clobbered. We must not let a canonicalization do
5784 anything in that case. */
5785 for (i = 0; i < lim; i++)
5786 {
5787 register rtx y = XVECEXP (x, 0, i);
5788 if (GET_CODE (y) == CLOBBER
5789 && (GET_CODE (XEXP (y, 0)) == REG
5790 || GET_CODE (XEXP (y, 0)) == SUBREG))
5791 invalidate (XEXP (y, 0));
5792 }
5793
5794 for (i = 0; i < lim; i++)
5795 {
5796 register rtx y = XVECEXP (x, 0, i);
5797 if (GET_CODE (y) == SET)
5798 {
5799 /* As above, we ignore unconditional jumps and call-insns and
5800 ignore the result of apply_change_group. */
5801 if (GET_CODE (SET_SRC (y)) == CALL)
5802 {
5803 canon_reg (SET_SRC (y), insn);
5804 apply_change_group ();
5805 fold_rtx (SET_SRC (y), insn);
5806 invalidate (SET_DEST (y));
5807 }
5808 else if (SET_DEST (y) == pc_rtx
5809 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5810 ;
5811 else
5812 sets[n_sets++].rtl = y;
5813 }
5814 else if (GET_CODE (y) == CLOBBER)
5815 {
5816 /* If we clobber memory, take note of that,
5817 and canon the address.
5818 This does nothing when a register is clobbered
5819 because we have already invalidated the reg. */
5820 if (GET_CODE (XEXP (y, 0)) == MEM)
5821 {
5822 canon_reg (XEXP (y, 0), NULL_RTX);
5823 note_mem_written (XEXP (y, 0), &writes_memory);
5824 }
5825 }
5826 else if (GET_CODE (y) == USE
5827 && ! (GET_CODE (XEXP (y, 0)) == REG
5828 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
5829 canon_reg (y, NULL_RTX);
5830 else if (GET_CODE (y) == CALL)
5831 {
5832 /* The result of apply_change_group can be ignored; see
5833 canon_reg. */
5834 canon_reg (y, insn);
5835 apply_change_group ();
5836 fold_rtx (y, insn);
5837 }
5838 }
5839 }
5840 else if (GET_CODE (x) == CLOBBER)
5841 {
5842 if (GET_CODE (XEXP (x, 0)) == MEM)
5843 {
5844 canon_reg (XEXP (x, 0), NULL_RTX);
5845 note_mem_written (XEXP (x, 0), &writes_memory);
5846 }
5847 }
5848
5849 /* Canonicalize a USE of a pseudo register or memory location. */
5850 else if (GET_CODE (x) == USE
5851 && ! (GET_CODE (XEXP (x, 0)) == REG
5852 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5853 canon_reg (XEXP (x, 0), NULL_RTX);
5854 else if (GET_CODE (x) == CALL)
5855 {
5856 /* The result of apply_change_group can be ignored; see canon_reg. */
5857 canon_reg (x, insn);
5858 apply_change_group ();
5859 fold_rtx (x, insn);
5860 }
5861
5862 if (n_sets == 1 && REG_NOTES (insn) != 0)
5863 {
5864 /* Store the equivalent value in SRC_EQV, if different. */
5865 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5866
5867 if (tem && ! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
5868 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
5869 }
5870
5871 /* Canonicalize sources and addresses of destinations.
5872 We do this in a separate pass to avoid problems when a MATCH_DUP is
5873 present in the insn pattern. In that case, we want to ensure that
5874 we don't break the duplicate nature of the pattern. So we will replace
5875 both operands at the same time. Otherwise, we would fail to find an
5876 equivalent substitution in the loop calling validate_change below.
5877
5878 We used to suppress canonicalization of DEST if it appears in SRC,
5879 but we don't do this any more. */
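
/* E.g. if the pattern contains a (match_dup ...) of an operand, both
occurrences must be replaced in a single validate_change group;
replacing only one would leave an insn that recog rejects. */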
5880
5881 for (i = 0; i < n_sets; i++)
5882 {
5883 rtx dest = SET_DEST (sets[i].rtl);
5884 rtx src = SET_SRC (sets[i].rtl);
5885 rtx new = canon_reg (src, insn);
5886
5887 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
5888 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5889 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5890 || insn_n_dups[recog_memoized (insn)] > 0)
5891 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5892 else
5893 SET_SRC (sets[i].rtl) = new;
5894
5895 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5896 {
5897 validate_change (insn, &XEXP (dest, 1),
5898 canon_reg (XEXP (dest, 1), insn), 1);
5899 validate_change (insn, &XEXP (dest, 2),
5900 canon_reg (XEXP (dest, 2), insn), 1);
5901 }
5902
5903 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5904 || GET_CODE (dest) == ZERO_EXTRACT
5905 || GET_CODE (dest) == SIGN_EXTRACT)
5906 dest = XEXP (dest, 0);
5907
5908 if (GET_CODE (dest) == MEM)
5909 canon_reg (dest, insn);
5910 }
5911
5912 /* Now that we have done all the replacements, we can apply the change
5913 group and see if they all work. Note that this will cause some
5914 canonicalizations that would have worked individually not to be applied
5915 because some other canonicalization didn't work, but this should not
5916 occur often.
5917
5918 The result of apply_change_group can be ignored; see canon_reg. */
5919
5920 apply_change_group ();
5921
5922 /* Set sets[i].src_elt to the class each source belongs to.
5923 Detect assignments from or to volatile things
5924 and set sets[i].rtl to zero so they will be ignored
5925 in the rest of this function.
5926
5927 Nothing in this loop changes the hash table or the register chains. */
5928
5929 for (i = 0; i < n_sets; i++)
5930 {
5931 register rtx src, dest;
5932 register rtx src_folded;
5933 register struct table_elt *elt = 0, *p;
5934 enum machine_mode mode;
5935 rtx src_eqv_here;
5936 rtx src_const = 0;
5937 rtx src_related = 0;
5938 struct table_elt *src_const_elt = 0;
5939 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
5940 int src_related_cost = 10000, src_elt_cost = 10000;
5941 /* Set non-zero if we need to call force_const_mem on the
5942 contents of src_folded before using it. */
5943 int src_folded_force_flag = 0;
5944
5945 dest = SET_DEST (sets[i].rtl);
5946 src = SET_SRC (sets[i].rtl);
5947
5948 /* If SRC is a constant that has no machine mode,
5949 hash it with the destination's machine mode.
5950 This way we can keep different modes separate. */
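
/* E.g. (const_int 4) carries no mode, so in
(set (reg:HI 90) (const_int 4))
it is hashed in HImode, distinct from an SImode use of the same
constant. (Register number hypothetical.) */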
5951
5952 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5953 sets[i].mode = mode;
5954
5955 if (src_eqv)
5956 {
5957 enum machine_mode eqvmode = mode;
5958 if (GET_CODE (dest) == STRICT_LOW_PART)
5959 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5960 do_not_record = 0;
5961 hash_arg_in_memory = 0;
5962 hash_arg_in_struct = 0;
5963 src_eqv = fold_rtx (src_eqv, insn);
5964 src_eqv_hash_code = HASH (src_eqv, eqvmode);
5965
5966 /* Find the equivalence class for the equivalent expression. */
5967
5968 if (!do_not_record)
5969 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, eqvmode);
5970
5971 src_eqv_volatile = do_not_record;
5972 src_eqv_in_memory = hash_arg_in_memory;
5973 src_eqv_in_struct = hash_arg_in_struct;
5974 }
5975
5976 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5977 value of the INNER register, not the destination. So it is not
5978 a legal substitution for the source. But save it for later. */
5979 if (GET_CODE (dest) == STRICT_LOW_PART)
5980 src_eqv_here = 0;
5981 else
5982 src_eqv_here = src_eqv;
5983
5984 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5985 simplified result, which may not necessarily be valid. */
5986 src_folded = fold_rtx (src, insn);
5987
5988 /* If storing a constant in a bitfield, pre-truncate the constant
5989 so we will be able to record it later. */
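/* E.g. storing (const_int 18) in a 4-bit field really stores
18 & 15 == 2, so (const_int 2) is what should be recorded;
the test below performs that truncation. */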
5990 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5991 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5992 {
5993 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5994
5995 if (GET_CODE (src) == CONST_INT
5996 && GET_CODE (width) == CONST_INT
5997 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5998 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5999 src_folded
6000 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6001 << INTVAL (width)) - 1));
6002 }
6003
6004 /* Compute SRC's hash code, and also notice if it
6005 should not be recorded at all. In that case,
6006 prevent any further processing of this assignment. */
6007 do_not_record = 0;
6008 hash_arg_in_memory = 0;
6009 hash_arg_in_struct = 0;
6010
6011 sets[i].src = src;
6012 sets[i].src_hash_code = HASH (src, mode);
6013 sets[i].src_volatile = do_not_record;
6014 sets[i].src_in_memory = hash_arg_in_memory;
6015 sets[i].src_in_struct = hash_arg_in_struct;
6016
6017 #if 0
6018 /* It is no longer clear why we used to do this, but it doesn't
6019 appear to still be needed. So let's try without it since this
6020 code hurts cse'ing widened ops. */
6021 /* If source is a perverse subreg (such as QI treated as an SI),
6022 treat it as volatile. It may do the work of an SI in one context
6023 where the extra bits are not being used, but cannot replace an SI
6024 in general. */
6025 if (GET_CODE (src) == SUBREG
6026 && (GET_MODE_SIZE (GET_MODE (src))
6027 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6028 sets[i].src_volatile = 1;
6029 #endif
6030
6031 /* Locate all possible equivalent forms for SRC. Try to replace
6032 SRC in the insn with each cheaper equivalent.
6033
6034 We have the following types of equivalents: SRC itself, a folded
6035 version, a value given in a REG_EQUAL note, or a value related
6036 to a constant.
6037
6038 Each of these equivalents may be part of an additional class
6039 of equivalents (if more than one is in the table, they must be in
6040 the same class; we check for this).
6041
6042 If the source is volatile, we don't do any table lookups.
6043
6044 We note any constant equivalent for possible later use in a
6045 REG_NOTE. */
6046
6047 if (!sets[i].src_volatile)
6048 elt = lookup (src, sets[i].src_hash_code, mode);
6049
6050 sets[i].src_elt = elt;
6051
6052 if (elt && src_eqv_here && src_eqv_elt)
6053 {
6054 if (elt->first_same_value != src_eqv_elt->first_same_value)
6055 {
6056 /* The REG_EQUAL is indicating that two formerly distinct
6057 classes are now equivalent. So merge them. */
6058 merge_equiv_classes (elt, src_eqv_elt);
6059 src_eqv_hash_code = HASH (src_eqv, elt->mode);
6060 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, elt->mode);
6061 }
6062
6063 src_eqv_here = 0;
6064 }
6065
6066 else if (src_eqv_elt)
6067 elt = src_eqv_elt;
6068
6069 /* Try to find a constant somewhere and record it in `src_const'.
6070 Record its table element, if any, in `src_const_elt'. Look in
6071 any known equivalences first. (If the constant is not in the
6072 table, also set `sets[i].src_const_hash_code'). */
6073 if (elt)
6074 for (p = elt->first_same_value; p; p = p->next_same_value)
6075 if (p->is_const)
6076 {
6077 src_const = p->exp;
6078 src_const_elt = elt;
6079 break;
6080 }
6081
6082 if (src_const == 0
6083 && (CONSTANT_P (src_folded)
6084 /* Consider (minus (label_ref L1) (label_ref L2)) as
6085 "constant" here so we will record it. This allows us
6086 to fold switch statements when an ADDR_DIFF_VEC is used. */
6087 || (GET_CODE (src_folded) == MINUS
6088 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6089 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6090 src_const = src_folded, src_const_elt = elt;
6091 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6092 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6093
6094 /* If we don't know if the constant is in the table, get its
6095 hash code and look it up. */
6096 if (src_const && src_const_elt == 0)
6097 {
6098 sets[i].src_const_hash_code = HASH (src_const, mode);
6099 src_const_elt = lookup (src_const, sets[i].src_const_hash_code,
6100 mode);
6101 }
6102
6103 sets[i].src_const = src_const;
6104 sets[i].src_const_elt = src_const_elt;
6105
6106 /* If the constant and our source are both in the table, mark them as
6107 equivalent. Otherwise, if a constant is in the table but the source
6108 isn't, set ELT to it. */
6109 if (src_const_elt && elt
6110 && src_const_elt->first_same_value != elt->first_same_value)
6111 merge_equiv_classes (elt, src_const_elt);
6112 else if (src_const_elt && elt == 0)
6113 elt = src_const_elt;
6114
6115 /* See if there is a register linearly related to a constant
6116 equivalent of SRC. */
6117 if (src_const
6118 && (GET_CODE (src_const) == CONST
6119 || (src_const_elt && src_const_elt->related_value != 0)))
6120 {
6121 src_related = use_related_value (src_const, src_const_elt);
6122 if (src_related)
6123 {
6124 struct table_elt *src_related_elt
6125 = lookup (src_related, HASH (src_related, mode), mode);
6126 if (src_related_elt && elt)
6127 {
6128 if (elt->first_same_value
6129 != src_related_elt->first_same_value)
6130 /* This can occur when we previously saw a CONST
6131 involving a SYMBOL_REF and then see the SYMBOL_REF
6132 twice. Merge the involved classes. */
6133 merge_equiv_classes (elt, src_related_elt);
6134
6135 src_related = 0;
6136 src_related_elt = 0;
6137 }
6138 else if (src_related_elt && elt == 0)
6139 elt = src_related_elt;
6140 }
6141 }
6142
6143 /* See if we have a CONST_INT that is already in a register in a
6144 wider mode. */
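
/* E.g. if (reg:SI 75) is already known to hold (const_int 3), an
HImode use of (const_int 3) can become the HImode low part of
that register. (Register number hypothetical.) */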
6145
6146 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6147 && GET_MODE_CLASS (mode) == MODE_INT
6148 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6149 {
6150 enum machine_mode wider_mode;
6151
6152 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6153 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6154 && src_related == 0;
6155 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6156 {
6157 struct table_elt *const_elt
6158 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6159
6160 if (const_elt == 0)
6161 continue;
6162
6163 for (const_elt = const_elt->first_same_value;
6164 const_elt; const_elt = const_elt->next_same_value)
6165 if (GET_CODE (const_elt->exp) == REG)
6166 {
6167 src_related = gen_lowpart_if_possible (mode,
6168 const_elt->exp);
6169 break;
6170 }
6171 }
6172 }
6173
6174 /* Another possibility is that we have an AND with a constant in
6175 a mode narrower than a word. If so, it might have been generated
6176 as part of an "if" which would narrow the AND. If we already
6177 have done the AND in a wider mode, we can use a SUBREG of that
6178 value. */
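
/* Sketch (hypothetical registers): if
(and:SI (reg:SI 80) (const_int 255))
is known to be held in (reg:SI 81), then
(and:QI (subreg:QI (reg:SI 80) 0) (const_int 255))
can use (subreg:QI (reg:SI 81) 0) instead. */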
6179
6180 if (flag_expensive_optimizations && ! src_related
6181 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6182 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6183 {
6184 enum machine_mode tmode;
6185 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6186
6187 for (tmode = GET_MODE_WIDER_MODE (mode);
6188 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6189 tmode = GET_MODE_WIDER_MODE (tmode))
6190 {
6191 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6192 struct table_elt *larger_elt;
6193
6194 if (inner)
6195 {
6196 PUT_MODE (new_and, tmode);
6197 XEXP (new_and, 0) = inner;
6198 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6199 if (larger_elt == 0)
6200 continue;
6201
6202 for (larger_elt = larger_elt->first_same_value;
6203 larger_elt; larger_elt = larger_elt->next_same_value)
6204 if (GET_CODE (larger_elt->exp) == REG)
6205 {
6206 src_related
6207 = gen_lowpart_if_possible (mode, larger_elt->exp);
6208 break;
6209 }
6210
6211 if (src_related)
6212 break;
6213 }
6214 }
6215 }
6216
6217 if (src == src_folded)
6218 src_folded = 0;
6219
6220 /* At this point, ELT, if non-zero, points to a class of expressions
6221 equivalent to the source of this SET; and SRC, SRC_EQV, SRC_FOLDED,
6222 and SRC_RELATED, if non-zero, each contain additional equivalent
6223 expressions. Prune these latter expressions by deleting expressions
6224 already in the equivalence class.
6225
6226 Check for an equivalent identical to the destination. If found,
6227 this is the preferred equivalent since it will likely lead to
6228 elimination of the insn. Indicate this by placing it in
6229 `src_related'. */
6230
6231 if (elt) elt = elt->first_same_value;
6232 for (p = elt; p; p = p->next_same_value)
6233 {
6234 enum rtx_code code = GET_CODE (p->exp);
6235
6236 /* If the expression is not valid, ignore it. Then we do not
6237 have to check for validity below. In most cases, we can use
6238 `rtx_equal_p', since canonicalization has already been done. */
6239 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6240 continue;
6241
6242 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6243 src = 0;
6244 else if (src_folded && GET_CODE (src_folded) == code
6245 && rtx_equal_p (src_folded, p->exp))
6246 src_folded = 0;
6247 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6248 && rtx_equal_p (src_eqv_here, p->exp))
6249 src_eqv_here = 0;
6250 else if (src_related && GET_CODE (src_related) == code
6251 && rtx_equal_p (src_related, p->exp))
6252 src_related = 0;
6253
6254 /* If this is the same as the destination of the insn, we want
6255 to prefer it. Copy it to src_related. The code below will
6256 then give it a negative cost. */
6257 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6258 src_related = dest;
6259
6260 }
6261
6262 /* Find the cheapest valid equivalent, trying all the available
6263 possibilities. Prefer items not in the hash table to ones
6264 that are when they are equal cost. Note that we can never
6265 worsen an insn as the current contents will also succeed.
6266 If we find an equivalent identical to the destination, use it as best,
6267 since this insn will probably be eliminated in that case. */
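/* E.g. if DEST itself already holds the value being computed, it
is the preferred equivalent (given cost -1 below) and the insn
becomes a no-op set. */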
6268 if (src)
6269 {
6270 if (rtx_equal_p (src, dest))
6271 src_cost = -1;
6272 else
6273 src_cost = COST (src);
6274 }
6275
6276 if (src_eqv_here)
6277 {
6278 if (rtx_equal_p (src_eqv_here, dest))
6279 src_eqv_cost = -1;
6280 else
6281 src_eqv_cost = COST (src_eqv_here);
6282 }
6283
6284 if (src_folded)
6285 {
6286 if (rtx_equal_p (src_folded, dest))
6287 src_folded_cost = -1;
6288 else
6289 src_folded_cost = COST (src_folded);
6290 }
6291
6292 if (src_related)
6293 {
6294 if (rtx_equal_p (src_related, dest))
6295 src_related_cost = -1;
6296 else
6297 src_related_cost = COST (src_related);
6298 }
6299
6300 /* If this was an indirect jump insn, a known label will really be
6301 cheaper even though it looks more expensive. */
6302 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6303 src_folded = src_const, src_folded_cost = -1;
6304
6305 /* Terminate loop when replacement made. This must terminate since
6306 the current contents will be tested and will always be valid. */
6307 while (1)
6308 {
6309 rtx trial;
6310
6311 /* Skip invalid entries. */
6312 while (elt && GET_CODE (elt->exp) != REG
6313 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6314 elt = elt->next_same_value;
6315
6316 if (elt) src_elt_cost = elt->cost;
6317
6318 /* Find cheapest and skip it for the next time. For items
6319 of equal cost, use this order:
6320 src_folded, src, src_eqv, src_related and hash table entry. */
6321 if (src_folded_cost <= src_cost
6322 && src_folded_cost <= src_eqv_cost
6323 && src_folded_cost <= src_related_cost
6324 && src_folded_cost <= src_elt_cost)
6325 {
6326 trial = src_folded, src_folded_cost = 10000;
6327 if (src_folded_force_flag)
6328 trial = force_const_mem (mode, trial);
6329 }
6330 else if (src_cost <= src_eqv_cost
6331 && src_cost <= src_related_cost
6332 && src_cost <= src_elt_cost)
6333 trial = src, src_cost = 10000;
6334 else if (src_eqv_cost <= src_related_cost
6335 && src_eqv_cost <= src_elt_cost)
6336 trial = src_eqv_here, src_eqv_cost = 10000;
6337 else if (src_related_cost <= src_elt_cost)
6338 trial = src_related, src_related_cost = 10000;
6339 else
6340 {
6341 trial = copy_rtx (elt->exp);
6342 elt = elt->next_same_value;
6343 src_elt_cost = 10000;
6344 }
6345
6346 /* We don't normally have an insn matching (set (pc) (pc)), so
6347 check for this separately here. We will delete such an
6348 insn below.
6349
6350 Tablejump insns contain a USE of the table, so simply replacing
6351 the operand with the constant won't match. It is nevertheless an
6352 unconditional branch, and therefore valid. Just
6353 insert the substitution here and we will delete and re-emit
6354 the insn later. */
6355
6356 if (n_sets == 1 && dest == pc_rtx
6357 && (trial == pc_rtx
6358 || (GET_CODE (trial) == LABEL_REF
6359 && ! condjump_p (insn))))
6360 {
6361 /* If TRIAL is a label in front of a jump table, we are
6362 really falling through the switch (this is how casesi
6363 insns work), so we must branch around the table. */
6364 if (GET_CODE (trial) == CODE_LABEL
6365 && NEXT_INSN (trial) != 0
6366 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6367 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6368 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6369
6370 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6371
6372 SET_SRC (sets[i].rtl) = trial;
6373 break;
6374 }
6375
6376 /* Look for a substitution that makes a valid insn. */
6377 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6378 {
6379 /* The result of apply_change_group can be ignored; see
6380 canon_reg. */
6381
6382 validate_change (insn, &SET_SRC (sets[i].rtl),
6383 canon_reg (SET_SRC (sets[i].rtl), insn),
6384 1);
6385 apply_change_group ();
6386 break;
6387 }
6388
6389 /* If we previously found constant pool entries for
6390 constants and this is a constant, try making a
6391 pool entry. Put it in src_folded unless we already have done
6392 this since that is where it likely came from. */
6393
6394 else if (constant_pool_entries_cost
6395 && CONSTANT_P (trial)
6396 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
6397 && GET_MODE_CLASS (mode) != MODE_CC)
6398 {
6399 src_folded_force_flag = 1;
6400 src_folded = trial;
6401 src_folded_cost = constant_pool_entries_cost;
6402 }
6403 }
6404
6405 src = SET_SRC (sets[i].rtl);
6406
6407 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6408 However, there is an important exception: If both are registers
6409 that are not the head of their equivalence class, replace SET_SRC
6410 with the head of the class. If we do not do this, we will have
6411 both registers live over a portion of the basic block. This way,
6412 their lifetimes will likely abut instead of overlapping. */
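/* E.g. (hypothetical registers) if regs 101 and 102 are equivalent
with 101 the head of the class, the no-op (set (reg 102) (reg 102))
becomes (set (reg 102) (reg 101)), letting the two lifetimes
abut rather than overlap. */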
6413 if (GET_CODE (dest) == REG
6414 && REGNO_QTY_VALID_P (REGNO (dest))
6415 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6416 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6417 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6418 /* Don't do this if the original insn had a hard reg as
6419 SET_SRC. */
6420 && (GET_CODE (sets[i].src) != REG
6421 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6422 /* We can't call canon_reg here because it won't do anything if
6423 SRC is a hard register. */
6424 {
6425 int first = qty_first_reg[reg_qty[REGNO (src)]];
6426
6427 src = SET_SRC (sets[i].rtl)
6428 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6429 : gen_rtx (REG, GET_MODE (src), first);
6430
6431 /* If we had a constant that is cheaper than what we are now
6432 setting SRC to, use that constant. We ignored it when we
6433 thought we could make this into a no-op. */
6434 if (src_const && COST (src_const) < COST (src)
6435 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6436 src = src_const;
6437 }
6438
6439 /* If we made a change, recompute SRC values. */
6440 if (src != sets[i].src)
6441 {
6442 do_not_record = 0;
6443 hash_arg_in_memory = 0;
6444 hash_arg_in_struct = 0;
6445 sets[i].src = src;
6446 sets[i].src_hash_code = HASH (src, mode);
6447 sets[i].src_volatile = do_not_record;
6448 sets[i].src_in_memory = hash_arg_in_memory;
6449 sets[i].src_in_struct = hash_arg_in_struct;
6450 sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
6451 }
6452
6453 /* If this is a single SET, we are setting a register, and we have an
6454 equivalent constant, we want to add a REG_NOTE. We don't want
6455 to write a REG_EQUAL note for a constant pseudo since verifying that
6456 that pseudo hasn't been eliminated is a pain. Such a note also
6457 won't help anything. */
6458 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6459 && GET_CODE (src_const) != REG)
6460 {
6461 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6462
6463 /* Record the actual constant value in a REG_EQUAL note, making
6464 a new one if one does not already exist. */
6465 if (tem)
6466 XEXP (tem, 0) = src_const;
6467 else
6468 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6469 src_const, REG_NOTES (insn));
6470
6471 /* If storing a constant value in a register that
6472 previously held the constant value 0,
6473 record this fact with a REG_WAS_0 note on this insn.
6474
6475 Note that the *register* is required to have previously held 0,
6476 not just any register in the quantity, and we must point to the
6477 insn that set that register to zero.
6478
6479 Rather than track each register individually, we just see if
6480 the last set for this quantity was for this register. */
6481
6482 if (REGNO_QTY_VALID_P (REGNO (dest))
6483 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6484 {
6485 /* See if we previously had a REG_WAS_0 note. */
6486 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6487 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6488
6489 if ((tem = single_set (const_insn)) != 0
6490 && rtx_equal_p (SET_DEST (tem), dest))
6491 {
6492 if (note)
6493 XEXP (note, 0) = const_insn;
6494 else
6495 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6496 const_insn, REG_NOTES (insn));
6497 }
6498 }
6499 }
6500
6501 /* Now deal with the destination. */
6502 do_not_record = 0;
6503 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6504
6505 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6506 to the MEM or REG within it. */
6507 while (GET_CODE (dest) == SIGN_EXTRACT
6508 || GET_CODE (dest) == ZERO_EXTRACT
6509 || GET_CODE (dest) == SUBREG
6510 || GET_CODE (dest) == STRICT_LOW_PART)
6511 {
6512 sets[i].inner_dest_loc = &XEXP (dest, 0);
6513 dest = XEXP (dest, 0);
6514 }
6515
6516 sets[i].inner_dest = dest;
6517
6518 if (GET_CODE (dest) == MEM)
6519 {
6520 dest = fold_rtx (dest, insn);
6521
6522 /* Decide whether we invalidate everything in memory,
6523 or just things at non-fixed places.
6524 Writing a large aggregate must invalidate everything
6525 because we don't know how long it is. */
6526 note_mem_written (dest, &writes_memory);
6527 }
6528
6529 /* Compute the hash code of the destination now,
6530 before the effects of this instruction are recorded,
6531 since the register values used in the address computation
6532 are those before this instruction. */
6533 sets[i].dest_hash_code = HASH (dest, mode);
6534
6535 /* Don't enter a bit-field in the hash table
6536 because the value in it after the store
6537 may not equal what was stored, due to truncation. */
6538
6539 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6540 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6541 {
6542 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6543
6544 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6545 && GET_CODE (width) == CONST_INT
6546 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6547 && ! (INTVAL (src_const)
6548 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6549 /* Exception: if the value is constant,
6550 and it won't be truncated, record it. */
6551 ;
6552 else
6553 {
6554 /* This is chosen so that the destination will be invalidated
6555 but no new value will be recorded.
6556 We must invalidate because sometimes constant
6557 values can be recorded for bitfields. */
6558 sets[i].src_elt = 0;
6559 sets[i].src_volatile = 1;
6560 src_eqv = 0;
6561 src_eqv_elt = 0;
6562 }
6563 }
6564
6565 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6566 the insn. */
6567 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6568 {
6569 PUT_CODE (insn, NOTE);
6570 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6571 NOTE_SOURCE_FILE (insn) = 0;
6572 cse_jumps_altered = 1;
6573 /* One less use of the label this insn used to jump to. */
6574 --LABEL_NUSES (JUMP_LABEL (insn));
6575 /* No more processing for this set. */
6576 sets[i].rtl = 0;
6577 }
6578
6579 /* If this SET is now setting PC to a label, we know it used to
6580 be a conditional or computed branch. So we see if we can follow
6581 it. If it was a computed branch, delete it and re-emit. */
6582 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6583 {
6584 rtx p;
6585
6586 /* If this is not in the format for a simple branch and
6587 this is the only SET in it, re-emit it. */
6588 if (! simplejump_p (insn) && n_sets == 1)
6589 {
6590 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6591 JUMP_LABEL (new) = XEXP (src, 0);
6592 LABEL_NUSES (XEXP (src, 0))++;
6593 delete_insn (insn);
6594 insn = new;
6595 }
6596
6597 /* Now that we've converted this jump to an unconditional jump,
6598 there is dead code after it. Delete the dead code until we
6599 reach a BARRIER, the end of the function, or a label. Do
6600 not delete NOTEs except for NOTE_INSN_DELETED since later
6601 phases assume these notes are retained. */
6602
6603 p = insn;
6604
6605 while (NEXT_INSN (p) != 0
6606 && GET_CODE (NEXT_INSN (p)) != BARRIER
6607 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6608 {
6609 if (GET_CODE (NEXT_INSN (p)) != NOTE
6610 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6611 delete_insn (NEXT_INSN (p));
6612 else
6613 p = NEXT_INSN (p);
6614 }
6615
6616 /* If we don't have a BARRIER immediately after INSN, put one there.
6617 Much code assumes that there are no NOTEs between a JUMP_INSN and
6618 a BARRIER. */
6619
6620 if (NEXT_INSN (insn) == 0
6621 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6622 emit_barrier_after (insn);
6623
6624 /* We might have two BARRIERs separated by notes. Delete the second
6625 one if so. */
6626
6627 if (p != insn && NEXT_INSN (p) != 0
6628 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6629 delete_insn (NEXT_INSN (p));
6630
6631 cse_jumps_altered = 1;
6632 sets[i].rtl = 0;
6633 }
6634
6635 /* If destination is volatile, invalidate it and then do no further
6636 processing for this assignment. */
6637
6638 else if (do_not_record)
6639 {
6640 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6641 || GET_CODE (dest) == MEM)
6642 invalidate (dest);
6643 sets[i].rtl = 0;
6644 }
6645
6646 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6647 sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
6648
6649 #ifdef HAVE_cc0
6650 /* If setting CC0, record what it was set to, or a constant, if it
6651 is equivalent to a constant. If it is being set to a floating-point
6652 value, make a COMPARE with the appropriate constant of 0. If we
6653 don't do this, later code can interpret this as a test against
6654 const0_rtx, which can cause problems if we try to put it into an
6655 insn as a floating-point operand. */
6656 if (dest == cc0_rtx)
6657 {
6658 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6659 this_insn_cc0_mode = mode;
6660 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
6661 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6662 CONST0_RTX (mode));
6663 }
6664 #endif
6665 }
6666
6667 /* Now enter all non-volatile source expressions in the hash table
6668 if they are not already present.
6669 Record their equivalence classes in src_elt.
6670 This way we can insert the corresponding destinations into
6671 the same classes even if the actual sources are no longer in them
6672 (having been invalidated). */
6673
6674 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6675 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6676 {
6677 register struct table_elt *elt;
6678 register struct table_elt *classp = sets[0].src_elt;
6679 rtx dest = SET_DEST (sets[0].rtl);
6680 enum machine_mode eqvmode = GET_MODE (dest);
6681
6682 if (GET_CODE (dest) == STRICT_LOW_PART)
6683 {
6684 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6685 classp = 0;
6686 }
6687 if (insert_regs (src_eqv, classp, 0))
6688 src_eqv_hash_code = HASH (src_eqv, eqvmode);
6689 elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
6690 elt->in_memory = src_eqv_in_memory;
6691 elt->in_struct = src_eqv_in_struct;
6692 src_eqv_elt = elt;
6693 }
6694
6695 for (i = 0; i < n_sets; i++)
6696 if (sets[i].rtl && ! sets[i].src_volatile
6697 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6698 {
6699 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6700 {
6701 /* REG_EQUAL in setting a STRICT_LOW_PART
6702 gives an equivalent for the entire destination register,
6703 not just for the subreg being stored in now.
6704 This is a more interesting equivalence, so we arrange later
6705 to treat the entire reg as the destination. */
6706 sets[i].src_elt = src_eqv_elt;
6707 sets[i].src_hash_code = src_eqv_hash_code;
6708 }
6709 else
6710 {
6711 /* Insert source and constant equivalent into hash table, if not
6712 already present. */
6713 register struct table_elt *classp = src_eqv_elt;
6714 register rtx src = sets[i].src;
6715 register rtx dest = SET_DEST (sets[i].rtl);
6716 enum machine_mode mode
6717 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6718
6719 if (sets[i].src_elt == 0)
6720 {
6721 register struct table_elt *elt;
6722
6723 /* Note that these insert_regs calls cannot remove
6724 any of the src_elt's, because they would have failed to
6725 match if not still valid. */
6726 if (insert_regs (src, classp, 0))
6727 sets[i].src_hash_code = HASH (src, mode);
6728 elt = insert (src, classp, sets[i].src_hash_code, mode);
6729 elt->in_memory = sets[i].src_in_memory;
6730 elt->in_struct = sets[i].src_in_struct;
6731 sets[i].src_elt = classp = elt;
6732 }
6733
6734 if (sets[i].src_const && sets[i].src_const_elt == 0
6735 && src != sets[i].src_const
6736 && ! rtx_equal_p (sets[i].src_const, src))
6737 sets[i].src_elt = insert (sets[i].src_const, classp,
6738 sets[i].src_const_hash_code, mode);
6739 }
6740 }
6741 else if (sets[i].src_elt == 0)
6742 /* If we did not insert the source into the hash table (e.g., it was
6743 volatile), note the equivalence class for the REG_EQUAL value, if any,
6744 so that the destination goes into that class. */
6745 sets[i].src_elt = src_eqv_elt;
6746
6747 invalidate_from_clobbers (&writes_memory, x);
6748
6749 /* Some registers are invalidated by subroutine calls. Memory is
6750 invalidated by non-constant calls. */
6751
6752 if (GET_CODE (insn) == CALL_INSN)
6753 {
6754 static struct write_data everything = {0, 1, 1, 1};
6755
6756 if (! CONST_CALL_P (insn))
6757 invalidate_memory (&everything);
6758 invalidate_for_call ();
6759 }
6760
6761 /* Now invalidate everything set by this instruction.
6762 If a SUBREG or other funny destination is being set,
6763 sets[i].rtl is still nonzero, so here we invalidate the reg
6764 a part of which is being set. */
6765
6766 for (i = 0; i < n_sets; i++)
6767 if (sets[i].rtl)
6768 {
6769 register rtx dest = sets[i].inner_dest;
6770
6771 /* Needed for registers to remove the register from its
6772 previous quantity's chain.
6773 Needed for memory if this is a nonvarying address, unless
6774 we have just done an invalidate_memory that covers even those. */
6775 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6776 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6777 invalidate (dest);
6778 }
6779
6780 /* Make sure registers mentioned in destinations
6781 are safe for use in an expression to be inserted.
6782 This removes from the hash table
6783 any invalid entry that refers to one of these registers.
6784
6785 We don't care about the return value from mention_regs because
6786 we are going to hash the SET_DEST values unconditionally. */
6787
6788 for (i = 0; i < n_sets; i++)
6789 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
6790 mention_regs (SET_DEST (sets[i].rtl));
6791
6792 /* We may have just removed some of the src_elt's from the hash table.
6793 So replace each one with the current head of the same class. */
6794
6795 for (i = 0; i < n_sets; i++)
6796 if (sets[i].rtl)
6797 {
6798 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6799 /* If elt was removed, find current head of same class,
6800 or 0 if nothing remains of that class. */
6801 {
6802 register struct table_elt *elt = sets[i].src_elt;
6803
6804 while (elt && elt->prev_same_value)
6805 elt = elt->prev_same_value;
6806
6807 while (elt && elt->first_same_value == 0)
6808 elt = elt->next_same_value;
6809 sets[i].src_elt = elt ? elt->first_same_value : 0;
6810 }
6811 }
6812
6813 /* Now insert the destinations into their equivalence classes. */
6814
6815 for (i = 0; i < n_sets; i++)
6816 if (sets[i].rtl)
6817 {
6818 register rtx dest = SET_DEST (sets[i].rtl);
6819 register struct table_elt *elt;
6820
6821 /* Don't record value if we are not supposed to risk allocating
6822 floating-point values in registers that might be wider than
6823 memory. */
6824 if ((flag_float_store
6825 && GET_CODE (dest) == MEM
6826 && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
6827 /* Don't record values of destinations set inside a libcall block
6828 since we might delete the libcall. Things should have been set
6829 up so we won't want to reuse such a value, but we play it safe
6830 here. */
6831 || in_libcall_block
6832 /* If we didn't put a REG_EQUAL value or a source into the hash
6833 table, there is no point in recording DEST. */
6834 || sets[i].src_elt == 0)
6835 continue;
6836
6837 /* STRICT_LOW_PART isn't part of the value BEING set,
6838 and neither is the SUBREG inside it.
6839 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6840 if (GET_CODE (dest) == STRICT_LOW_PART)
6841 dest = SUBREG_REG (XEXP (dest, 0));
6842
6843 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6844 /* Registers must also be inserted into chains for quantities. */
6845 if (insert_regs (dest, sets[i].src_elt, 1))
6846 /* If `insert_regs' changes something, the hash code must be
6847 recalculated. */
6848 sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));
6849
6850 elt = insert (dest, sets[i].src_elt,
6851 sets[i].dest_hash_code, GET_MODE (dest));
6852 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
6853 if (elt->in_memory)
6854 {
6855 /* This implicitly assumes a whole struct
6856 need not have MEM_IN_STRUCT_P.
6857 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6858 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
6859 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
6860 }
6861
6862 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6863 narrower than M2, and both M1 and M2 are the same number of words,
6864 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6865 make that equivalence as well.
6866
6867 However, BAR may have equivalences for which gen_lowpart_if_possible
6868 will produce a simpler value than gen_lowpart_if_possible applied to
6869 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6870 BAR's equivalences. If we don't get a simplified form, make
6871 the SUBREG. It will not be used in an equivalence, but will
6872 cause two similar assignments to be detected.
6873
6874 Note the loop below will find SUBREG_REG (DEST) since we have
6875 already entered SRC and DEST of the SET in the table. */
6876
6877 if (GET_CODE (dest) == SUBREG
6878 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) / UNITS_PER_WORD
6879 == GET_MODE_SIZE (GET_MODE (dest)) / UNITS_PER_WORD)
6880 && (GET_MODE_SIZE (GET_MODE (dest))
6881 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6882 && sets[i].src_elt != 0)
6883 {
6884 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6885 struct table_elt *elt, *classp = 0;
6886
6887 for (elt = sets[i].src_elt->first_same_value; elt;
6888 elt = elt->next_same_value)
6889 {
6890 rtx new_src = 0;
6891 int src_hash;
6892 struct table_elt *src_elt;
6893
6894 /* Ignore invalid entries. */
6895 if (GET_CODE (elt->exp) != REG
6896 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6897 continue;
6898
6899 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6900 if (new_src == 0)
6901 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
6902
6903 src_hash = HASH (new_src, new_mode);
6904 src_elt = lookup (new_src, src_hash, new_mode);
6905
6906 /* Put the new source in the hash table if it isn't
6907 already. */
6908 if (src_elt == 0)
6909 {
6910 if (insert_regs (new_src, classp, 0))
6911 src_hash = HASH (new_src, new_mode);
6912 src_elt = insert (new_src, classp, src_hash, new_mode);
6913 src_elt->in_memory = elt->in_memory;
6914 src_elt->in_struct = elt->in_struct;
6915 }
6916 else if (classp && classp != src_elt->first_same_value)
6917 /* Show that two things that we've seen before are
6918 actually the same. */
6919 merge_equiv_classes (src_elt, classp);
6920
6921 classp = src_elt->first_same_value;
6922 }
6923 }
6924 }
6925
6926 /* Special handling for (set REG0 REG1)
6927 where REG0 is the "cheapest", cheaper than REG1.
6928 After cse, REG1 will probably not be used in the sequel,
6929 so (if easily done) change this insn to (set REG1 REG0) and
6930 replace REG1 with REG0 in the previous insn that computed their value.
6931 Then REG1 will become a dead store and won't cloud the situation
6932 for later optimizations.
6933
6934 Do not make this change if REG1 is a hard register, because it will
6935 then be used in the sequel and we may be changing a two-operand insn
6936 into a three-operand insn.
6937
6938 Also do not do this if we are operating on a copy of INSN. */
6939
6940 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6941 && NEXT_INSN (PREV_INSN (insn)) == insn
6942 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6943 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6944 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
6945 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
6946 == REGNO (SET_DEST (sets[0].rtl))))
6947 {
6948 rtx prev = PREV_INSN (insn);
6949 while (prev && GET_CODE (prev) == NOTE)
6950 prev = PREV_INSN (prev);
6951
6952 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
6953 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
6954 {
6955 rtx dest = SET_DEST (sets[0].rtl);
6956 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
6957
6958 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
6959 validate_change (insn, & SET_DEST (sets[0].rtl),
6960 SET_SRC (sets[0].rtl), 1);
6961 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
6962 apply_change_group ();
6963
6964 /* If REG1 was equivalent to a constant, REG0 is not. */
6965 if (note)
6966 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6967
6968 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6969 any REG_WAS_0 note on INSN to PREV. */
6970 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6971 if (note)
6972 remove_note (prev, note);
6973
6974 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6975 if (note)
6976 {
6977 remove_note (insn, note);
6978 XEXP (note, 1) = REG_NOTES (prev);
6979 REG_NOTES (prev) = note;
6980 }
6981 }
6982 }
6983
6984 /* If this is a conditional jump insn, record any known equivalences due to
6985 the condition being tested. */
6986
6987 last_jump_equiv_class = 0;
6988 if (GET_CODE (insn) == JUMP_INSN
6989 && n_sets == 1 && GET_CODE (x) == SET
6990 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6991 record_jump_equiv (insn, 0);
6992
6993 #ifdef HAVE_cc0
6994 /* If the previous insn set CC0 and this insn no longer references CC0,
6995 delete the previous insn. Here we use the fact that nothing expects CC0
6996 to be valid over an insn, which is true until the final pass. */
6997 if (prev_insn && GET_CODE (prev_insn) == INSN
6998 && (tem = single_set (prev_insn)) != 0
6999 && SET_DEST (tem) == cc0_rtx
7000 && ! reg_mentioned_p (cc0_rtx, x))
7001 {
7002 PUT_CODE (prev_insn, NOTE);
7003 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7004 NOTE_SOURCE_FILE (prev_insn) = 0;
7005 }
7006
7007 prev_insn_cc0 = this_insn_cc0;
7008 prev_insn_cc0_mode = this_insn_cc0_mode;
7009 #endif
7010
7011 prev_insn = insn;
7012 }
7013 \f
7014 /* Store 1 in *WRITES_PTR for those categories of memory ref
7015 that must be invalidated when the expression WRITTEN is stored in.
7016 If WRITTEN is null, say everything must be invalidated. */
7017
7018 static void
7019 note_mem_written (written, writes_ptr)
7020 rtx written;
7021 struct write_data *writes_ptr;
7022 {
7023 static struct write_data everything = {0, 1, 1, 1};
7024
7025 if (written == 0)
7026 *writes_ptr = everything;
7027 else if (GET_CODE (written) == MEM)
7028 {
7029 /* Pushing or popping the stack invalidates just the stack pointer. */
7030 rtx addr = XEXP (written, 0);
7031 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7032 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7033 && GET_CODE (XEXP (addr, 0)) == REG
7034 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7035 {
7036 writes_ptr->sp = 1;
7037 return;
7038 }
7039 else if (GET_MODE (written) == BLKmode)
7040 *writes_ptr = everything;
7041 else if (cse_rtx_addr_varies_p (written))
7042 {
7043 /* A varying address that is a sum indicates an array element,
7044 and that's just as good as a structure element
7045 in implying that we need not invalidate scalar variables.
7046 However, we must allow QImode aliasing of scalars, because the
7047 ANSI C standard allows character pointers to alias anything. */
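/* Hence a QImode store through something like `char *p' must
set writes_ptr->all as well, since it may modify any scalar. */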
7048 if (! ((MEM_IN_STRUCT_P (written)
7049 || GET_CODE (XEXP (written, 0)) == PLUS)
7050 && GET_MODE (written) != QImode))
7051 writes_ptr->all = 1;
7052 writes_ptr->nonscalar = 1;
7053 }
7054 writes_ptr->var = 1;
7055 }
7056 }
7057
7058 /* Perform invalidation on the basis of everything about an insn
7059 except for invalidating the actual places that are SET in it.
7060 This includes the places CLOBBERed, and anything that might
7061 alias with something that is SET or CLOBBERed.
7062
7063 W points to the writes_memory for this insn, a struct write_data
7064 saying which kinds of memory references must be invalidated.
7065 X is the pattern of the insn. */
7066
7067 static void
7068 invalidate_from_clobbers (w, x)
7069 struct write_data *w;
7070 rtx x;
7071 {
7072 /* If W->var is not set, W specifies no action.
7073 If W->all is set, this step gets all memory refs
7074 so they can be ignored in the rest of this function. */
7075 if (w->var)
7076 invalidate_memory (w);
7077
7078 if (w->sp)
7079 {
7080 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7081 reg_tick[STACK_POINTER_REGNUM]++;
7082
7083 /* This should be *very* rare. */
7084 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7085 invalidate (stack_pointer_rtx);
7086 }
7087
7088 if (GET_CODE (x) == CLOBBER)
7089 {
7090 rtx ref = XEXP (x, 0);
7091 if (ref
7092 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7093 || (GET_CODE (ref) == MEM && ! w->all)))
7094 invalidate (ref);
7095 }
7096 else if (GET_CODE (x) == PARALLEL)
7097 {
7098 register int i;
7099 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7100 {
7101 register rtx y = XVECEXP (x, 0, i);
7102 if (GET_CODE (y) == CLOBBER)
7103 {
7104 rtx ref = XEXP (y, 0);
7105 if (ref
7106 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7107 || (GET_CODE (ref) == MEM && ! w->all)))
7108 invalidate (ref);
7109 }
7110 }
7111 }
7112 }
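
/* For example (illustrative only; register numbers hypothetical), an
   insn whose pattern is

	(parallel [(set (reg:SI 5) (plus:SI (reg:SI 5) (reg:SI 6)))
		   (clobber (reg:CC 17))])

   reaches the PARALLEL arm above, which invalidates (reg:CC 17); the
   SET itself is deliberately left to the caller, as noted in the
   function comment.  */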
7113 \f
7114 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7115 and replace any registers in them with either an equivalent constant
7116 or the canonical form of the register. If we are inside an address,
7117 only do this if the address remains valid.
7118
7119 OBJECT is 0 except when within a MEM in which case it is the MEM.
7120
7121 Return the replacement for X. */
7122
7123 static rtx
7124 cse_process_notes (x, object)
7125 rtx x;
7126 rtx object;
7127 {
7128 enum rtx_code code = GET_CODE (x);
7129 char *fmt = GET_RTX_FORMAT (code);
7130 int qty;
7131 int i;
7132
7133 switch (code)
7134 {
7135 case CONST_INT:
7136 case CONST:
7137 case SYMBOL_REF:
7138 case LABEL_REF:
7139 case CONST_DOUBLE:
7140 case PC:
7141 case CC0:
7142 case LO_SUM:
7143 return x;
7144
7145 case MEM:
7146 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7147 return x;
7148
7149 case EXPR_LIST:
7150 case INSN_LIST:
7151 if (REG_NOTE_KIND (x) == REG_EQUAL)
7152 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7153 if (XEXP (x, 1))
7154 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7155 return x;
7156
7157 case SIGN_EXTEND:
7158 case ZERO_EXTEND:
7159 {
7160 rtx new = cse_process_notes (XEXP (x, 0), object);
7161 /* We don't substitute VOIDmode constants into these rtx,
7162 since they would impede folding. */
7163 if (GET_MODE (new) != VOIDmode)
7164 validate_change (object, &XEXP (x, 0), new, 0);
7165 return x;
7166 }
7167
7168 case REG:
7169 i = reg_qty[REGNO (x)];
7170
7171 /* Return a constant or a constant register. */
7172 if (REGNO_QTY_VALID_P (REGNO (x))
7173 && qty_const[i] != 0
7174 && (CONSTANT_P (qty_const[i])
7175 || GET_CODE (qty_const[i]) == REG))
7176 {
7177 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7178 if (new)
7179 return new;
7180 }
7181
7182 /* Otherwise, canonicalize this register. */
7183 return canon_reg (x, NULL_RTX);
7184 }
7185
7186 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7187 if (fmt[i] == 'e')
7188 validate_change (object, &XEXP (x, i),
7189 cse_process_notes (XEXP (x, i), object), 0);
7190
7191 return x;
7192 }
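
/* A sketch of the effect (register numbers hypothetical): if
   (reg:SI 30) currently belongs to a quantity whose qty_const is
   (const_int 4), a note

	(expr_list:REG_EQUAL (plus:SI (reg:SI 30) (reg:SI 31)) ...)

   has (reg:SI 30) replaced by (const_int 4) and (reg:SI 31) replaced
   by its canonical register, leaving the note in a form that later
   duplicate detection can match directly.  */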
7193 \f
7194 /* Find common subexpressions between the end test of a loop and the beginning
7195 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7196
7197 Often we have a loop where an expression in the exit test is used
7198 in the body of the loop. For example "while (*p) *q++ = *p++;".
7199 Because of the way we duplicate the loop exit test in front of the loop,
7200 however, we don't detect that common subexpression. This will be caught
7201 when global cse is implemented, but this is quite a common case.
7202
7203 This function handles the most common cases of these common expressions.
7204 It is called after we have processed the basic block ending with the
7205 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7206 jumps to a label used only once. */
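
/* Roughly, the insn stream this routine expects looks like this
   (a sketch; jump.c has duplicated the exit test in front of the loop):

	... copy of the exit test, branching past the loop ...
	NOTE_INSN_LOOP_BEG
   L:	... loop body ...
	... exit test, with a JUMP_INSN back to L ...
	NOTE_INSN_LOOP_END

   LOOP_START is the CODE_LABEL L, and the check below verifies that
   only line-number notes separate it from NOTE_INSN_LOOP_BEG.  */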
7207
7208 static void
7209 cse_around_loop (loop_start)
7210 rtx loop_start;
7211 {
7212 rtx insn;
7213 int i;
7214 struct table_elt *p;
7215
7216 /* If the jump at the end of the loop doesn't go to the start, we don't
7217 do anything. */
7218 for (insn = PREV_INSN (loop_start);
7219 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7220 insn = PREV_INSN (insn))
7221 ;
7222
7223 if (insn == 0
7224 || GET_CODE (insn) != NOTE
7225 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7226 return;
7227
7228 /* If the last insn of the loop (the end test) was an NE comparison,
7229 we will interpret it as an EQ comparison, since we fell through
7230 the loop. Any equivalences resulting from that comparison are
7231 therefore not valid and must be invalidated. */
7232 if (last_jump_equiv_class)
7233 for (p = last_jump_equiv_class->first_same_value; p;
7234 p = p->next_same_value)
7235 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7236 || GET_CODE (p->exp) == SUBREG)
7237 invalidate (p->exp);
7238
7239 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7240 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7241
7242 The only thing we do with SET_DEST is invalidate entries, so we
7243 can safely process each SET in order. It is slightly less efficient
7244 to do so, but we only want to handle the most common cases. */
7245
7246 for (insn = NEXT_INSN (loop_start);
7247 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7248 && ! (GET_CODE (insn) == NOTE
7249 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7250 insn = NEXT_INSN (insn))
7251 {
7252 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7253 && (GET_CODE (PATTERN (insn)) == SET
7254 || GET_CODE (PATTERN (insn)) == CLOBBER))
7255 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7256 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7257 && GET_CODE (PATTERN (insn)) == PARALLEL)
7258 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7259 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7260 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7261 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7262 loop_start);
7263 }
7264 }
7265 \f
7266 /* Variable used for communications between the next two routines. */
7267
7268 static struct write_data skipped_writes_memory;
7269
7270 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7271 since they are done elsewhere. This function is called via note_stores. */
7272
7273 static void
7274 invalidate_skipped_set (dest, set)
7275 rtx dest;
7276 rtx set;
7277 {
7278 if (GET_CODE (set) == CLOBBER
7279 #ifdef HAVE_cc0
7280 || dest == cc0_rtx
7281 #endif
7282 || dest == pc_rtx)
7283 return;
7284
7285 if (GET_CODE (dest) == MEM)
7286 note_mem_written (dest, &skipped_writes_memory);
7287
7288 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7289 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7290 invalidate (dest);
7291 }
7292
7293 /* Invalidate all insns from START up to the end of the function or the
7294 next label. This called when we wish to CSE around a block that is
7295 conditionally executed. */
7296
7297 static void
7298 invalidate_skipped_block (start)
7299 rtx start;
7300 {
7301 rtx insn;
7302 int i;
7303 static struct write_data init = {0, 0, 0, 0};
7304 static struct write_data everything = {0, 1, 1, 1};
7305
7306 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7307 insn = NEXT_INSN (insn))
7308 {
7309 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7310 continue;
7311
7312 skipped_writes_memory = init;
7313
7314 if (GET_CODE (insn) == CALL_INSN)
7315 {
7316 invalidate_for_call ();
7317 skipped_writes_memory = everything;
7318 }
7319
7320 note_stores (PATTERN (insn), invalidate_skipped_set);
7321 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7322 }
7323 }
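
/* A sketch of when this is needed (register number hypothetical): for

	(jump_insn ... branch over BLOCK to L)
	BLOCK:	(set (reg:SI 60) ...) ...
   L:	...

   following the branch means BLOCK may or may not have run, so every
   register and memory location BLOCK can set, such as (reg:SI 60),
   must be forgotten; that is what the loop above does.  */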
7324 \f
7325 /* Used for communication between the following two routines; contains a
7326 value to be checked for modification. */
7327
7328 static rtx cse_check_loop_start_value;
7329
7330 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7331 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7332
7333 static void
7334 cse_check_loop_start (x, set)
7335 rtx x;
7336 rtx set;
7337 {
7338 if (cse_check_loop_start_value == 0
7339 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7340 return;
7341
7342 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7343 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7344 cse_check_loop_start_value = 0;
7345 }
7346
7347 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7348 a loop that starts with the label at LOOP_START.
7349
7350 If X is a SET, we see if its SET_SRC is currently in our hash table.
7351 If so, we see if it has a value equal to some register used only in the
7352 loop exit code (as marked by jump.c).
7353
7354 If those two conditions are true, we search backwards from the start of
7355 the loop to see if that same value was loaded into a register that still
7356 retains its value at the start of the loop.
7357
7358 If so, we insert an insn after the load to copy the destination of that
7359 load into the equivalent register and (try to) replace our SET_SRC with that
7360 register.
7361
7362 In any event, we invalidate whatever this SET or CLOBBER modifies. */
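
/* A sketch of the transformation (register numbers hypothetical):

	p:	(set (reg 66) (mem X))		before NOTE_INSN_LOOP_BEG
	LOOP_START:
	insn:	(set (reg 77) (mem X))		(mem X) also equivalent to
						(reg 99), a loop-exit-test reg

   becomes, when (mem X) is unchanged between P and LOOP_START,

	p:	(set (reg 66) (mem X))
		(set (reg 99) (reg 66))		inserted copy
	LOOP_START:
	insn:	(set (reg 77) (reg 99))		cheaper SET_SRC  */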
7363
7364 static void
7365 cse_set_around_loop (x, insn, loop_start)
7366 rtx x;
7367 rtx insn;
7368 rtx loop_start;
7369 {
7370 rtx p;
7371 struct table_elt *src_elt;
7372 static struct write_data init = {0, 0, 0, 0};
7373 struct write_data writes_memory;
7374
7375 writes_memory = init;
7376
7377 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7378 are setting PC or CC0 or whose SET_SRC is already a register. */
7379 if (GET_CODE (x) == SET
7380 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7381 && GET_CODE (SET_SRC (x)) != REG)
7382 {
7383 src_elt = lookup (SET_SRC (x),
7384 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7385 GET_MODE (SET_DEST (x)));
7386
7387 if (src_elt)
7388 for (src_elt = src_elt->first_same_value; src_elt;
7389 src_elt = src_elt->next_same_value)
7390 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7391 && COST (src_elt->exp) < COST (SET_SRC (x)))
7392 {
7393 rtx p, set;
7394
7395 /* Look for an insn in front of LOOP_START that sets
7396 something in the desired mode to SET_SRC (x) before we hit
7397 a label or CALL_INSN. */
7398
7399 for (p = prev_nonnote_insn (loop_start);
7400 p && GET_CODE (p) != CALL_INSN
7401 && GET_CODE (p) != CODE_LABEL;
7402 p = prev_nonnote_insn (p))
7403 if ((set = single_set (p)) != 0
7404 && GET_CODE (SET_DEST (set)) == REG
7405 && GET_MODE (SET_DEST (set)) == src_elt->mode
7406 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7407 {
7408 /* We now have to ensure that nothing between P
7409 and LOOP_START modified anything referenced in
7410 SET_SRC (x). We know that nothing within the loop
7411 can modify it, or we would have invalidated it in
7412 the hash table. */
7413 rtx q;
7414
7415 cse_check_loop_start_value = SET_SRC (x);
7416 for (q = p; q != loop_start; q = NEXT_INSN (q))
7417 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7418 note_stores (PATTERN (q), cse_check_loop_start);
7419
7420 /* If nothing was changed and we can replace our
7421 SET_SRC, add an insn after P to copy its destination
7422 to what we will be replacing SET_SRC with. */
7423 if (cse_check_loop_start_value
7424 && validate_change (insn, &SET_SRC (x),
7425 src_elt->exp, 0))
7426 emit_insn_after (gen_move_insn (src_elt->exp,
7427 SET_DEST (set)),
7428 p);
7429 break;
7430 }
7431 }
7432 }
7433
7434 /* Now invalidate anything modified by X. */
7435 note_mem_written (SET_DEST (x), &writes_memory);
7436
7437 if (writes_memory.var)
7438 invalidate_memory (&writes_memory);
7439
7440 /* See comment on similar code in cse_insn for explanation of these tests. */
7441 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7442 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7443 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7444 invalidate (SET_DEST (x));
7445 }
7446 \f
7447 /* Find the end of INSN's basic block and record, in the structure DATA,
7448 its range, the total number of SETs in all the insns of the block, the
7449 last insn of the block, and the branch path.
7450
7451 The branch path indicates which branches should be followed. If a non-zero
7452 path size is specified, the block should be rescanned and a different set
7453 of branches will be taken. The branch path is only used if
7454 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7455
7456 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7457 used to describe the block. It is filled in with the information about
7458 the current block. The incoming structure's branch path, if any, is used
7459 to construct the output branch path. */
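
/* Example of a path (a sketch): with -fcse-follow-jumps, in

	insn_1
	(jump_insn conditional branch to L1)	path[0].branch, TAKEN
	... code ending in (barrier)
   L1:	insn_2

   the block is scanned as insn_1 followed by insn_2.  On a rescan the
   entry is flipped to NOT_TAKEN by the loop below so that the
   fall-through alternative is explored instead.  */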
7460
7461 void
7462 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7463 rtx insn;
7464 struct cse_basic_block_data *data;
7465 int follow_jumps;
7466 int after_loop;
7467 int skip_blocks;
7468 {
7469 rtx p = insn, q;
7470 int nsets = 0;
7471 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7472 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7473 int path_size = data->path_size;
7474 int path_entry = 0;
7475 int i;
7476
7477 /* Update the previous branch path, if any. If the last branch was
7478 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7479 shorten the path by one and look at the previous branch. We know that
7480 at least one branch must have been taken if PATH_SIZE is non-zero. */
7481 while (path_size > 0)
7482 {
7483 if (data->path[path_size - 1].status != NOT_TAKEN)
7484 {
7485 data->path[path_size - 1].status = NOT_TAKEN;
7486 break;
7487 }
7488 else
7489 path_size--;
7490 }
7491
7492 /* Scan to end of this basic block. */
7493 while (p && GET_CODE (p) != CODE_LABEL)
7494 {
7495 /* Don't cse out the end of a loop. This makes a difference
7496 only for the unusual loops that always execute at least once;
7497 all other loops have labels there so we will stop in any case.
7498 Cse'ing out the end of the loop is dangerous because it
7499 might cause an invariant expression inside the loop
7500 to be reused after the end of the loop. This would make it
7501 hard to move the expression out of the loop in loop.c,
7502 especially if it is one of several equivalent expressions
7503 and loop.c would like to eliminate it.
7504
7505 If we are running after loop.c has finished, we can ignore
7506 the NOTE_INSN_LOOP_END. */
7507
7508 if (! after_loop && GET_CODE (p) == NOTE
7509 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7510 break;
7511
7512 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
7513 the regs restored by the longjmp come from
7514 a later time than the setjmp. */
7515 if (GET_CODE (p) == NOTE
7516 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7517 break;
7518
7519 /* A PARALLEL can have lots of SETs in it,
7520 especially if it is really an ASM_OPERANDS. */
7521 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7522 && GET_CODE (PATTERN (p)) == PARALLEL)
7523 nsets += XVECLEN (PATTERN (p), 0);
7524 else if (GET_CODE (p) != NOTE)
7525 nsets += 1;
7526
7527 /* Ignore insns made by CSE; they cannot affect the boundaries of
7528 the basic block. */
7529
7530 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7531 high_cuid = INSN_CUID (p);
7532 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7533 low_cuid = INSN_CUID (p);
7534
7535 /* See if this insn is in our branch path. If it is and we are to
7536 take it, do so. */
7537 if (path_entry < path_size && data->path[path_entry].branch == p)
7538 {
7539 if (data->path[path_entry].status != NOT_TAKEN)
7540 p = JUMP_LABEL (p);
7541
7542 /* Point to next entry in path, if any. */
7543 path_entry++;
7544 }
7545
7546 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7547 was specified, we haven't reached our maximum path length, there are
7548 insns following the target of the jump, this is the only use of the
7549 jump label, and the target label is preceded by a BARRIER.
7550
7551 Alternatively, we can follow the jump if it branches around a
7552 block of code and there are no other branches into the block.
7553 In this case invalidate_skipped_block will be called to invalidate any
7554 registers set in the block when following the jump. */
7555
7556 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7557 && GET_CODE (p) == JUMP_INSN
7558 && GET_CODE (PATTERN (p)) == SET
7559 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7560 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7561 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7562 {
7563 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7564 if ((GET_CODE (q) != NOTE
7565 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7566 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7567 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7568 break;
7569
7570 /* If we ran into a BARRIER, this code is an extension of the
7571 basic block when the branch is taken. */
7572 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7573 {
7574 /* Don't allow ourselves to keep walking around an
7575 always-executed loop. */
7576 if (next_real_insn (q) == next)
7577 {
7578 p = NEXT_INSN (p);
7579 continue;
7580 }
7581
7582 /* Similarly, don't put a branch in our path more than once. */
7583 for (i = 0; i < path_entry; i++)
7584 if (data->path[i].branch == p)
7585 break;
7586
7587 if (i != path_entry)
7588 break;
7589
7590 data->path[path_entry].branch = p;
7591 data->path[path_entry++].status = TAKEN;
7592
7593 /* This branch now ends our path. It was possible that we
7594 didn't see this branch the last time around (when the
7595 insn in front of the target was a JUMP_INSN that was
7596 turned into a no-op). */
7597 path_size = path_entry;
7598
7599 p = JUMP_LABEL (p);
7600 /* Mark block so we won't scan it again later. */
7601 PUT_MODE (NEXT_INSN (p), QImode);
7602 }
7603 /* Detect a branch around a block of code. */
7604 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7605 {
7606 register rtx tmp;
7607
7608 if (next_real_insn (q) == next)
7609 {
7610 p = NEXT_INSN (p);
7611 continue;
7612 }
7613
7614 for (i = 0; i < path_entry; i++)
7615 if (data->path[i].branch == p)
7616 break;
7617
7618 if (i != path_entry)
7619 break;
7620
7621 /* This is no_labels_between_p (p, q) with an added check for
7622 reaching the end of a function (in case Q precedes P). */
7623 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7624 if (GET_CODE (tmp) == CODE_LABEL)
7625 break;
7626
7627 if (tmp == q)
7628 {
7629 data->path[path_entry].branch = p;
7630 data->path[path_entry++].status = AROUND;
7631
7632 path_size = path_entry;
7633
7634 p = JUMP_LABEL (p);
7635 /* Mark block so we won't scan it again later. */
7636 PUT_MODE (NEXT_INSN (p), QImode);
7637 }
7638 }
7639 }
7640 p = NEXT_INSN (p);
7641 }
7642
7643 data->low_cuid = low_cuid;
7644 data->high_cuid = high_cuid;
7645 data->nsets = nsets;
7646 data->last = p;
7647
7648 /* If no jumps in the path were taken, set our path length to zero
7649 so a rescan won't be done. */
7650 for (i = path_size - 1; i >= 0; i--)
7651 if (data->path[i].status != NOT_TAKEN)
7652 break;
7653
7654 if (i == -1)
7655 data->path_size = 0;
7656 else
7657 data->path_size = path_size;
7658
7659 /* End the current branch path. */
7660 data->path[path_size].branch = 0;
7661 }
7662 \f
7663 /* Perform cse on the instructions of a function.
7664 F is the first instruction.
7665 NREGS is one plus the highest pseudo-reg number used in the function.
7666
7667 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7668 (only if -frerun-cse-after-loop).
7669
7670 Returns 1 if jump_optimize should be redone due to simplifications
7671 in conditional jump instructions. */
7672
7673 int
7674 cse_main (f, nregs, after_loop, file)
7675 rtx f;
7676 int nregs;
7677 int after_loop;
7678 FILE *file;
7679 {
7680 struct cse_basic_block_data val;
7681 register rtx insn = f;
7682 register int i;
7683
7684 cse_jumps_altered = 0;
7685 constant_pool_entries_cost = 0;
7686 val.path_size = 0;
7687
7688 init_recog ();
7689
7690 max_reg = nregs;
7691
7692 all_minus_one = (int *) alloca (nregs * sizeof (int));
7693 consec_ints = (int *) alloca (nregs * sizeof (int));
7694
7695 for (i = 0; i < nregs; i++)
7696 {
7697 all_minus_one[i] = -1;
7698 consec_ints[i] = i;
7699 }
7700
7701 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7702 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7703 reg_qty = (int *) alloca (nregs * sizeof (int));
7704 reg_in_table = (int *) alloca (nregs * sizeof (int));
7705 reg_tick = (int *) alloca (nregs * sizeof (int));
7706
7707 /* Discard all the free elements of the previous function
7708 since they are allocated in the temporary obstack. */
7709 bzero (table, sizeof table);
7710 free_element_chain = 0;
7711 n_elements_made = 0;
7712
7713 /* Find the largest uid. */
7714
7715 max_uid = get_max_uid ();
7716 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
7717 bzero (uid_cuid, (max_uid + 1) * sizeof (int));
7718
7719 /* Compute the mapping from uids to cuids.
7720 CUIDs are numbers assigned to insns, like uids,
7721 except that cuids increase monotonically through the code.
7722 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7723 between two insns is not affected by -g. */
7724
7725 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7726 {
7727 if (GET_CODE (insn) != NOTE
7728 || NOTE_LINE_NUMBER (insn) < 0)
7729 INSN_CUID (insn) = ++i;
7730 else
7731 /* Give a line number note the same cuid as preceding insn. */
7732 INSN_CUID (insn) = i;
7733 }
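
/* For instance (illustrative uids): for the chain
	insn (uid 5), line-number note (uid 9), insn (uid 2)
   the cuids assigned are 1, 1 and 2; the note shares the cuid of the
   preceding insn, so cuid distances do not change under -g.  */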
7734
7735 /* Initialize which registers are clobbered by calls. */
7736
7737 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7738
7739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7740 if ((call_used_regs[i]
7741 /* Used to check !fixed_regs[i] here, but that isn't safe;
7742 fixed regs are still call-clobbered, and sched can get
7743 confused if they can "live across calls".
7744
7745 The frame pointer is always preserved across calls. The arg
7746 pointer is preserved if it is fixed. The stack pointer usually is, unless
7747 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7748 will be present. If we are generating PIC code, the PIC offset
7749 table register is preserved across calls. */
7750
7751 && i != STACK_POINTER_REGNUM
7752 && i != FRAME_POINTER_REGNUM
7753 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7754 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7755 #endif
7756 #ifdef PIC_OFFSET_TABLE_REGNUM
7757 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7758 #endif
7759 )
7760 || global_regs[i])
7761 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7762
7763 /* Loop over basic blocks.
7764 Compute the maximum number of qty's needed for each basic block
7765 (which is 2 for each SET). */
7766 insn = f;
7767 while (insn)
7768 {
7769 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7770 flag_cse_skip_blocks);
7771
7772 /* If this basic block was already processed or has no sets, skip it. */
7773 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7774 {
7775 PUT_MODE (insn, VOIDmode);
7776 insn = (val.last ? NEXT_INSN (val.last) : 0);
7777 val.path_size = 0;
7778 continue;
7779 }
7780
7781 cse_basic_block_start = val.low_cuid;
7782 cse_basic_block_end = val.high_cuid;
7783 max_qty = val.nsets * 2;
7784
7785 if (file)
7786 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
7787 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7788 val.nsets);
7789
7790 /* Make MAX_QTY bigger to give us room to optimize
7791 past the end of this basic block, if that should prove useful. */
7792 if (max_qty < 500)
7793 max_qty = 500;
7794
7795 max_qty += max_reg;
7796
7797 /* If this basic block is being extended by following certain jumps,
7798 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7799 Otherwise, we start after this basic block. */
7800 if (val.path_size > 0)
7801 cse_basic_block (insn, val.last, val.path, 0);
7802 else
7803 {
7804 int old_cse_jumps_altered = cse_jumps_altered;
7805 rtx temp;
7806
7807 /* When cse changes a conditional jump to an unconditional
7808 jump, we want to reprocess the block, since it will give
7809 us a new branch path to investigate. */
7810 cse_jumps_altered = 0;
7811 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7812 if (cse_jumps_altered == 0
7813 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7814 insn = temp;
7815
7816 cse_jumps_altered |= old_cse_jumps_altered;
7817 }
7818
7819 #ifdef USE_C_ALLOCA
7820 alloca (0);
7821 #endif
7822 }
7823
7824 /* Tell refers_to_mem_p that qty_const info is not available. */
7825 qty_const = 0;
7826
7827 if (max_elements_made < n_elements_made)
7828 max_elements_made = n_elements_made;
7829
7830 return cse_jumps_altered;
7831 }
7832
7833 /* Process a single basic block. FROM and TO are the limits of the basic
7834 block. NEXT_BRANCH points to the branch path when following jumps or
7835 a null path when not following jumps.
7836
7837 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7838 loop. This is true when we are being called for the last time on a
7839 block and this CSE pass is before loop.c. */
7840
7841 static rtx
7842 cse_basic_block (from, to, next_branch, around_loop)
7843 register rtx from, to;
7844 struct branch_path *next_branch;
7845 int around_loop;
7846 {
7847 register rtx insn;
7848 int to_usage = 0;
7849 int in_libcall_block = 0;
7850
7851 /* Each of these arrays is undefined before max_reg, so only allocate
7852 the space actually needed and adjust the start below. */
7853
7854 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7855 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7856 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
7857 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7858 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7859 qty_comparison_code
7860 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
7861 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7862 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7863
7864 qty_first_reg -= max_reg;
7865 qty_last_reg -= max_reg;
7866 qty_mode -= max_reg;
7867 qty_const -= max_reg;
7868 qty_const_insn -= max_reg;
7869 qty_comparison_code -= max_reg;
7870 qty_comparison_qty -= max_reg;
7871 qty_comparison_const -= max_reg;
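
/* The subtractions above bias each pointer so that, for example with
   max_reg == 100 and max_qty == 600, the alloca provides 500 elements
   and qty_const[100] through qty_const[599] are the valid ones; no
   space is allocated for the quantity numbers below max_reg, which
   are never used.  */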
7872
7873 new_basic_block ();
7874
7875 /* TO might be a label. If so, protect it from being deleted. */
7876 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7877 ++LABEL_NUSES (to);
7878
7879 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7880 {
7881 register enum rtx_code code;
7882
7883 /* See if this is a branch that is part of the path. If so, and it is
7884 to be taken, do so. */
7885 if (next_branch->branch == insn)
7886 {
7887 enum taken status = next_branch++->status;
7888 if (status != NOT_TAKEN)
7889 {
7890 if (status == TAKEN)
7891 record_jump_equiv (insn, 1);
7892 else
7893 invalidate_skipped_block (NEXT_INSN (insn));
7894
7895 /* Set the last insn as the jump insn; it doesn't affect cc0.
7896 Then follow this branch. */
7897 #ifdef HAVE_cc0
7898 prev_insn_cc0 = 0;
7899 #endif
7900 prev_insn = insn;
7901 insn = JUMP_LABEL (insn);
7902 continue;
7903 }
7904 }
7905
7906 code = GET_CODE (insn);
7907 if (GET_MODE (insn) == QImode)
7908 PUT_MODE (insn, VOIDmode);
7909
7910 if (GET_RTX_CLASS (code) == 'i')
7911 {
7912 /* Process notes first so we have all notes in canonical forms when
7913 looking for duplicate operations. */
7914
7915 if (REG_NOTES (insn))
7916 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7917
7918 /* Track when we are inside a LIBCALL block. Inside such a block,
7919 we do not want to record destinations. The last insn of a
7920 LIBCALL block is not considered to be part of the block, since
7921 its destination is the result of the block and hence should be
7922 recorded. */
7923
7924 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7925 in_libcall_block = 1;
7926 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7927 in_libcall_block = 0;
7928
7929 cse_insn (insn, in_libcall_block);
7930 }
7931
7932 /* If INSN is now an unconditional jump, skip to the end of our
7933 basic block by pretending that we just did the last insn in the
7934 basic block. If we are jumping to the end of our block, show
7935 that we can have one usage of TO. */
7936
7937 if (simplejump_p (insn))
7938 {
7939 if (to == 0)
7940 return 0;
7941
7942 if (JUMP_LABEL (insn) == to)
7943 to_usage = 1;
7944
7945 /* Maybe TO was deleted because the jump is unconditional.
7946 If so, there is nothing left in this basic block. */
7947 /* ??? Perhaps it would be smarter to set TO
7948 to whatever follows this insn,
7949 and pretend the basic block had always ended here. */
7950 if (INSN_DELETED_P (to))
7951 break;
7952
7953 insn = PREV_INSN (to);
7954 }
7955
7956 /* See if it is ok to keep on going past the label
7957 which used to end our basic block. Remember that we incremented
7958 the count of that label, so we decrement it here. If we made
7959 a jump unconditional, TO_USAGE will be one; in that case, we don't
7960 want to count the use in that jump. */
7961
7962 if (to != 0 && NEXT_INSN (insn) == to
7963 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7964 {
7965 struct cse_basic_block_data val;
7966
7967 insn = NEXT_INSN (to);
7968
7969 if (LABEL_NUSES (to) == 0)
7970 delete_insn (to);
7971
7972 /* Find the end of the following block. Note that we won't be
7973 following branches in this case. If TO was the last insn
7974 in the function, we are done. Similarly, if we deleted the
7975 insn after TO, it must have been because it was preceded by
7976 a BARRIER. In that case, we are done with this block because it
7977 has no continuation. */
7978
7979 if (insn == 0 || INSN_DELETED_P (insn))
7980 return 0;
7981
7982 to_usage = 0;
7983 val.path_size = 0;
7984 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7985
7986 /* If the tables we allocated have enough space left
7987 to handle all the SETs in the next basic block,
7988 continue through it. Otherwise, return,
7989 and that block will be scanned individually. */
7990 if (val.nsets * 2 + next_qty > max_qty)
7991 break;
7992
7993 cse_basic_block_start = val.low_cuid;
7994 cse_basic_block_end = val.high_cuid;
7995 to = val.last;
7996
7997 /* Prevent TO from being deleted if it is a label. */
7998 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7999 ++LABEL_NUSES (to);
8000
8001 /* Back up so we process the first insn in the extension. */
8002 insn = PREV_INSN (insn);
8003 }
8004 }
8005
8006 if (next_qty > max_qty)
8007 abort ();
8008
8009 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8010 the previous insn is the only insn that branches to the head of a loop,
8011 we can cse into the loop. Don't do this if we changed the jump
8012 structure of a loop unless we aren't going to be following jumps. */
8013
8014 if ((cse_jumps_altered == 0
8015 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8016 && around_loop && to != 0
8017 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8018 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8019 && JUMP_LABEL (PREV_INSN (to)) != 0
8020 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8021 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8022
8023 return to ? NEXT_INSN (to) : 0;
8024 }
8025 \f
8026 /* Count the number of times registers are used (not set) in X.
8027 COUNTS is an array in which we accumulate the count; INCR is the
8028 amount to add to the count for each register usage. */
8029
8030 static void
8031 count_reg_usage (x, counts, incr)
8032 rtx x;
8033 int *counts;
8034 int incr;
8035 {
8036 enum rtx_code code = GET_CODE (x);
8037 char *fmt;
8038 int i, j;
8039
8040 switch (code)
8041 {
8042 case REG:
8043 counts[REGNO (x)] += incr;
8044 return;
8045
8046 case PC:
8047 case CC0:
8048 case CONST:
8049 case CONST_INT:
8050 case CONST_DOUBLE:
8051 case SYMBOL_REF:
8052 case LABEL_REF:
8053 case CLOBBER:
8054 return;
8055
8056 case SET:
8057 /* Unless we are setting a REG, count everything in SET_DEST. */
8058 if (GET_CODE (SET_DEST (x)) != REG)
8059 count_reg_usage (SET_DEST (x), counts, incr);
8060 count_reg_usage (SET_SRC (x), counts, incr);
8061 return;
8062
8063 case INSN:
8064 case JUMP_INSN:
8065 case CALL_INSN:
8066 count_reg_usage (PATTERN (x), counts, incr);
8067
8068 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8069 use them. */
8070
8071 if (REG_NOTES (x))
8072 count_reg_usage (REG_NOTES (x), counts, incr);
8073 return;
8074
8075 case EXPR_LIST:
8076 case INSN_LIST:
8077 if (REG_NOTE_KIND (x) == REG_EQUAL)
8078 count_reg_usage (XEXP (x, 0), counts, incr);
8079 if (XEXP (x, 1))
8080 count_reg_usage (XEXP (x, 1), counts, incr);
8081 return;
8082 }
8083
8084 fmt = GET_RTX_FORMAT (code);
8085 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8086 {
8087 if (fmt[i] == 'e')
8088 count_reg_usage (XEXP (x, i), counts, incr);
8089 else if (fmt[i] == 'E')
8090 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8091 count_reg_usage (XVECEXP (x, i, j), counts, incr);
8092 }
8093 }
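
/* For example (register numbers hypothetical): for

	(set (reg 5) (plus (reg 5) (reg 6)))

   counts[5] and counts[6] are each incremented once, both for uses in
   the source; the (reg 5) that is the SET_DEST is skipped, so an insn
   whose result is never read leaves its destination's count at zero
   and becomes a candidate for deletion below.  */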
8094 \f
8095 /* Scan all the insns and delete any that are dead; i.e., insns that set
8096 a register that is never used or that copy a register to itself.
8097
8098 This is used to remove insns made obviously dead by cse. It improves the
8099 heuristics in loop since it won't try to move dead invariants out of loops
8100 or make givs for dead quantities. The remaining passes of the compilation
8101 are also sped up. */
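
/* Because the scan below runs backwards and count_reg_usage is applied
   with INCR == -1 to each deleted insn, chains die in one pass: e.g.
   (hypothetical registers) deleting an unused (set (reg 50) (reg 40))
   drops the use count of reg 40 to zero, so an earlier insn whose only
   purpose was to set reg 40 is then deleted as well.  */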
8102
8103 void
8104 delete_dead_from_cse (insns, nreg)
8105 rtx insns;
8106 int nreg;
8107 {
8108 int *counts = (int *) alloca (nreg * sizeof (int));
8109 rtx insn, prev;
8110 rtx tem;
8111 int i;
8112 int in_libcall = 0;
8113
8114 /* First count the number of times each register is used. */
8115 bzero (counts, sizeof (int) * nreg);
8116 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8117 count_reg_usage (insn, counts, 1);
8118
8119 /* Go from the last insn to the first and delete insns that only set unused
8120 registers or copy a register to itself. As we delete an insn, remove
8121 usage counts for registers it uses. */
8122 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8123 {
8124 int live_insn = 0;
8125
8126 prev = prev_real_insn (insn);
8127
8128 /* Don't delete any insns that are part of a libcall block.
8129 Flow or loop might get confused if we did that. Remember
8130 that we are scanning backwards. */
8131 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8132 in_libcall = 1;
8133
8134 if (in_libcall)
8135 live_insn = 1;
8136 else if (GET_CODE (PATTERN (insn)) == SET)
8137 {
8138 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8139 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8140 ;
8141
8142 #ifdef HAVE_cc0
8143 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8144 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8145 && ((tem = next_nonnote_insn (insn)) == 0
8146 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8147 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8148 ;
8149 #endif
8150 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8151 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8152 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8153 || side_effects_p (SET_SRC (PATTERN (insn))))
8154 live_insn = 1;
8155 }
8156 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8157 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8158 {
8159 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8160
8161 if (GET_CODE (elt) == SET)
8162 {
8163 if (GET_CODE (SET_DEST (elt)) == REG
8164 && SET_DEST (elt) == SET_SRC (elt))
8165 ;
8166
8167 #ifdef HAVE_cc0
8168 else if (GET_CODE (SET_DEST (elt)) == CC0
8169 && ! side_effects_p (SET_SRC (elt))
8170 && ((tem = next_nonnote_insn (insn)) == 0
8171 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8172 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8173 ;
8174 #endif
8175 else if (GET_CODE (SET_DEST (elt)) != REG
8176 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8177 || counts[REGNO (SET_DEST (elt))] != 0
8178 || side_effects_p (SET_SRC (elt)))
8179 live_insn = 1;
8180 }
8181 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8182 live_insn = 1;
8183 }
8184 else
8185 live_insn = 1;
8186
8187 /* If this is a dead insn, delete it and show registers in it aren't
8188 being used. */
8189
8190 if (! live_insn)
8191 {
8192 count_reg_usage (insn, counts, -1);
8193 delete_insn (insn);
8194 }
8195
8196 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8197 in_libcall = 0;
8198 }
8199 }