/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <stdio.h>
#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with `qty_mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one
   of the registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the existing entries to be ignored if anyone tries
   to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
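
/* A minimal standalone sketch (illustrative only; never compiled here,
   hence the `#if 0') of the quantity-number scheme described above.
   The register count of 4 and the insn sequence are hypothetical.
   Copying a register propagates its quantity number; any other store
   allocates a fresh one, so only genuine copies stay equivalent.  */
#if 0
#include <stdio.h>

#define MAX_REG 4

static int reg_qty[MAX_REG + 8];
static int next_qty;

static void
start_block ()
{
  int i;

  next_qty = MAX_REG;
  /* Initially reg_qty[N] == N, meaning "no quantity assigned yet".  */
  for (i = 0; i < MAX_REG; i++)
    reg_qty[i] = i;
}

static void
load_reg (reg)		/* REG was loaded in some arbitrary way.  */
     int reg;
{
  reg_qty[reg] = next_qty++;	/* Allocate a fresh quantity.  */
}

static void
copy_reg (dst, src)	/* DST = SRC: the two now share a quantity.  */
     int dst, src;
{
  if (reg_qty[src] == src)	/* SRC had no quantity; give it one.  */
    load_reg (src);
  reg_qty[dst] = reg_qty[src];
}

int
main ()
{
  start_block ();
  load_reg (0);			/* r0 = <something>  */
  copy_reg (1, 0);		/* r1 = r0           */
  load_reg (0);			/* r0 clobbered      */
  /* r1 keeps the old quantity; r0 now has a new, distinct one.  */
  printf ("qty(r0)=%d qty(r1)=%d\n", reg_qty[0], reg_qty[1]);
  return 0;
}
#endif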

/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one of max_reg elements, each containing -1; the other of
   max_reg + 500 (an approximation for max_qty) elements,
   where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static short *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};
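
/* A sketch (illustrative only, compiled out with `#if 0') of the two
   invariants the fields above promise: `first_same_value' reaches the
   cheapest member of an element's class directly, and following
   `next_same_value' from there visits the class in order of
   increasing `cost'.  */
#if 0
static struct table_elt *
cheapest_equiv (elt)
     struct table_elt *elt;
{
  return elt->first_same_value;	/* Head of the class: lowest cost.  */
}

static int
class_is_cost_sorted (elt)
     struct table_elt *elt;
{
  register struct table_elt *p;

  for (p = elt->first_same_value; p && p->next_same_value;
       p = p->next_same_value)
    if (p->cost > p->next_same_value->cost)
      return 0;
  return 1;			/* Always true for a valid class.  */
}
#endif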

#define HASHBITS 16

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS)	\
  : canon_hash (X, M) % NBUCKETS)
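
/* For concreteness (the value of (int) REG below is hypothetical; the
   actual ordering of codes in rtl.def is what matters): if (int) REG
   were 2 and a pseudo's quantity number were 100, its bucket would be
   ((2 << 7) + 100) % 31.  Two pseudos with the same quantity number
   therefore always share a bucket, whatever their register numbers.  */
#if 0
#include <stdio.h>

int
main ()
{
  int reg_code = 2;		/* hypothetical (int) REG */
  int qty = 100;

  printf ("bucket = %d\n", ((reg_code << 7) + qty) % 31);
  return 0;
}
#endif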

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)				\
  (((N) == FRAME_POINTER_REGNUM || fixed_regs[N])	\
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)				\
  ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers are the cheapest with a cost of 0.  Next come pseudos
   with a cost of one and other hard registers with a cost of 2.  Aside
   from these special cases, call `rtx_cost'.  */

#define COST(X)							\
  (GET_CODE (X) == REG						\
   ? (REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1			\
      : (FIXED_REGNO_P (REGNO (X))				\
	 && REGNO_REG_CLASS (REGNO (X)) != NO_REGS) ? 0		\
      : 2)							\
   : rtx_cost (X) * 2)

/* Determine if the quantity number for register N represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all four bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
   invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};
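
/* Spelled out (an illustration, not code from this pass), the four
   cases above set the bits cumulatively:  */
#if 0
  struct write_data w;

  /* Push onto the stack.  */
  w.sp = 1; w.var = 0; w.nonscalar = 0; w.all = 0;
  /* Write at a fixed address.  */
  w.sp = 1; w.var = 1; w.nonscalar = 0; w.all = 0;
  /* Write a structure element at a variable address.  */
  w.sp = 1; w.var = 1; w.nonscalar = 1; w.all = 0;
  /* Write anything else at a variable address.  */
  w.sp = 1; w.var = 1; w.nonscalar = 1; w.all = 1;
#endif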

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx		\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.  */

#define NONZERO_BASE_PLUS_P(X)					\
  (FIXED_BASE_PLUS_P (X)					\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))
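
/* Typical rtl forms these macros accept, written symbolically for
   illustration (SImode and the offsets are arbitrary; printed rtl
   would show machine-specific register numbers instead):

     (reg:SI frame_pointer)                        FIXED_BASE_PLUS_P
     (plus:SI (reg:SI arg_pointer) (const_int 8))  FIXED_BASE_PLUS_P
     (reg:SI stack_pointer)                        NONZERO_BASE_PLUS_P only
     (plus:SI (reg:SI stack_pointer) (const_int 4))
                                                   NONZERO_BASE_PLUS_P only  */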

static struct table_elt *lookup ();
static void free_element ();

static int insert_regs ();
static void rehash_using_reg ();
static void remove_invalid_refs ();
static int exp_equiv_p ();
int refers_to_p ();
int refers_to_mem_p ();
static void invalidate_from_clobbers ();
static int safe_hash ();
static int canon_hash ();
static rtx fold_rtx ();
static rtx equiv_constant ();
static void record_jump_cond ();
static void note_mem_written ();
static int cse_rtx_addr_varies_p ();
static enum rtx_code find_comparison_args ();
static void cse_insn ();
static void cse_set_around_loop ();
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
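
/* Worked out (illustrative):

     COSTS_N_INSNS (1) == 1*4 - 2 == 2    one fast reg-reg insn
     COSTS_N_INSNS (5) == 5*4 - 2 == 18   the MULT default below
     COSTS_N_INSNS (7) == 7*4 - 2 == 26   the DIV/MOD default below  */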

int
rtx_cost (x)
     rtx x;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return 1;
    case SUBREG:
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code);
#endif
      CONST_COSTS (x, code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j));

  return total;
}
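
/* Worked example (illustrative; the register numbers are arbitrary):
   for (plus:SI (reg:SI 60) (reg:SI 61)) the default operation cost is 2
   and each REG operand contributes 1, so rtx_cost returns 4.  COST
   above would then report 8 for the PLUS (rtx_cost times 2) against 1
   for a pseudo holding the same value, which is why the register wins
   in the hash table.  */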
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero (reg_tick, max_reg * sizeof (int));

  bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
  bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero (table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int n = reg_next_eqv[reg];
  register int p = reg_prev_eqv[reg];
  register int q = reg_qty[reg];

  /* If invalid, do nothing.  N and P above are undefined in that case.  */
  if (q == reg)
    return;

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}
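
/* The unlink above is ordinary doubly-linked-list deletion, with
   qty_first_reg[q] and qty_last_reg[q] serving as the head and tail of
   the chain and -1 marking its ends.  The same shape on plain arrays
   (an illustration, compiled out):  */
#if 0
static void
unlink_node (node, next, prev, head, tail)
     int node;
     int *next, *prev, *head, *tail;
{
  int n = next[node], p = prev[node];

  if (n != -1)
    prev[n] = p;
  else
    *tail = p;			/* NODE was the last element.  */
  if (p != -1)
    next[p] = n;
  else
    *head = n;			/* NODE was the first element.  */
}
#endif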

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), 0, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), 0, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      if (modified
	  || ! (REGNO_QTY_VALID_P (regno)
		&& qty_mode[reg_qty[regno]] == GET_MODE (x)))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      /* The register already has a suitable quantity; nothing changed.  */
      return 0;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     int hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }


  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      int subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, 0, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
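
/* The calling convention spelled out above, as merge_equiv_classes
   below actually uses it (repeated here for illustration):  */
#if 0
  if (insert_regs (exp, class1, 0))
    hash = HASH (exp, mode);		/* reg_qty changed: rehash first */
  new = insert (exp, class1, hash, mode);
#endif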
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    hash = HASH (exp, mode);
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x)
     rtx x;
{
  register int i;
  register struct table_elt *p;
  register rtx base;
  register int start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  int in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x));
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();
  base = XEXP (x, 0);
  start = 0;

  /* Registers with nonvarying addresses usually have constant equivalents;
     but the frame pointer register is also possible.  */
  if (GET_CODE (base) == REG
      && REGNO_QTY_VALID_P (REGNO (base))
      && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
      && qty_const[reg_qty[REGNO (base)]] != 0)
    base = qty_const[reg_qty[REGNO (base)]];
  else if (GET_CODE (base) == PLUS
	   && GET_CODE (XEXP (base, 1)) == CONST_INT
	   && GET_CODE (XEXP (base, 0)) == REG
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
	   && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
	       == GET_MODE (XEXP (base, 0)))
	   && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
    {
      start = INTVAL (XEXP (base, 1));
      base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
    }

  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);
  if (GET_CODE (base) == PLUS
      && GET_CODE (XEXP (base, 1)) == CONST_INT)
    {
      start += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  end = start + GET_MODE_SIZE (GET_MODE (x));
  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, 0))
	  remove_from_table (p, i);
      }
}
\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  int hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
\f
/* Remove from the hash table all expressions that reference memory,
   or some of them as specified by *WRITES.  */

static void
invalidate_memory (writes)
     struct write_data *writes;
{
  register int i;
  register struct table_elt *p, *next;
  int all = writes->all;
  int nonscalar = writes->nonscalar;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory
	    && (all
		|| (nonscalar && p->in_struct)
		|| cse_rtx_addr_varies_p (p->exp)))
	  remove_from_table (p, i);
      }
}
\f
/* Remove from the hash table any expressions that are call-clobbered
   registers.  Also update their `reg_tick' values.  */

static void
invalidate_for_call ()
{
  int regno, endregno;
  int i;
  int hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
	delete_reg_equiv (regno);
	if (reg_tick[regno] >= 0)
	  reg_tick[regno]++;

	in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < NBUCKETS; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  if (GET_CODE (p->exp) != REG
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));

	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
\f
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (x, elt)
     rtx x;
     struct table_elt *elt;
{
  register struct table_elt *relt = 0;
  register struct table_elt *p, *q;
  int offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (GET_CODE (q->exp) == REG)
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
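
/* Worked example (the symbol and offsets are hypothetical): if X is
   (const (plus (symbol_ref "tbl") (const_int 16))) and some register R
   is known equivalent to (const (plus (symbol_ref "tbl") (const_int 4))),
   the related-value chain links the two table entries, OFFSET computes
   to 12, and use_related_value returns (plus R (const_int 12)).  */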
\f
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.
   In this case, also store 1 in hash_arg_in_struct
   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

static int
canon_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  register int i, j;
  register int hash = 0;
  register enum rtx_code code;
  register char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);

	/* On some machines, we can't record any non-fixed hard register,
	   because extending its life will cause reload problems.  We
	   consider ap, fp, and sp to be fixed for this purpose.
	   On all machines, we can't record any global registers.  */

	if (regno < FIRST_PSEUDO_REGISTER
	    && (global_regs[regno]
#ifdef SMALL_REGISTER_CLASSES
		|| (! fixed_regs[regno]
		    && regno != FRAME_POINTER_REGNUM
		    && regno != ARG_POINTER_REGNUM
		    && regno != STACK_POINTER_REGNUM)
#endif
		))
	  {
	    do_not_record = 1;
	    return 0;
	  }
	return hash + ((int) REG << 7) + reg_qty[regno];
      }

    case CONST_INT:
      hash += ((int) mode + ((int) CONST_INT << 7)
	       + INTVAL (x) + (INTVAL (x) >> HASHBITS));
      return ((1 << HASHBITS) - 1) & hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (int) code + (int) GET_MODE (x);
      {
	int i;
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    int tem = XINT (x, i);
	    hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	  }
      }
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* Use `and' to ensure a positive number.  */
      return (hash + ((int) LABEL_REF << 7)
	      + ((int) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case SYMBOL_REF:
      return (hash + ((int) SYMBOL_REF << 7)
	      + ((int) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      if (! RTX_UNCHANGING_P (x))
	{
	  hash_arg_in_memory = 1;
	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
	}
      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (int) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (int) code + (int) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);
	  rtx tem1;

	  /* If the operand is a REG that is equivalent to a constant, hash
	     as if we were hashing the constant, since we will be comparing
1844 that way. */
1845 if (tem != 0 && GET_CODE (tem) == REG
1846 && REGNO_QTY_VALID_P (REGNO (tem))
1847 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1848 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1849 && CONSTANT_P (tem1))
1850 tem = tem1;
1851
1852 /* If we are about to do the last recursive call
1853 needed at this level, change it into iteration.
1854 This function is called enough to be worth it. */
1855 if (i == 0)
1856 {
1857 x = tem;
1858 goto repeat;
1859 }
1860 hash += canon_hash (tem, 0);
1861 }
1862 else if (fmt[i] == 'E')
1863 for (j = 0; j < XVECLEN (x, i); j++)
1864 hash += canon_hash (XVECEXP (x, i, j), 0);
1865 else if (fmt[i] == 's')
1866 {
1867 register char *p = XSTR (x, i);
1868 if (p)
1869 while (*p)
1870 {
1871 register int tem = *p++;
1872 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1873 }
1874 }
1875 else if (fmt[i] == 'i')
1876 {
1877 register int tem = XINT (x, i);
1878 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1879 }
1880 else
1881 abort ();
1882 }
1883 return hash;
1884 }
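
/* A minimal, never-compiled sketch of the folding idiom used throughout
   canon_hash: mix the high bits of TEM into its low HASHBITS bits and
   mask so the contribution is always non-negative.  HASHBITS is the
   hash-width macro this file defines elsewhere.  */
#if 0
static int
fold_into_hashbits (tem)
     int tem;
{
  return ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
}
#endif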
1885
1886 /* Like canon_hash but with no side effects. */
1887
1888 static int
1889 safe_hash (x, mode)
1890 rtx x;
1891 enum machine_mode mode;
1892 {
1893 int save_do_not_record = do_not_record;
1894 int save_hash_arg_in_memory = hash_arg_in_memory;
1895 int save_hash_arg_in_struct = hash_arg_in_struct;
1896 int hash = canon_hash (x, mode);
1897 hash_arg_in_memory = save_hash_arg_in_memory;
1898 hash_arg_in_struct = save_hash_arg_in_struct;
1899 do_not_record = save_do_not_record;
1900 return hash;
1901 }
1902 \f
1903 /* Return 1 iff X and Y would canonicalize into the same thing,
1904 without actually constructing the canonicalization of either one.
1905 If VALIDATE is nonzero,
1906 we assume X is an expression being processed from the rtl
1907 and Y was found in the hash table. We check register refs
1908 in Y for being marked as valid.
1909
1910 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1911 that is known to be in the register. Ordinarily, we don't allow them
1912 to match, because letting them match would cause unpredictable results
1913 in all the places that search a hash table chain for an equivalent
1914 for a given value. A possible equivalent that has different structure
1915 has its hash code computed from different data. Whether the hash code
1916 is the same as that of the given value is pure luck. */
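
/* For example (illustrative): with EQUAL_VALUES nonzero, (reg 66) can
   match (const_int 2) when reg 66 is currently known to hold 2 in its
   own mode; and since PLUS is commutative, (plus (reg 65) (const_int 1))
   matches (plus (const_int 1) (reg 65)).  */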
1917
1918 static int
1919 exp_equiv_p (x, y, validate, equal_values)
1920 rtx x, y;
1921 int validate;
1922 int equal_values;
1923 {
1924 register int i;
1925 register enum rtx_code code;
1926 register char *fmt;
1927
1928 /* Note: it is incorrect to assume an expression is equivalent to itself
1929 if VALIDATE is nonzero. */
1930 if (x == y && !validate)
1931 return 1;
1932 if (x == 0 || y == 0)
1933 return x == y;
1934
1935 code = GET_CODE (x);
1936 if (code != GET_CODE (y))
1937 {
1938 if (!equal_values)
1939 return 0;
1940
1941 /* If X is a constant and Y is a register or vice versa, they may be
1942 equivalent. We only have to validate if Y is a register. */
1943 if (CONSTANT_P (x) && GET_CODE (y) == REG
1944 && REGNO_QTY_VALID_P (REGNO (y))
1945 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
1946 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
1947 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
1948 return 1;
1949
1950 if (CONSTANT_P (y) && code == REG
1951 && REGNO_QTY_VALID_P (REGNO (x))
1952 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
1953 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
1954 return 1;
1955
1956 return 0;
1957 }
1958
1959 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1960 if (GET_MODE (x) != GET_MODE (y))
1961 return 0;
1962
1963 switch (code)
1964 {
1965 case PC:
1966 case CC0:
1967 return x == y;
1968
1969 case CONST_INT:
1970 return XINT (x, 0) == XINT (y, 0);
1971
1972 case LABEL_REF:
1973 case SYMBOL_REF:
1974 return XEXP (x, 0) == XEXP (y, 0);
1975
1976 case REG:
1977 {
1978 int regno = REGNO (y);
1979 int endregno
1980 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1981 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
1982 int i;
1983
1984 /* If the quantities are not the same, the expressions are not
1985 equivalent. If they are and we are not to validate, they
1986 are equivalent. Otherwise, ensure all regs are up-to-date. */
1987
1988 if (reg_qty[REGNO (x)] != reg_qty[regno])
1989 return 0;
1990
1991 if (! validate)
1992 return 1;
1993
1994 for (i = regno; i < endregno; i++)
1995 if (reg_in_table[i] != reg_tick[i])
1996 return 0;
1997
1998 return 1;
1999 }
2000
2001 /* For commutative operations, check both orders. */
2002 case PLUS:
2003 case MULT:
2004 case AND:
2005 case IOR:
2006 case XOR:
2007 case NE:
2008 case EQ:
2009 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2010 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2011 validate, equal_values))
2012 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2013 validate, equal_values)
2014 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2015 validate, equal_values)));
2016 }
2017
2018 /* Compare the elements. If any pair of corresponding elements
2019 fails to match, return 0 for the whole thing. */
2020
2021 fmt = GET_RTX_FORMAT (code);
2022 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2023 {
2024 if (fmt[i] == 'e')
2025 {
2026 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2027 return 0;
2028 }
2029 else if (fmt[i] == 'E')
2030 {
2031 int j;
2032 if (XVECLEN (x, i) != XVECLEN (y, i))
2033 return 0;
2034 for (j = 0; j < XVECLEN (x, i); j++)
2035 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2036 validate, equal_values))
2037 return 0;
2038 }
2039 else if (fmt[i] == 's')
2040 {
2041 if (strcmp (XSTR (x, i), XSTR (y, i)))
2042 return 0;
2043 }
2044 else if (fmt[i] == 'i')
2045 {
2046 if (XINT (x, i) != XINT (y, i))
2047 return 0;
2048 }
2049 else if (fmt[i] != '0')
2050 abort ();
2051 }
2052 return 1;
2053 }
2054 \f
2055 /* Return 1 iff any subexpression of X matches Y.
2056 Here we do not require that X or Y be valid (for registers referred to)
2057 for being in the hash table. */
2058
2059 int
2060 refers_to_p (x, y)
2061 rtx x, y;
2062 {
2063 register int i;
2064 register enum rtx_code code;
2065 register char *fmt;
2066
2067 repeat:
2068 if (x == y)
2069 return 1;
2070 if (x == 0 || y == 0)
2071 return 0;
2072
2073 code = GET_CODE (x);
2074 /* If X as a whole has the same code as Y, they may match.
2075 If so, return 1. */
2076 if (code == GET_CODE (y))
2077 {
2078 if (exp_equiv_p (x, y, 0, 1))
2079 return 1;
2080 }
2081
2082 /* X does not match, so try its subexpressions. */
2083
2084 fmt = GET_RTX_FORMAT (code);
2085 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2086 if (fmt[i] == 'e')
2087 {
2088 if (i == 0)
2089 {
2090 x = XEXP (x, 0);
2091 goto repeat;
2092 }
2093 else
2094 if (refers_to_p (XEXP (x, i), y))
2095 return 1;
2096 }
2097 else if (fmt[i] == 'E')
2098 {
2099 int j;
2100 for (j = 0; j < XVECLEN (x, i); j++)
2101 if (refers_to_p (XVECEXP (x, i, j), y))
2102 return 1;
2103 }
2104
2105 return 0;
2106 }
2107 \f
2108 /* Return 1 iff any subexpression of X refers to memory
2109 at an address of BASE plus some offset
2110 such that any of the bytes' offsets fall between START (inclusive)
2111 and END (exclusive).
2112
2113 The value is undefined if X is a varying address.
2114 This function is not used in such cases.
2115
2116 When used in the cse pass, `qty_const' is nonzero, and it is used
2117 to treat an address that is a register with a known constant value
2118 as if it were that constant value.
2119 In the loop pass, `qty_const' is zero, so this is not done. */
2120
2121 int
2122 refers_to_mem_p (x, base, start, end)
2123 rtx x, base;
2124 int start, end;
2125 {
2126 register int i;
2127 register enum rtx_code code;
2128 register char *fmt;
2129
2130 if (GET_CODE (base) == CONST_INT)
2131 {
2132 start += INTVAL (base);
2133 end += INTVAL (base);
2134 base = const0_rtx;
2135 }
2136
2137 repeat:
2138 if (x == 0)
2139 return 0;
2140
2141 code = GET_CODE (x);
2142 if (code == MEM)
2143 {
2144 register rtx addr = XEXP (x, 0); /* Get the address. */
2145 int myend;
2146
2147 i = 0;
2148 if (GET_CODE (addr) == REG
2149 /* qty_const is 0 when outside the cse pass;
2150 at such times, this info is not available. */
2151 && qty_const != 0
2152 && REGNO_QTY_VALID_P (REGNO (addr))
2153 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
2154 && qty_const[reg_qty[REGNO (addr)]] != 0)
2155 addr = qty_const[reg_qty[REGNO (addr)]];
2156 else if (GET_CODE (addr) == PLUS
2157 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2158 && GET_CODE (XEXP (addr, 0)) == REG
2159 && qty_const != 0
2160 && REGNO_QTY_VALID_P (REGNO (XEXP (addr, 0)))
2161 && (GET_MODE (XEXP (addr, 0))
2162 == qty_mode[reg_qty[REGNO (XEXP (addr, 0))]])
2163 && qty_const[reg_qty[REGNO (XEXP (addr, 0))]])
2164 {
2165 i = INTVAL (XEXP (addr, 1));
2166 addr = qty_const[reg_qty[REGNO (XEXP (addr, 0))]];
2167 }
2168
2169 check_addr:
2170 if (GET_CODE (addr) == CONST)
2171 addr = XEXP (addr, 0);
2172
2173 /* If ADDR is BASE, or BASE plus an integer, add
2174 the integer to I. */
2175 if (GET_CODE (addr) == PLUS
2176 && XEXP (addr, 0) == base
2177 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2178 i += INTVAL (XEXP (addr, 1));
2179 else if (GET_CODE (addr) == LO_SUM)
2180 {
2181 if (GET_CODE (base) != LO_SUM)
2182 return 1;
2183 /* The REG component of the LO_SUM is known by the
2184 const value in the XEXP part. */
2185 addr = XEXP (addr, 1);
2186 base = XEXP (base, 1);
2187 i = 0;
2188 if (GET_CODE (base) == CONST)
2189 base = XEXP (base, 0);
2190 if (GET_CODE (base) == PLUS
2191 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2192 {
2193 int tem = INTVAL (XEXP (base, 1));
2194 start += tem;
2195 end += tem;
2196 base = XEXP (base, 0);
2197 }
2198 goto check_addr;
2199 }
2200 else if (GET_CODE (base) == LO_SUM)
2201 {
2202 base = XEXP (base, 1);
2203 if (GET_CODE (base) == CONST)
2204 base = XEXP (base, 0);
2205 if (GET_CODE (base) == PLUS
2206 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2207 {
2208 int tem = INTVAL (XEXP (base, 1));
2209 start += tem;
2210 end += tem;
2211 base = XEXP (base, 0);
2212 }
2213 goto check_addr;
2214 }
2215 else if (GET_CODE (addr) == CONST_INT && base == const0_rtx)
2216 i = INTVAL (addr);
2217 else if (addr != base)
2218 return 0;
2219
2220 myend = i + GET_MODE_SIZE (GET_MODE (x));
2221 return myend > start && i < end;
2222 }
2223
2224 /* X does not match, so try its subexpressions. */
2225
2226 fmt = GET_RTX_FORMAT (code);
2227 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2228 if (fmt[i] == 'e')
2229 {
2230 if (i == 0)
2231 {
2232 x = XEXP (x, 0);
2233 goto repeat;
2234 }
2235 else
2236 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2237 return 1;
2238 }
2239 else if (fmt[i] == 'E')
2240 {
2241 int j;
2242 for (j = 0; j < XVECLEN (x, i); j++)
2243 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2244 return 1;
2245 }
2246
2247 return 0;
2248 }
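
/* A never-compiled sketch of the overlap test at the end of the MEM
   case above, with hypothetical names: a SIZE-byte reference at offset
   I overlaps the half-open range [START, END) exactly when it ends
   after START and starts before END.  */
#if 0
static int
bytes_overlap_p (i, size, start, end)
     int i, size, start, end;
{
  return i + size > start && i < end;
}
#endif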
2249
2250 /* Nonzero if X refers to memory at a varying address;
2251 except that a register which has at the moment a known constant value
2252 isn't considered variable. */
2253
2254 static int
2255 cse_rtx_addr_varies_p (x)
2256 rtx x;
2257 {
2258 /* We need not check for X and the equivalence class being of the same
2259 mode because if X is equivalent to a constant in some mode, it
2260 doesn't vary in any mode. */
2261
2262 if (GET_CODE (x) == MEM
2263 && GET_CODE (XEXP (x, 0)) == REG
2264 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2265 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2266 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2267 return 0;
2268
2269 if (GET_CODE (x) == MEM
2270 && GET_CODE (XEXP (x, 0)) == PLUS
2271 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2272 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2273 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2274 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2275 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2276 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2277 return 0;
2278
2279 return rtx_addr_varies_p (x);
2280 }
2281 \f
2282 /* Canonicalize an expression:
2283 replace each register reference inside it
2284 with the "oldest" equivalent register.
2285
2286 If INSN is non-zero and we are replacing a pseudo with a hard register
2287 or vice versa, verify that INSN remains valid after we make our
2288 substitution. */
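
/* For example (illustrative): if (reg 70) was copied from (reg 65) and
   both registers are still valid, uses of (reg 70) are rewritten as
   (reg 65), the oldest register in the quantity's chain, which exposes
   more matches in the hash table.  */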
2289
2290 static rtx
2291 canon_reg (x, insn)
2292 rtx x;
2293 rtx insn;
2294 {
2295 register int i;
2296 register enum rtx_code code;
2297 register char *fmt;
2298
2299 if (x == 0)
2300 return x;
2301
2302 code = GET_CODE (x);
2303 switch (code)
2304 {
2305 case PC:
2306 case CC0:
2307 case CONST:
2308 case CONST_INT:
2309 case CONST_DOUBLE:
2310 case SYMBOL_REF:
2311 case LABEL_REF:
2312 case ADDR_VEC:
2313 case ADDR_DIFF_VEC:
2314 return x;
2315
2316 case REG:
2317 {
2318 register int first;
2319
2320 /* Never replace a hard reg, because hard regs can appear
2321 in more than one machine mode, and we must preserve the mode
2322 of each occurrence. Also, some hard regs appear in
2323 MEMs that are shared and mustn't be altered. Don't try to
2324 replace any reg that maps to a reg of class NO_REGS. */
2325 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2326 || ! REGNO_QTY_VALID_P (REGNO (x)))
2327 return x;
2328
2329 first = qty_first_reg[reg_qty[REGNO (x)]];
2330 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2331 : REGNO_REG_CLASS (first) == NO_REGS ? x
2332 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2333 }
2334 }
2335
2336 fmt = GET_RTX_FORMAT (code);
2337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2338 {
2339 register int j;
2340
2341 if (fmt[i] == 'e')
2342 {
2343 rtx new = canon_reg (XEXP (x, i), insn);
2344
2345 /* If replacing pseudo with hard reg or vice versa, ensure the
2346 insn remains valid. */
2347 if (new && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2348 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
2349 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER)))
2350 validate_change (insn, &XEXP (x, i), new, 0);
2351 else
2352 XEXP (x, i) = new;
2353 }
2354 else if (fmt[i] == 'E')
2355 for (j = 0; j < XVECLEN (x, i); j++)
2356 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2357 }
2358
2359 return x;
2360 }
2361 \f
2362 /* LOC is a location within INSN that is an operand address (the contents of
2363 a MEM). Find the best equivalent address to use that is valid for this
2364 insn.
2365
2366 On most CISC machines, complicated address modes are costly, and rtx_cost
2367 is a good approximation for that cost. However, most RISC machines have
2368 only a few (usually only one) memory reference formats. If an address is
2369 valid at all, it is often just as cheap as any other address. Hence, for
2370 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2371 costs of various addresses. For two addresses of equal cost, choose the one
2372 with the highest `rtx_cost' value as that has the potential of eliminating
2373 the most insns. For equal costs, we choose the first in the equivalence
2374 class. Note that we ignore the fact that pseudo registers are cheaper
2375 than hard registers here because we would also prefer the pseudo registers.
2376 */
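
/* For instance (costs here are hypothetical): if (reg 65) and
   (plus (reg 64) (const_int 4)) have equal ADDRESS_COST on some target,
   we prefer the PLUS form; its higher rtx_cost means the insn that
   computed reg 65 from it may become dead and be deleted.  */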
2377
2378 void
2379 find_best_addr (insn, loc)
2380 rtx insn;
2381 rtx *loc;
2382 {
2383 struct table_elt *elt, *p;
2384 rtx addr = *loc;
2385 int our_cost;
2386 int found_better = 1;
2387 int save_do_not_record = do_not_record;
2388 int save_hash_arg_in_memory = hash_arg_in_memory;
2389 int save_hash_arg_in_struct = hash_arg_in_struct;
2390 int hash_code;
2391 int addr_volatile;
2392 int regno;
2393
2394 /* Do not try to replace constant addresses or addresses of local and
2395 argument slots. These MEM expressions are made only once and inserted
2396 in many instructions, as well as being used to control symbol table
2397 output. It is not safe to clobber them.
2398
2399 There are some uncommon cases where the address is already in a register
2400 for some reason, but we cannot take advantage of that because we have
2401 no easy way to unshare the MEM. In addition, looking up all stack
2402 addresses is costly. */
2403 if ((GET_CODE (addr) == PLUS
2404 && GET_CODE (XEXP (addr, 0)) == REG
2405 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2406 && (regno = REGNO (XEXP (addr, 0)),
2407 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2408 || (GET_CODE (addr) == REG
2409 && (regno = REGNO (addr),
2410 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2411 || CONSTANT_ADDRESS_P (addr))
2412 return;
2413
2414 /* If this address is not simply a register, try to fold it. This will
2415 sometimes simplify the expression. Many simplifications
2416 will not be valid, but some, usually applying the associative rule, will
2417 be valid and produce better code. */
2418 if (GET_CODE (addr) != REG
2419 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2420 addr = *loc;
2421
2422 /* If this address is not in the hash table, we can't do any better.
2423 Also, ignore if volatile. */
2424 do_not_record = 0;
2425 hash_code = HASH (addr, Pmode);
2426 addr_volatile = do_not_record;
2427 do_not_record = save_do_not_record;
2428 hash_arg_in_memory = save_hash_arg_in_memory;
2429 hash_arg_in_struct = save_hash_arg_in_struct;
2430
2431 if (addr_volatile)
2432 return;
2433
2434 elt = lookup (addr, hash_code, Pmode);
2435
2436 if (elt == 0)
2437 return;
2438
2439 #ifndef ADDRESS_COST
2440 our_cost = elt->cost;
2441
2442 /* Find the lowest cost below ours that works. */
2443 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2444 if (elt->cost < our_cost
2445 && (GET_CODE (elt->exp) == REG || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2446 && validate_change (insn, loc, canon_reg (copy_rtx (elt->exp), 0), 0))
2447 return;
2448
2449 #else
2450
2451 /* We need to find the best (under the criteria documented above) entry in
2452 the class that is valid. We use the `flag' field to indicate choices
2453 that were invalid and iterate until we can't find a better one that
2454 hasn't already been tried. */
2455
2456 for (p = elt->first_same_value; p; p = p->next_same_value)
2457 p->flag = 0;
2458
2459 while (found_better)
2460 {
2461 int best_addr_cost = ADDRESS_COST (*loc);
2462 int best_rtx_cost = (elt->cost + 1) >> 1;
2463 struct table_elt *best_elt = elt;
2464
2465 found_better = 0;
2466 for (p = elt->first_same_value; p; p = p->next_same_value)
2467 if (! p->flag
2468 && (GET_CODE (p->exp) == REG || exp_equiv_p (p->exp, p->exp, 1, 0))
2469 && (ADDRESS_COST (p->exp) < best_addr_cost
2470 || (ADDRESS_COST (p->exp) == best_addr_cost
2471 && (p->cost + 1) >> 1 > best_rtx_cost)))
2472 {
2473 found_better = 1;
2474 best_addr_cost = ADDRESS_COST (p->exp);
2475 best_rtx_cost = (p->cost + 1) >> 1;
2476 best_elt = p;
2477 }
2478
2479 if (found_better)
2480 {
2481 if (validate_change (insn, loc,
2482 canon_reg (copy_rtx (best_elt->exp), 0), 0))
2483 return;
2484 else
2485 best_elt->flag = 1;
2486 }
2487 }
2488 #endif
2489 }
2490 \f
2491 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2492 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2493 find what values are being compared.
2494
2495 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2496 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2497 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2498 compared to produce cc0.
2499
2500 The return value is the comparison operator and is either the code of
2501 CODE or the code corresponding to the inverse of the comparison. */
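
/* A hypothetical cc0 example: after (set (cc0) (compare (reg 65)
   (reg 66))), a test (gt (cc0) (const_int 0)) arrives here with
   *PARG1 = (cc0) and *PARG2 = (const_int 0); we follow the equivalence
   and leave with *PARG1 = (reg 65), *PARG2 = (reg 66), returning GT.  */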
2502
2503 static enum rtx_code
2504 find_comparison_args (code, parg1, parg2)
2505 enum rtx_code code;
2506 rtx *parg1, *parg2;
2507 {
2508 rtx arg1, arg2;
2509
2510 arg1 = *parg1, arg2 = *parg2;
2511
2512 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2513
2514 while (arg2 == const0_rtx)
2515 {
2516 /* Set non-zero when we find something of interest. */
2517 rtx x = 0;
2518 int reverse_code = 0;
2519 struct table_elt *p = 0;
2520
2521 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2522 On machines with CC0, this is the only case that can occur, since
2523 fold_rtx will return the COMPARE or item being compared with zero
2524 when given CC0. */
2525
2526 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2527 x = arg1;
2528
2529 /* If ARG1 is a comparison operator and CODE is testing for
2530 STORE_FLAG_VALUE, get the inner arguments. */
2531
2532 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2533 {
2534 if (code == NE || (code == LT && STORE_FLAG_VALUE == -1))
2535 x = arg1;
2536 else if (code == EQ || (code == GE && STORE_FLAG_VALUE == -1))
2537 x = arg1, reverse_code = 1;
2538 }
2539
2540 /* ??? We could also check for
2541
2542 (ne (and (eq (...) (const_int 1))) (const_int 0))
2543
2544 and related forms, but let's wait until we see them occurring. */
2545
2546 if (x == 0)
2547 /* Look up ARG1 in the hash table and see if it has an equivalence
2548 that lets us see what is being compared. */
2549 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2550 GET_MODE (arg1));
2551 if (p) p = p->first_same_value;
2552
2553 for (; p; p = p->next_same_value)
2554 {
2555 enum machine_mode inner_mode = GET_MODE (p->exp);
2556
2557 /* If the entry isn't valid, skip it. */
2558 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2559 continue;
2560
2561 if (GET_CODE (p->exp) == COMPARE
2562 /* Another possibility is that this machine has a compare insn
2563 that includes the comparison code. In that case, ARG1 would
2564 be equivalent to a comparison operation that would set ARG1 to
2565 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2566 ORIG_CODE is the actual comparison being done; if it is an EQ,
2567 we must reverse ORIG_CODE. On machines with a negative value
2568 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2569 || ((code == NE
2570 || (code == LT
2571 && inner_mode != VOIDmode
2572 && GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_INT
2573 && (STORE_FLAG_VALUE
2574 & (1 << (GET_MODE_BITSIZE (inner_mode) - 1)))))
2575 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2576 {
2577 x = p->exp;
2578 break;
2579 }
2580 else if ((code == EQ
2581 || (code == GE
2582 && inner_mode != VOIDmode
2583 && GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_INT
2584 && (STORE_FLAG_VALUE
2585 & (1 << (GET_MODE_BITSIZE (inner_mode) - 1)))))
2586 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2587 {
2588 reverse_code = 1;
2589 x = p->exp;
2590 break;
2591 }
2592
2593 /* If this is fp + constant, the equivalent is a better operand since
2594 it may let us predict the value of the comparison. */
2595 else if (NONZERO_BASE_PLUS_P (p->exp))
2596 {
2597 arg1 = p->exp;
2598 continue;
2599 }
2600 }
2601
2602 /* If we didn't find a useful equivalence for ARG1, we are done.
2603 Otherwise, set up for the next iteration. */
2604 if (x == 0)
2605 break;
2606
2607 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2608 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2609 code = GET_CODE (x);
2610
2611 if (reverse_code)
2612 code = reverse_condition (code);
2613 }
2614
2615 /* Return our results. */
2616 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2617
2618 return code;
2619 }
2620 \f
2621 /* Try to simplify a unary operation CODE whose output mode is to be
2622 MODE with input operand OP whose mode was originally OP_MODE.
2623 Return zero if no simplification can be made. */
2624
2625 rtx
2626 simplify_unary_operation (code, mode, op, op_mode)
2627 enum rtx_code code;
2628 enum machine_mode mode;
2629 rtx op;
2630 enum machine_mode op_mode;
2631 {
2632 register int width = GET_MODE_BITSIZE (mode);
2633
2634 /* The order of these tests is critical so that, for example, we don't
2635 check the wrong mode (input vs. output) for a conversion operation,
2636 such as FIX. At some point, this should be simplified. */
2637
2638 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2639 if (code == FLOAT && GET_CODE (op) == CONST_INT)
2640 {
2641 REAL_VALUE_TYPE d;
2642
2643 #ifdef REAL_ARITHMETIC
2644 REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
2645 #else
2646 d = (double) INTVAL (op);
2647 #endif
2648 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2649 }
2650 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
2651 {
2652 REAL_VALUE_TYPE d;
2653
2654 #ifdef REAL_ARITHMETIC
2655 REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
2656 #else
2657 d = (double) (unsigned int) INTVAL (op);
2658 #endif
2659 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2660 }
2661
2662 else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
2663 && GET_MODE (op) == VOIDmode)
2664 {
2665 REAL_VALUE_TYPE d;
2666
2667 #ifdef REAL_ARITHMETIC
2668 REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
2669 #else
2670 if (CONST_DOUBLE_HIGH (op) < 0)
2671 {
2672 d = (double) (~ CONST_DOUBLE_HIGH (op));
2673 d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
2674 * (double) (1 << (HOST_BITS_PER_INT / 2)));
2675 d += (double) (unsigned) (~ CONST_DOUBLE_LOW (op));
2676 d = (- d - 1.0);
2677 }
2678 else
2679 {
2680 d = (double) CONST_DOUBLE_HIGH (op);
2681 d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
2682 * (double) (1 << (HOST_BITS_PER_INT / 2)));
2683 d += (double) (unsigned) CONST_DOUBLE_LOW (op);
2684 }
2685 #endif /* REAL_ARITHMETIC */
2686 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2687 }
2688 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
2689 && GET_MODE (op) == VOIDmode)
2690 {
2691 REAL_VALUE_TYPE d;
2692
2693 #ifdef REAL_ARITHMETIC
2694 REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
2695 CONST_DOUBLE_HIGH (op));
2696 #else
2697 d = (double) CONST_DOUBLE_HIGH (op);
2698 d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
2699 * (double) (1 << (HOST_BITS_PER_INT / 2)));
2700 d += (double) (unsigned) CONST_DOUBLE_LOW (op);
2701 #endif /* REAL_ARITHMETIC */
2702 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2703 }
2704 #endif
2705
2706 else if (GET_CODE (op) == CONST_INT
2707 && width <= HOST_BITS_PER_INT && width > 0)
2708 {
2709 register int arg0 = INTVAL (op);
2710 register int val;
2711
2712 switch (code)
2713 {
2714 case NOT:
2715 val = ~ arg0;
2716 break;
2717
2718 case NEG:
2719 val = - arg0;
2720 break;
2721
2722 case ABS:
2723 val = (arg0 >= 0 ? arg0 : - arg0);
2724 break;
2725
2726 case FFS:
2727 /* Don't use ffs here. Instead, get low order bit and then its
2728 number. If arg0 is zero, this will return 0, as desired. */
2729 arg0 &= GET_MODE_MASK (mode);
2730 val = exact_log2 (arg0 & (- arg0)) + 1;
2731 break;
2732
2733 case TRUNCATE:
2734 val = arg0;
2735 break;
2736
2737 case ZERO_EXTEND:
2738 if (op_mode == VOIDmode)
2739 op_mode = mode;
2740 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_INT)
2741 val = arg0;
2742 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_INT)
2743 val = arg0 & ~((-1) << GET_MODE_BITSIZE (op_mode));
2744 else
2745 return 0;
2746 break;
2747
2748 case SIGN_EXTEND:
2749 if (op_mode == VOIDmode)
2750 op_mode = mode;
2751 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_INT)
2752 val = arg0;
2753 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_INT)
2754 {
2755 val = arg0 & ~((-1) << GET_MODE_BITSIZE (op_mode));
2756 if (val & (1 << (GET_MODE_BITSIZE (op_mode) - 1)))
2757 val -= 1 << GET_MODE_BITSIZE (op_mode);
2758 }
2759 else
2760 return 0;
2761 break;
2762
2763 case SQRT:
2764 return 0;
2765
2766 default:
2767 abort ();
2768 }
2769
2770 /* Clear the bits that don't belong in our mode,
2771 unless they and our sign bit are all one.
2772 So we get either a reasonable negative value or a reasonable
2773 unsigned value for this mode. */
2774 if (width < HOST_BITS_PER_INT
2775 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
2776 val &= (1 << width) - 1;
2777
2778 return gen_rtx (CONST_INT, VOIDmode, val);
2779 }
2780
2781 /* We can do some operations on integer CONST_DOUBLEs. Also allow
2782 for a DImode operation on a CONST_INT. */
2783 else if (GET_MODE (op) == VOIDmode
2784 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2785 {
2786 int l1, h1, lv, hv;
2787
2788 if (GET_CODE (op) == CONST_DOUBLE)
2789 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
2790 else
2791 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
2792
2793 switch (code)
2794 {
2795 case NOT:
2796 lv = ~ l1;
2797 hv = ~ h1;
2798 break;
2799
2800 case NEG:
2801 neg_double (l1, h1, &lv, &hv);
2802 break;
2803
2804 case ABS:
2805 if (h1 < 0)
2806 neg_double (l1, h1, &lv, &hv);
2807 else
2808 lv = l1, hv = h1;
2809 break;
2810
2811 case FFS:
2812 hv = 0;
2813 if (l1 == 0)
2814 lv = HOST_BITS_PER_INT + exact_log2 (h1 & (-h1)) + 1;
2815 else
2816 lv = exact_log2 (l1 & (-l1)) + 1;
2817 break;
2818
2819 case TRUNCATE:
2820 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
2821 return gen_rtx (CONST_INT, VOIDmode, l1 & GET_MODE_MASK (mode));
2822 else
2823 return 0;
2824 break;
2825
2826 case SQRT:
2827 return 0;
2828
2829 default:
2830 return 0;
2831 }
2832
2833 return immed_double_const (lv, hv, mode);
2834 }
2835
2836 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2837 else if (GET_CODE (op) == CONST_DOUBLE
2838 && GET_MODE_CLASS (mode) == MODE_FLOAT)
2839 {
2840 REAL_VALUE_TYPE d;
2841 jmp_buf handler;
2842 rtx x;
2843
2844 if (setjmp (handler))
2845 /* There used to be a warning here, but that is inadvisable.
2846 People may want to cause traps, and the natural way
2847 to do it should not get a warning. */
2848 return 0;
2849
2850 set_float_handler (handler);
2851
2852 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
2853
2854 switch (code)
2855 {
2856 case NEG:
2857 d = REAL_VALUE_NEGATE (d);
2858 break;
2859
2860 case ABS:
2861 if (REAL_VALUE_NEGATIVE (d))
2862 d = REAL_VALUE_NEGATE (d);
2863 break;
2864
2865 case FLOAT_TRUNCATE:
2866 d = (double) REAL_VALUE_TRUNCATE (mode, d);
2867 break;
2868
2869 case FLOAT_EXTEND:
2870 /* All this does is change the mode. */
2871 break;
2872
2873 case FIX:
2874 d = (double) REAL_VALUE_FIX_TRUNCATE (d);
2875 break;
2876
2877 case UNSIGNED_FIX:
2878 d = (double) REAL_VALUE_UNSIGNED_FIX_TRUNCATE (d);
2879 break;
2880
2881 case SQRT:
2882 return 0;
2883
2884 default:
2885 abort ();
2886 }
2887
2888 x = immed_real_const_1 (d, mode);
2889 set_float_handler (0);
2890 return x;
2891 }
2892 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
2893 && width <= HOST_BITS_PER_INT && width > 0)
2894 {
2895 REAL_VALUE_TYPE d;
2896 jmp_buf handler;
2897 rtx x;
2898 int val;
2899
2900 if (setjmp (handler))
2901 return 0;
2902
2903 set_float_handler (handler);
2904
2905 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
2906
2907 switch (code)
2908 {
2909 case FIX:
2910 val = REAL_VALUE_FIX (d);
2911 break;
2912
2913 case UNSIGNED_FIX:
2914 val = REAL_VALUE_UNSIGNED_FIX (d);
2915 break;
2916
2917 default:
2918 abort ();
2919 }
2920
2921 set_float_handler (0);
2922
2923 /* Clear the bits that don't belong in our mode,
2924 unless they and our sign bit are all one.
2925 So we get either a reasonable negative value or a reasonable
2926 unsigned value for this mode. */
2927 if (width < HOST_BITS_PER_INT
2928 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
2929 val &= (1 << width) - 1;
2930
2931 return gen_rtx (CONST_INT, VOIDmode, val);
2932 }
2933 #endif
2934 else if (GET_MODE_CLASS (mode) == MODE_INT
2935 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
2936 {
2937 /* There are some simplifications we can do even if the operands
2938 aren't constant, but they don't apply to floating-point
2939 unless the target format is not IEEE. */
2940 switch (code)
2941 {
2942 case NEG:
2943 case NOT:
2944 /* (not (not X)) == X, similarly for NEG. */
2945 if (GET_CODE (op) == code)
2946 return XEXP (op, 0);
2947 break;
2948
2949 case SIGN_EXTEND:
2950 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
2951 becomes just the MINUS if its mode is MODE. This allows
2952 folding switch statements on machines using casesi (such as
2953 the Vax). */
2954 if (GET_CODE (op) == TRUNCATE
2955 && GET_MODE (XEXP (op, 0)) == mode
2956 && GET_CODE (XEXP (op, 0)) == MINUS
2957 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
2958 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
2959 return XEXP (op, 0);
2960 break;
2961 }
2962
2963 return 0;
2964 }
2965 else
2966 return 0;
2967 }
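
/* A never-compiled sketch of the SIGN_EXTEND arithmetic above, for a
   hypothetical narrow width W < HOST_BITS_PER_INT: mask down to W bits,
   then subtract 1 << W when the narrow sign bit is set.  For example,
   W == 8 and ARG0 == 255 yield -1.  */
#if 0
static int
sign_extend_narrow (arg0, w)
     int arg0, w;
{
  int val = arg0 & ~((-1) << w);	/* keep only the low W bits */
  if (val & (1 << (w - 1)))		/* narrow sign bit set?  */
    val -= 1 << w;			/* wrap to the negative range */
  return val;
}
#endif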
2968 \f
2969 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
2970 and OP1. Return 0 if no simplification is possible.
2971
2972 Don't use this for relational operations such as EQ or LT.
2973 Use simplify_relational_operation instead. */
2974
2975 rtx
2976 simplify_binary_operation (code, mode, op0, op1)
2977 enum rtx_code code;
2978 enum machine_mode mode;
2979 rtx op0, op1;
2980 {
2981 register int arg0, arg1, arg0s, arg1s;
2982 int val;
2983 int width = GET_MODE_BITSIZE (mode);
2984
2985 /* Relational operations don't work here. We must know the mode
2986 of the operands in order to do the comparison correctly.
2987 Assuming a full word can give incorrect results.
2988 Consider comparing 128 with -128 in QImode. */
2989
2990 if (GET_RTX_CLASS (code) == '<')
2991 abort ();
2992
2993 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2994 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2995 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
2996 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
2997 {
2998 REAL_VALUE_TYPE f0, f1, value;
2999 jmp_buf handler;
3000
3001 if (setjmp (handler))
3002 return 0;
3003
3004 set_float_handler (handler);
3005
3006 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3007 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3008 f0 = REAL_VALUE_TRUNCATE (mode, f0);
3009 f1 = REAL_VALUE_TRUNCATE (mode, f1);
3010
3011 #ifdef REAL_ARITHMETIC
3012 REAL_ARITHMETIC (value, code, f0, f1);
3013 #else
3014 switch (code)
3015 {
3016 case PLUS:
3017 value = f0 + f1;
3018 break;
3019 case MINUS:
3020 value = f0 - f1;
3021 break;
3022 case MULT:
3023 value = f0 * f1;
3024 break;
3025 case DIV:
3026 #ifndef REAL_INFINITY
3027 if (f1 == 0)
3028 abort ();
3029 #endif
3030 value = f0 / f1;
3031 break;
3032 case SMIN:
3033 value = MIN (f0, f1);
3034 break;
3035 case SMAX:
3036 value = MAX (f0, f1);
3037 break;
3038 default:
3039 abort ();
3040 }
3041 #endif
3042
3043 set_float_handler (0);
3044 value = REAL_VALUE_TRUNCATE (mode, value);
3045 return immed_real_const_1 (value, mode);
3046 }
3047
3048 /* We can fold some multi-word operations. */
3049 else if (GET_MODE_CLASS (mode) == MODE_INT
3050 && GET_CODE (op0) == CONST_DOUBLE
3051 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3052 {
3053 int l1, l2, h1, h2, lv, hv;
3054
3055 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3056
3057 if (GET_CODE (op1) == CONST_DOUBLE)
3058 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3059 else
3060 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3061
3062 switch (code)
3063 {
3064 case MINUS:
3065 /* A - B == A + (-B). */
3066 neg_double (l2, h2, &lv, &hv);
3067 l2 = lv, h2 = hv;
3068
3069 /* ... fall through ... */
3070
3071 case PLUS:
3072 add_double (l1, h1, l2, h2, &lv, &hv);
3073 break;
3074
3075 case MULT:
3076 mul_double (l1, h1, l2, h2, &lv, &hv);
3077 break;
3078
3079 case DIV: case MOD: case UDIV: case UMOD:
3080 /* We'd need to include tree.h to do this and it doesn't seem worth
3081 it. */
3082 return 0;
3083
3084 case AND:
3085 lv = l1 & l2, hv = h1 & h2;
3086 break;
3087
3088 case IOR:
3089 lv = l1 | l2, hv = h1 | h2;
3090 break;
3091
3092 case XOR:
3093 lv = l1 ^ l2, hv = h1 ^ h2;
3094 break;
3095
3096 case SMIN:
3097 if (h1 < h2 || (h1 == h2 && (unsigned) l1 < (unsigned) l2))
3098 lv = l1, hv = h1;
3099 else
3100 lv = l2, hv = h2;
3101 break;
3102
3103 case SMAX:
3104 if (h1 > h2 || (h1 == h2 && (unsigned) l1 > (unsigned) l2))
3105 lv = l1, hv = h1;
3106 else
3107 lv = l2, hv = h2;
3108 break;
3109
3110 case UMIN:
3111 if ((unsigned) h1 < (unsigned) h2
3112 || (h1 == h2 && (unsigned) l1 < (unsigned) l2))
3113 lv = l1, hv = h1;
3114 else
3115 lv = l2, hv = h2;
3116 break;
3117
3118 case UMAX:
3119 if ((unsigned) h1 > (unsigned) h2
3120 || (h1 == h2 && (unsigned) l1 > (unsigned) l2))
3121 lv = l1, hv = h1;
3122 else
3123 lv = l2, hv = h2;
3124 break;
3125
3126 case LSHIFTRT: case ASHIFTRT:
3127 case ASHIFT: case LSHIFT:
3128 case ROTATE: case ROTATERT:
3129 #ifdef SHIFT_COUNT_TRUNCATED
3130 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3131 #endif
3132
3133 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3134 return 0;
3135
3136 if (code == LSHIFTRT || code == ASHIFTRT)
3137 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3138 code == ASHIFTRT);
3139 else if (code == ASHIFT || code == LSHIFT)
3140 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3141 code == ASHIFT);
3142 else if (code == ROTATE)
3143 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3144 else /* code == ROTATERT */
3145 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3146 break;
3147
3148 default:
3149 return 0;
3150 }
3151
3152 return immed_double_const (lv, hv, mode);
3153 }
3154 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3155
3156 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3157 || width > HOST_BITS_PER_INT || width == 0)
3158 {
3159 /* Even if we can't compute a constant result,
3160 there are some cases worth simplifying. */
3161
3162 switch (code)
3163 {
3164 case PLUS:
3165 /* In IEEE floating point, x+0 is not the same as x. Similarly
3166 for the other optimizations below. */
3167 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3168 && GET_MODE_CLASS (mode) != MODE_INT)
3169 break;
3170
3171 if (op1 == CONST0_RTX (mode))
3172 return op0;
3173
3174 /* Strip off any surrounding CONSTs. They don't matter in any of
3175 the cases below. */
3176 if (GET_CODE (op0) == CONST)
3177 op0 = XEXP (op0, 0);
3178 if (GET_CODE (op1) == CONST)
3179 op1 = XEXP (op1, 0);
3180
3181 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3182 if (GET_CODE (op0) == NEG)
3183 {
3184 rtx tem = simplify_binary_operation (MINUS, mode,
3185 op1, XEXP (op0, 0));
3186 return tem ? tem : gen_rtx (MINUS, mode, op1, XEXP (op0, 0));
3187 }
3188 else if (GET_CODE (op1) == NEG)
3189 {
3190 rtx tem = simplify_binary_operation (MINUS, mode,
3191 op0, XEXP (op1, 0));
3192 return tem ? tem : gen_rtx (MINUS, mode, op0, XEXP (op1, 0));
3193 }
3194
3195 /* Don't use the associative law for floating point.
3196 The inaccuracy makes it nonassociative,
3197 and subtle programs can break if operations are associated. */
3198 if (GET_MODE_CLASS (mode) != MODE_INT)
3199 break;
3200
3201 /* (a - b) + b -> a, similarly a + (b - a) -> a */
3202 if (GET_CODE (op0) == MINUS
3203 && rtx_equal_p (XEXP (op0, 1), op1) && ! side_effects_p (op1))
3204 return XEXP (op0, 0);
3205
3206 if (GET_CODE (op1) == MINUS
3207 && rtx_equal_p (XEXP (op1, 1), op0) && ! side_effects_p (op0))
3208 return XEXP (op1, 0);
3209
3210 /* (c1 - a) + c2 becomes (c1 + c2) - a. */
3211 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == MINUS
3212 && GET_CODE (XEXP (op0, 0)) == CONST_INT)
3213 {
3214 rtx tem = simplify_binary_operation (PLUS, mode, op1,
3215 XEXP (op0, 0));
3216
3217 return tem ? gen_rtx (MINUS, mode, tem, XEXP (op0, 1)) : 0;
3218 }
3219
3220 /* Handle both-operands-constant cases. */
3221 if (CONSTANT_P (op0) && CONSTANT_P (op1)
3222 && GET_CODE (op0) != CONST_DOUBLE
3223 && GET_CODE (op1) != CONST_DOUBLE
3224 && GET_MODE_CLASS (mode) == MODE_INT)
3225 {
3226 if (GET_CODE (op1) == CONST_INT)
3227 return plus_constant (op0, INTVAL (op1));
3228 else if (GET_CODE (op0) == CONST_INT)
3229 return plus_constant (op1, INTVAL (op0));
3230 else
3231 return gen_rtx (CONST, mode,
3232 gen_rtx (PLUS, mode,
3233 GET_CODE (op0) == CONST
3234 ? XEXP (op0, 0) : op0,
3235 GET_CODE (op1) == CONST
3236 ? XEXP (op1, 0) : op1));
3237 }
3238 else if (GET_CODE (op1) == CONST_INT
3239 && GET_CODE (op0) == PLUS
3240 && (CONSTANT_P (XEXP (op0, 0))
3241 || CONSTANT_P (XEXP (op0, 1))))
3242 /* constant + (variable + constant)
3243 can result if an index register is made constant.
3244 We simplify this by adding the constants.
3245 If we did not, it would become an invalid address. */
3246 return plus_constant (op0, INTVAL (op1));
3247 break;
3248
3249 case COMPARE:
3250 #ifdef HAVE_cc0
3251 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3252 using cc0, in which case we want to leave it as a COMPARE
3253 so we can distinguish it from a register-register-copy.
3254
3255 In IEEE floating point, x-0 is not the same as x. */
3256
3257 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3258 || GET_MODE_CLASS (mode) == MODE_INT)
3259 && op1 == CONST0_RTX (mode))
3260 return op0;
3261 #else
3262 /* Do nothing here. */
3263 #endif
3264 break;
3265
3266 case MINUS:
3267 /* In IEEE floating point, x-0 is not the same as x. */
3268 if (rtx_equal_p (op0, op1)
3269 && ! side_effects_p (op0)
3270 /* We can't assume x-x is 0
3271 even with non-IEEE floating point. */
3272 && GET_MODE_CLASS (mode) != MODE_FLOAT)
3273 return const0_rtx;
3274
3275 /* Change subtraction from zero into negation. */
3276 if (op0 == CONST0_RTX (mode))
3277 return gen_rtx (NEG, mode, op1);
3278
3279 /* The remainder of these cases cannot be done for IEEE
3280 floating-point. */
3281 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3282 && GET_MODE_CLASS (mode) != MODE_INT)
3283 break;
3284
3285 /* Subtracting 0 has no effect. */
3286 if (op1 == CONST0_RTX (mode))
3287 return op0;
3288
3289 /* Strip off any surrounding CONSTs. They don't matter in any of
3290 the cases below. */
3291 if (GET_CODE (op0) == CONST)
3292 op0 = XEXP (op0, 0);
3293 if (GET_CODE (op1) == CONST)
3294 op1 = XEXP (op1, 0);
3295
3296 /* (a - (-b)) -> (a + b). */
3297 if (GET_CODE (op1) == NEG)
3298 {
3299 rtx tem = simplify_binary_operation (PLUS, mode,
3300 op0, XEXP (op1, 0));
3301 return tem ? tem : gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3302 }
3303
3304 /* Don't use the associative law for floating point.
3305 The inaccuracy makes it nonassociative,
3306 and subtle programs can break if operations are associated. */
3307 if (GET_MODE_CLASS (mode) != MODE_INT)
3308 break;
3309
3310 /* (a + b) - a -> b, and (b - (a + b)) -> -a */
3311 if (GET_CODE (op0) == PLUS
3312 && rtx_equal_p (XEXP (op0, 0), op1)
3313 && ! side_effects_p (op1))
3314 return XEXP (op0, 1);
3315 else if (GET_CODE (op0) == PLUS
3316 && rtx_equal_p (XEXP (op0, 1), op1)
3317 && ! side_effects_p (op1))
3318 return XEXP (op0, 0);
3319
3320 if (GET_CODE (op1) == PLUS
3321 && rtx_equal_p (XEXP (op1, 0), op0)
3322 && ! side_effects_p (op0))
3323 {
3324 rtx tem = simplify_unary_operation (NEG, mode, XEXP (op1, 1),
3325 mode);
3326
3327 return tem ? tem : gen_rtx (NEG, mode, XEXP (op1, 1));
3328 }
3329 else if (GET_CODE (op1) == PLUS
3330 && rtx_equal_p (XEXP (op1, 1), op0)
3331 && ! side_effects_p (op0))
3332 {
3333 rtx tem = simplify_unary_operation (NEG, mode, XEXP (op1, 0),
3334 mode);
3335
3336 return tem ? tem : gen_rtx (NEG, mode, XEXP (op1, 0));
3337 }
3338
3339 /* a - (a - b) -> b */
3340 if (GET_CODE (op1) == MINUS && rtx_equal_p (op0, XEXP (op1, 0))
3341 && ! side_effects_p (op0))
3342 return XEXP (op1, 1);
3343
3344 /* (a +/- b) - (a +/- c) can be simplified. Do variants of
3345 this involving commutativity. The most common case is
3346 (a + C1) - (a + C2), but it's not hard to do all the cases. */
3347 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS)
3348 && (GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS))
3349 {
3350 rtx lhs0 = XEXP (op0, 0), lhs1 = XEXP (op0, 1);
3351 rtx rhs0 = XEXP (op1, 0), rhs1 = XEXP (op1, 1);
3352 int lhs_neg = GET_CODE (op0) == MINUS;
3353 int rhs_neg = GET_CODE (op1) == MINUS;
3354 rtx lhs = 0, rhs = 0;
3355
3356 /* Set LHS and RHS to the two different terms. */
3357 if (rtx_equal_p (lhs0, rhs0) && ! side_effects_p (lhs0))
3358 lhs = lhs1, rhs = rhs1;
3359 else if (! rhs_neg && rtx_equal_p (lhs0, rhs1)
3360 && ! side_effects_p (lhs0))
3361 lhs = lhs1, rhs = rhs0;
3362 else if (! lhs_neg && rtx_equal_p (lhs1, rhs0)
3363 && ! side_effects_p (lhs1))
3364 lhs = lhs0, rhs = rhs1;
3365 else if (! lhs_neg && ! rhs_neg && rtx_equal_p (lhs1, rhs1)
3366 && ! side_effects_p (lhs1))
3367 lhs = lhs0, rhs = rhs0;
3368
3369 /* The RHS is the operand of a MINUS, so its negation
3370 status should be complemented. */
3371 rhs_neg = ! rhs_neg;
3372
3373 /* If we found two values equal, form the sum or difference
3374 of the remaining two terms. */
3375 if (lhs)
3376 {
3377 rtx tem = simplify_binary_operation (lhs_neg == rhs_neg
3378 ? PLUS : MINUS,
3379 mode,
3380 lhs_neg ? rhs : lhs,
3381 lhs_neg ? lhs : rhs);
3382 if (tem == 0)
3383 tem = gen_rtx (lhs_neg == rhs_neg
3384 ? PLUS : MINUS,
3385 mode, lhs_neg ? rhs : lhs,
3386 lhs_neg ? lhs : rhs);
3387
3388 /* If both sides negated, negate result. */
3389 if (lhs_neg && rhs_neg)
3390 {
3391 rtx tem1
3392 = simplify_unary_operation (NEG, mode, tem, mode);
3393 if (tem1 == 0)
3394 tem1 = gen_rtx (NEG, mode, tem);
3395 tem = tem1;
3396 }
3397
3398 return tem;
3399 }
3400
3401 return 0;
3402 }
3403
3404 /* c1 - (a + c2) becomes (c1 - c2) - a. */
3405 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == PLUS
3406 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
3407 {
3408 rtx tem = simplify_binary_operation (MINUS, mode, op0,
3409 XEXP (op1, 1));
3410
3411 return tem ? gen_rtx (MINUS, mode, tem, XEXP (op1, 0)) : 0;
3412 }
3413
3414 /* c1 - (c2 - a) becomes (c1 - c2) + a. */
3415 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == MINUS
3416 && GET_CODE (XEXP (op1, 0)) == CONST_INT)
3417 {
3418 rtx tem = simplify_binary_operation (MINUS, mode, op0,
3419 XEXP (op1, 0));
3420
3421 return (tem && GET_CODE (tem) == CONST_INT
3422 ? plus_constant (XEXP (op1, 1), INTVAL (tem))
3423 : 0);
3424 }
3425
3426 /* Don't let a relocatable value get a negative coeff. */
3427 if (GET_CODE (op1) == CONST_INT)
3428 return plus_constant (op0, - INTVAL (op1));
3429 break;
3430
3431 case MULT:
3432 if (op1 == constm1_rtx)
3433 {
3434 rtx tem = simplify_unary_operation (NEG, mode, op0, mode);
3435
3436 return tem ? tem : gen_rtx (NEG, mode, op0);
3437 }
3438
3439 /* In IEEE floating point, x*0 is not always 0. */
3440 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3441 || GET_MODE_CLASS (mode) == MODE_INT)
3442 && op1 == CONST0_RTX (mode)
3443 && ! side_effects_p (op0))
3444 return op1;
3445
3446 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3447 However, ANSI says we can drop signals,
3448 so we can do this anyway. */
3449 if (op1 == CONST1_RTX (mode))
3450 return op0;
3451
3452 /* Convert multiply by constant power of two into shift. */
3453 if (GET_CODE (op1) == CONST_INT
3454 && (val = exact_log2 (INTVAL (op1))) >= 0)
3455 return gen_rtx (ASHIFT, mode, op0,
3456 gen_rtx (CONST_INT, VOIDmode, val));
3457
3458 if (GET_CODE (op1) == CONST_DOUBLE
3459 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3460 {
3461 REAL_VALUE_TYPE d;
3462 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3463
3464 /* x*2 is x+x and x*(-1) is -x */
3465 if (REAL_VALUES_EQUAL (d, dconst2)
3466 && GET_MODE (op0) == mode)
3467 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3468
3469 else if (REAL_VALUES_EQUAL (d, dconstm1)
3470 && GET_MODE (op0) == mode)
3471 return gen_rtx (NEG, mode, op0);
3472 }
3473 break;
3474
3475 case IOR:
3476 if (op1 == const0_rtx)
3477 return op0;
3478 if (GET_CODE (op1) == CONST_INT
3479 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3480 return op1;
3481 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3482 return op0;
3483 /* A | (~A) -> -1 */
3484 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3485 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3486 && ! side_effects_p (op0))
3487 return constm1_rtx;
3488 break;
3489
3490 case XOR:
3491 if (op1 == const0_rtx)
3492 return op0;
3493 if (GET_CODE (op1) == CONST_INT
3494 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3495 return gen_rtx (NOT, mode, op0);
3496 if (op0 == op1 && ! side_effects_p (op0))
3497 return const0_rtx;
3498 break;
3499
3500 case AND:
3501 if (op1 == const0_rtx && ! side_effects_p (op0))
3502 return const0_rtx;
3503 if (GET_CODE (op1) == CONST_INT
3504 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3505 return op0;
3506 if (op0 == op1 && ! side_effects_p (op0))
3507 return op0;
3508 /* A & (~A) -> 0 */
3509 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3510 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3511 && ! side_effects_p (op0))
3512 return const0_rtx;
3513 break;
3514
3515 case UDIV:
3516 /* Convert divide by power of two into shift (divide by 1 handled
3517 below). */
3518 if (GET_CODE (op1) == CONST_INT
3519 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3520 return gen_rtx (LSHIFTRT, mode, op0,
3521 gen_rtx (CONST_INT, VOIDmode, arg1));
3522
3523 /* ... fall through ... */
3524
3525 case DIV:
3526 if (op1 == CONST1_RTX (mode))
3527 return op0;
3528 else if (op0 == CONST0_RTX (mode)
3529 && ! side_effects_p (op1))
3530 return op0;
3531 #if 0 /* Turned off till an expert says this is a safe thing to do. */
3532 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3533 /* Change division by a constant into multiplication. */
3534 else if (GET_CODE (op1) == CONST_DOUBLE
3535 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3536 && op1 != CONST0_RTX (mode))
3537 {
3538 REAL_VALUE_TYPE d;
3539 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3540 if (REAL_VALUES_EQUAL (d, dconst0))
3541 abort();
3542 #if defined (REAL_ARITHMETIC)
3543 REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
3544 return gen_rtx (MULT, mode, op0,
3545 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3546 #else
3547 return gen_rtx (MULT, mode, op0,
3548 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3549 }
3550 #endif
3551 #endif
3552 #endif
3553 break;
3554
3555 case UMOD:
3556 /* Handle modulus by power of two (mod with 1 handled below). */
3557 if (GET_CODE (op1) == CONST_INT
3558 && exact_log2 (INTVAL (op1)) > 0)
3559 return gen_rtx (AND, mode, op0,
3560 gen_rtx (CONST_INT, VOIDmode, INTVAL (op1) - 1));
3561
3562 /* ... fall through ... */
3563
3564 case MOD:
3565 if ((op0 == const0_rtx || op1 == const1_rtx)
3566 && ! side_effects_p (op0) && ! side_effects_p (op1))
3567 return const0_rtx;
3568 break;
3569
3570 case ROTATERT:
3571 case ROTATE:
3572 /* Rotating ~0 always results in ~0. */
3573 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_INT
3574 && INTVAL (op0) == GET_MODE_MASK (mode)
3575 && ! side_effects_p (op1))
3576 return op0;
3577
3578 /* ... fall through ... */
3579
3580 case LSHIFT:
3581 case ASHIFT:
3582 case ASHIFTRT:
3583 case LSHIFTRT:
3584 if (op1 == const0_rtx)
3585 return op0;
3586 if (op0 == const0_rtx && ! side_effects_p (op1))
3587 return op0;
3588 break;
3589
3590 case SMIN:
3591 if (width <= HOST_BITS_PER_INT && GET_CODE (op1) == CONST_INT
3592 && INTVAL (op1) == 1 << (width - 1)
3593 && ! side_effects_p (op0))
3594 return op1;
3595 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3596 return op0;
3597 break;
3598
3599 case SMAX:
3600 if (width <= HOST_BITS_PER_INT && GET_CODE (op1) == CONST_INT
3601 && INTVAL (op1) == GET_MODE_MASK (mode) >> 1
3602 && ! side_effects_p (op0))
3603 return op1;
3604 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3605 return op0;
3606 break;
3607
3608 case UMIN:
3609 if (op1 == const0_rtx && ! side_effects_p (op0))
3610 return op1;
3611 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3612 return op0;
3613 break;
3614
3615 case UMAX:
3616 if (op1 == constm1_rtx && ! side_effects_p (op0))
3617 return op1;
3618 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3619 return op0;
3620 break;
3621
3622 default:
3623 abort ();
3624 }
3625
3626 return 0;
3627 }
3628
3629 /* Get the integer argument values in two forms:
3630 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3631
3632 arg0 = INTVAL (op0);
3633 arg1 = INTVAL (op1);
3634
3635 if (width < HOST_BITS_PER_INT)
3636 {
3637 arg0 &= (1 << width) - 1;
3638 arg1 &= (1 << width) - 1;
3639
3640 arg0s = arg0;
3641 if (arg0s & (1 << (width - 1)))
3642 arg0s |= ((-1) << width);
3643
3644 arg1s = arg1;
3645 if (arg1s & (1 << (width - 1)))
3646 arg1s |= ((-1) << width);
3647 }
3648 else
3649 {
3650 arg0s = arg0;
3651 arg1s = arg1;
3652 }
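
/* For example (illustrative): with width 8 and op0 = (const_int 255),
   arg0 is 255 and arg0s is -1; DIV and MOD below use the sign-extended
   forms, UDIV and UMOD the zero-extended ones.  */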
3653
3654 /* Compute the value of the arithmetic. */
3655
3656 switch (code)
3657 {
3658 case PLUS:
3659 val = arg0s + arg1s;
3660 break;
3661
3662 case MINUS:
3663 val = arg0s - arg1s;
3664 break;
3665
3666 case MULT:
3667 val = arg0s * arg1s;
3668 break;
3669
3670 case DIV:
3671 if (arg1s == 0)
3672 return 0;
3673 val = arg0s / arg1s;
3674 break;
3675
3676 case MOD:
3677 if (arg1s == 0)
3678 return 0;
3679 val = arg0s % arg1s;
3680 break;
3681
3682 case UDIV:
3683 if (arg1 == 0)
3684 return 0;
3685 val = (unsigned) arg0 / arg1;
3686 break;
3687
3688 case UMOD:
3689 if (arg1 == 0)
3690 return 0;
3691 val = (unsigned) arg0 % arg1;
3692 break;
3693
3694 case AND:
3695 val = arg0 & arg1;
3696 break;
3697
3698 case IOR:
3699 val = arg0 | arg1;
3700 break;
3701
3702 case XOR:
3703 val = arg0 ^ arg1;
3704 break;
3705
3706 case LSHIFTRT:
3707 /* If shift count is undefined, don't fold it; let the machine do
3708 what it wants. But truncate it if the machine will do that. */
3709 if (arg1 < 0)
3710 return 0;
3711
3712 #ifdef SHIFT_COUNT_TRUNCATED
3713 arg1 &= (BITS_PER_WORD - 1);
3714 #endif
3715
3716 if (arg1 >= width)
3717 return 0;
3718
3719 val = ((unsigned) arg0) >> arg1;
3720 break;
3721
3722 case ASHIFT:
3723 case LSHIFT:
3724 if (arg1 < 0)
3725 return 0;
3726
3727 #ifdef SHIFT_COUNT_TRUNCATED
3728 arg1 &= (BITS_PER_WORD - 1);
3729 #endif
3730
3731 if (arg1 >= width)
3732 return 0;
3733
3734 val = ((unsigned) arg0) << arg1;
3735 break;
3736
3737 case ASHIFTRT:
3738 if (arg1 < 0)
3739 return 0;
3740
3741 #ifdef SHIFT_COUNT_TRUNCATED
3742 arg1 &= (BITS_PER_WORD - 1);
3743 #endif
3744
3745 if (arg1 >= width)
3746 return 0;
3747
3748 val = arg0s >> arg1;
3749 break;
3750
3751 case ROTATERT:
3752 if (arg1 < 0)
3753 return 0;
3754
3755 	  arg1 %= width;
 	  /* A zero count would make the complementary shift below shift
 	     by the full width, which is undefined; rotating by zero
 	     just yields op0 itself.  */
 	  if (arg1 == 0)
 	    return op0;
3756 val = ((((unsigned) arg0) << (width - arg1))
3757 | (((unsigned) arg0) >> arg1));
3758 break;
3759
3760 case ROTATE:
3761 if (arg1 < 0)
3762 return 0;
3763
3764 	  arg1 %= width;
 	  /* As for ROTATERT above, a zero count would shift by the full
 	     width below, which is undefined; just return op0.  */
 	  if (arg1 == 0)
 	    return op0;
3765 val = ((((unsigned) arg0) << arg1)
3766 | (((unsigned) arg0) >> (width - arg1)));
3767 break;
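       /* A worked example (hypothetical values): rotating the 8-bit value
 	  0xb4 left by 3 (code == ROTATE, width == 8, arg1 == 3) computes
 	     (0xb4 << 3) | (0xb4 >> 5) = 0x5a0 | 0x05 = 0x5a5
 	  and the masking step after this switch reduces 0x5a5 to the
 	  8-bit result 0xa5.  */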
3768
3769 case COMPARE:
3770 /* Do nothing here. */
3771 return 0;
3772
3773 default:
3774 abort ();
3775 }
3776
3777 /* Clear the bits that don't belong in our mode, unless they and our sign
3778 bit are all one. So we get either a reasonable negative value or a
3779 reasonable unsigned value for this mode. */
3780 if (width < HOST_BITS_PER_INT
3781 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
3782 val &= (1 << width) - 1;
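   /* For width == 8, a val of -1 passes the test above unchanged, since
      all its high host bits agree with the sign bit (a reasonable
      negative value), whereas a val of 0x5a5 fails it and is masked
      down to 0xa5 (a reasonable unsigned value).  */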
3783
3784 return gen_rtx (CONST_INT, VOIDmode, val);
3785 }
3786 \f
3787 /* Like simplify_binary_operation except used for relational operators.
3788 MODE is the mode of the operands, not that of the result. */
3789
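 /* A minimal usage sketch (the operands are hypothetical):

 	rtx res
 	  = simplify_relational_operation (GTU, SImode,
 					   gen_rtx (CONST_INT, VOIDmode, 3),
 					   gen_rtx (CONST_INT, VOIDmode, 5));

    yields a CONST_INT of zero, since 3 > 5 is false as an unsigned
    comparison; a null return would instead mean that no simplification
    was found.  */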
3790 rtx
3791 simplify_relational_operation (code, mode, op0, op1)
3792 enum rtx_code code;
3793 enum machine_mode mode;
3794 rtx op0, op1;
3795 {
3796 register int arg0, arg1, arg0s, arg1s;
3797 int val;
3798 int width = GET_MODE_BITSIZE (mode);
3799
3800 /* If op0 is a compare, extract the comparison arguments from it. */
3801 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
3802 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3803
3804 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3805 || width > HOST_BITS_PER_INT || width == 0)
3806 {
3807 /* Even if we can't compute a constant result,
3808 there are some cases worth simplifying. */
3809
3810 /* For non-IEEE floating-point, if the two operands are equal, we know
3811 the result. */
3812 if (rtx_equal_p (op0, op1)
3813 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3814 || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
3815 return (code == EQ || code == GE || code == LE || code == LEU
3816 || code == GEU) ? const_true_rtx : const0_rtx;
3817 else if (GET_CODE (op0) == CONST_DOUBLE
3818 && GET_CODE (op1) == CONST_DOUBLE
3819 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
3820 {
3821 REAL_VALUE_TYPE d0, d1;
3822 int value;
3823 jmp_buf handler;
3824 int op0lt, op1lt, equal;
3825
3826 if (setjmp (handler))
3827 return 0;
3828
3829 set_float_handler (handler);
3830 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
3831 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
3832 equal = REAL_VALUES_EQUAL (d0, d1);
3833 op0lt = REAL_VALUES_LESS (d0, d1);
3834 op1lt = REAL_VALUES_LESS (d1, d0);
3835 set_float_handler (0);
3836
3837 switch (code)
3838 {
3839 case EQ:
3840 return equal ? const_true_rtx : const0_rtx;
3841 case NE:
3842 return !equal ? const_true_rtx : const0_rtx;
3843 case LE:
3844 return equal || op0lt ? const_true_rtx : const0_rtx;
3845 case LT:
3846 return op0lt ? const_true_rtx : const0_rtx;
3847 case GE:
3848 return equal || op1lt ? const_true_rtx : const0_rtx;
3849 case GT:
3850 return op1lt ? const_true_rtx : const0_rtx;
3851 }
3852 }
3853
3854 switch (code)
3855 {
3856 case EQ:
3857 {
3858 #if 0
3859 /* We can't make this assumption due to #pragma weak */
3860 if (CONSTANT_P (op0) && op1 == const0_rtx)
3861 return const0_rtx;
3862 #endif
3863 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
3864 /* On some machines, the ap reg can be 0 sometimes. */
3865 && op0 != arg_pointer_rtx)
3866 return const0_rtx;
3867 break;
3868 }
3869
3870 case NE:
3871 #if 0
3872 /* We can't make this assumption due to #pragma weak */
3873 if (CONSTANT_P (op0) && op1 == const0_rtx)
3874 return const_true_rtx;
3875 #endif
3876 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
3877 /* On some machines, the ap reg can be 0 sometimes. */
3878 && op0 != arg_pointer_rtx)
3879 return const_true_rtx;
3880 break;
3881
3882 case GEU:
3883 /* Unsigned values are never negative, but we must be sure we are
3884 actually comparing a value, not a CC operand. */
3885 if (op1 == const0_rtx
3886 && GET_MODE_CLASS (mode) == MODE_INT)
3887 return const_true_rtx;
3888 break;
3889
3890 case LTU:
3891 if (op1 == const0_rtx
3892 && GET_MODE_CLASS (mode) == MODE_INT)
3893 return const0_rtx;
3894 break;
3895
3896 case LEU:
3897 /* Unsigned values are never greater than the largest
3898 unsigned value. */
3899 if (GET_CODE (op1) == CONST_INT
3900 && INTVAL (op1) == GET_MODE_MASK (mode)
3901 && GET_MODE_CLASS (mode) == MODE_INT)
3902 return const_true_rtx;
3903 break;
3904
3905 case GTU:
3906 if (GET_CODE (op1) == CONST_INT
3907 && INTVAL (op1) == GET_MODE_MASK (mode)
3908 && GET_MODE_CLASS (mode) == MODE_INT)
3909 return const0_rtx;
3910 break;
3911 }
3912
3913 return 0;
3914 }
3915
3916 /* Get the integer argument values in two forms:
3917 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3918
3919 arg0 = INTVAL (op0);
3920 arg1 = INTVAL (op1);
3921
3922 if (width < HOST_BITS_PER_INT)
3923 {
3924 arg0 &= (1 << width) - 1;
3925 arg1 &= (1 << width) - 1;
3926
3927 arg0s = arg0;
3928 if (arg0s & (1 << (width - 1)))
3929 arg0s |= ((-1) << width);
3930
3931 arg1s = arg1;
3932 if (arg1s & (1 << (width - 1)))
3933 arg1s |= ((-1) << width);
3934 }
3935 else
3936 {
3937 arg0s = arg0;
3938 arg1s = arg1;
3939 }
3940
3941 /* Compute the value of the arithmetic. */
3942
3943 switch (code)
3944 {
3945 case NE:
3946 val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
3947 break;
3948
3949 case EQ:
3950 val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
3951 break;
3952
3953 case LE:
3954 val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
3955 break;
3956
3957 case LT:
3958 val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
3959 break;
3960
3961 case GE:
3962 val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
3963 break;
3964
3965 case GT:
3966 val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
3967 break;
3968
3969 case LEU:
3970 val = ((unsigned) arg0) <= ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
3971 break;
3972
3973 case LTU:
3974 val = ((unsigned) arg0) < ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
3975 break;
3976
3977 case GEU:
3978 val = ((unsigned) arg0) >= ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
3979 break;
3980
3981 case GTU:
3982 val = ((unsigned) arg0) > ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
3983 break;
3984
3985 default:
3986 abort ();
3987 }
3988
3989 /* Clear the bits that don't belong in our mode, unless they and our sign
3990 bit are all one. So we get either a reasonable negative value or a
3991 reasonable unsigned value for this mode. */
3992 if (width < HOST_BITS_PER_INT
3993 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
3994 val &= (1 << width) - 1;
3995
3996 return gen_rtx (CONST_INT, VOIDmode, val);
3997 }
3998 \f
3999 /* Simplify CODE, an operation with result mode MODE and three operands,
4000 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4001 a constant. Return 0 if no simplification is possible. */
4002
4003 rtx
4004 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4005 enum rtx_code code;
4006 enum machine_mode mode, op0_mode;
4007 rtx op0, op1, op2;
4008 {
4009 int width = GET_MODE_BITSIZE (mode);
4010
4011 /* VOIDmode means "infinite" precision. */
4012 if (width == 0)
4013 width = HOST_BITS_PER_INT;
4014
4015 switch (code)
4016 {
4017 case SIGN_EXTRACT:
4018 case ZERO_EXTRACT:
4019 if (GET_CODE (op0) == CONST_INT
4020 && GET_CODE (op1) == CONST_INT
4021 && GET_CODE (op2) == CONST_INT
4022 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4023 && width <= HOST_BITS_PER_INT)
4024 {
4025 /* Extracting a bit-field from a constant */
4026 int val = INTVAL (op0);
4027
4028 #if BITS_BIG_ENDIAN
4029 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4030 #else
4031 val >>= INTVAL (op2);
4032 #endif
4033 if (HOST_BITS_PER_INT != INTVAL (op1))
4034 {
4035 /* First zero-extend. */
4036 val &= (1 << INTVAL (op1)) - 1;
4037 /* If desired, propagate sign bit. */
4038 if (code == SIGN_EXTRACT && (val & (1 << (INTVAL (op1) - 1))))
4039 	      val |= ~ ((1 << INTVAL (op1)) - 1);
4040 }
4041
4042 /* Clear the bits that don't belong in our mode,
4043 unless they and our sign bit are all one.
4044 So we get either a reasonable negative value or a reasonable
4045 unsigned value for this mode. */
4046 if (width < HOST_BITS_PER_INT
4047 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
4048 val &= (1 << width) - 1;
4049
4050 return gen_rtx (CONST_INT, VOIDmode, val);
4051 }
4052 break;
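       /* A worked example (hypothetical values): an 8-bit SIGN_EXTRACT
 	  at bit position 4 of (const_int 0x0f50), with BITS_BIG_ENDIAN
 	  zero, gives
 	     val = 0x0f50 >> 4 = 0xf5
 	  after zero-extension; bit 7 of that field is set, so the sign
 	  is propagated and the folded result is (const_int -11).  */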
4053
4054 case IF_THEN_ELSE:
4055 if (GET_CODE (op0) == CONST_INT)
4056 return op0 != const0_rtx ? op1 : op2;
4057 break;
4058
4059 default:
4060 abort ();
4061 }
4062
4063 return 0;
4064 }
4065 \f
4066 /* If X is a nontrivial arithmetic operation on an argument
4067 for which a constant value can be determined, return
4068 the result of operating on that value, as a constant.
4069 Otherwise, return X, possibly with one or more operands
4070 modified by recursive calls to this function.
4071
4072 If X is a register whose contents are known, we do NOT
4073 return those contents. This is because an instruction that
4074 uses a register is usually faster than one that uses a constant.
4075
4076 INSN is the insn that we may be modifying. If it is 0, make a copy
4077 of X before modifying it. */
4078
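 /* For instance (register number and values hypothetical): if pseudo
    (reg 65) is currently known to hold (const_int 8), then folding

 	(plus:SI (reg 65) (const_int 4))

    looks up the constant equivalents of the operands and lets
    simplify_binary_operation produce (const_int 12), while folding
    (reg 65) by itself returns the register unchanged, as explained
    above.  */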
4079 static rtx
4080 fold_rtx (x, insn)
4081 rtx x;
4082 rtx insn;
4083 {
4084 register enum rtx_code code;
4085 register enum machine_mode mode;
4086 register char *fmt;
4087 register int i, val;
4088 rtx new = 0;
4089 int copied = 0;
4090 int must_swap = 0;
4091
4092 /* Folded equivalents of first two operands of X. */
4093 rtx folded_arg0;
4094 rtx folded_arg1;
4095
4096 /* Constant equivalents of first three operands of X;
4097 0 when no such equivalent is known. */
4098 rtx const_arg0;
4099 rtx const_arg1;
4100 rtx const_arg2;
4101
4102 /* The mode of the first operand of X. We need this for sign and zero
4103 extends. */
4104 enum machine_mode mode_arg0;
4105
4106 if (x == 0)
4107 return x;
4108
4109 mode = GET_MODE (x);
4110 code = GET_CODE (x);
4111 switch (code)
4112 {
4113 case CONST:
4114 case CONST_INT:
4115 case CONST_DOUBLE:
4116 case SYMBOL_REF:
4117 case LABEL_REF:
4118 case REG:
4119 /* No use simplifying an EXPR_LIST, since EXPR_LISTs
4120 are used only for lists of args
4121 in a function call's REG_EQUAL note. */
4122 case EXPR_LIST:
4123 return x;
4124
4125 #ifdef HAVE_cc0
4126 case CC0:
4127 return prev_insn_cc0;
4128 #endif
4129
4130 case PC:
4131 /* If the next insn is a CODE_LABEL followed by a jump table,
4132 PC's value is a LABEL_REF pointing to that label. That
4133 lets us fold switch statements on the Vax. */
4134 if (insn && GET_CODE (insn) == JUMP_INSN)
4135 {
4136 rtx next = next_nonnote_insn (insn);
4137
4138 if (next && GET_CODE (next) == CODE_LABEL
4139 && NEXT_INSN (next) != 0
4140 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4141 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4142 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4143 return gen_rtx (LABEL_REF, Pmode, next);
4144 }
4145 break;
4146
4147 case SUBREG:
4148 /* If this is a single word of a multi-word value, see if we previously
4149 assigned a value to that word. */
4150 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4151 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
4152 && (new = lookup_as_function (x, CONST_INT)) != 0)
4153 return new;
4154
4155 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4156 We might be able to if the SUBREG is extracting a single word in an
4157 integral mode or extracting the low part. */
4158
4159 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4160 const_arg0 = equiv_constant (folded_arg0);
4161 if (const_arg0)
4162 folded_arg0 = const_arg0;
4163
4164 if (folded_arg0 != SUBREG_REG (x))
4165 {
4166 new = 0;
4167
4168 if (GET_MODE_CLASS (mode) == MODE_INT
4169 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4170 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4171 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4172 GET_MODE (SUBREG_REG (x)));
4173 if (new == 0 && subreg_lowpart_p (x))
4174 new = gen_lowpart_if_possible (mode, folded_arg0);
4175 if (new)
4176 return new;
4177 }
4178 return x;
4179
4180 case NOT:
4181 case NEG:
4182 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4183 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4184 new = lookup_as_function (XEXP (x, 0), code);
4185 if (new)
4186 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4187 break;
4188
4189 case MEM:
4190 /* If we are not actually processing an insn, don't try to find the
4191 best address. Not only don't we care, but we could modify the
4192 MEM in an invalid way since we have no insn to validate against. */
4193 if (insn != 0)
4194 find_best_addr (insn, &XEXP (x, 0));
4195
4196 {
4197 /* Even if we don't fold in the insn itself,
4198 we can safely do so here, in hopes of getting a constant. */
4199 rtx addr = fold_rtx (XEXP (x, 0), 0);
4200 rtx base = 0;
4201 int offset = 0;
4202
4203 if (GET_CODE (addr) == REG
4204 && REGNO_QTY_VALID_P (REGNO (addr))
4205 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4206 && qty_const[reg_qty[REGNO (addr)]] != 0)
4207 addr = qty_const[reg_qty[REGNO (addr)]];
4208
4209 /* If address is constant, split it into a base and integer offset. */
4210 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4211 base = addr;
4212 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4213 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4214 {
4215 base = XEXP (XEXP (addr, 0), 0);
4216 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4217 }
4218 else if (GET_CODE (addr) == LO_SUM
4219 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4220 base = XEXP (addr, 1);
4221
4222 /* If this is a constant pool reference, we can fold it into its
4223 constant to allow better value tracking. */
4224 if (base && GET_CODE (base) == SYMBOL_REF
4225 && CONSTANT_POOL_ADDRESS_P (base))
4226 {
4227 rtx constant = get_pool_constant (base);
4228 enum machine_mode const_mode = get_pool_mode (base);
4229 rtx new;
4230
4231 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4232 constant_pool_entries_cost = COST (constant);
4233
4234 /* If we are loading the full constant, we have an equivalence. */
4235 if (offset == 0 && mode == const_mode)
4236 return constant;
4237
4238 /* If this actually isn't a constant (weird!), we can't do
4239 anything. Otherwise, handle the two most common cases:
4240 extracting a word from a multi-word constant, and extracting
4241 the low-order bits. Other cases don't seem common enough to
4242 worry about. */
4243 if (! CONSTANT_P (constant))
4244 return x;
4245
4246 if (GET_MODE_CLASS (mode) == MODE_INT
4247 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4248 && offset % UNITS_PER_WORD == 0
4249 && (new = operand_subword (constant,
4250 offset / UNITS_PER_WORD,
4251 0, const_mode)) != 0)
4252 return new;
4253
4254 if (((BYTES_BIG_ENDIAN
4255 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4256 || (! BYTES_BIG_ENDIAN && offset == 0))
4257 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4258 return new;
4259 }
4260
4261 /* If this is a reference to a label at a known position in a jump
4262 table, we also know its value. */
4263 if (base && GET_CODE (base) == LABEL_REF)
4264 {
4265 rtx label = XEXP (base, 0);
4266 rtx table_insn = NEXT_INSN (label);
4267
4268 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4269 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4270 {
4271 rtx table = PATTERN (table_insn);
4272
4273 if (offset >= 0
4274 && (offset / GET_MODE_SIZE (GET_MODE (table))
4275 < XVECLEN (table, 0)))
4276 return XVECEXP (table, 0,
4277 offset / GET_MODE_SIZE (GET_MODE (table)));
4278 }
4279 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4280 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4281 {
4282 rtx table = PATTERN (table_insn);
4283
4284 if (offset >= 0
4285 && (offset / GET_MODE_SIZE (GET_MODE (table))
4286 < XVECLEN (table, 1)))
4287 {
4288 offset /= GET_MODE_SIZE (GET_MODE (table));
4289 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4290 XEXP (table, 0));
4291
4292 if (GET_MODE (table) != Pmode)
4293 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4294
4295 return new;
4296 }
4297 }
4298 }
4299
4300 return x;
4301 }
4302 }
4303
4304 const_arg0 = 0;
4305 const_arg1 = 0;
4306 const_arg2 = 0;
4307 mode_arg0 = VOIDmode;
4308
4309 /* Try folding our operands.
4310 Then see which ones have constant values known. */
4311
4312 fmt = GET_RTX_FORMAT (code);
4313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4314 if (fmt[i] == 'e')
4315 {
4316 rtx arg = XEXP (x, i);
4317 rtx folded_arg = arg, const_arg = 0;
4318 enum machine_mode mode_arg = GET_MODE (arg);
4319 rtx cheap_arg, expensive_arg;
4320 rtx replacements[2];
4321 int j;
4322
4323 /* Most arguments are cheap, so handle them specially. */
4324 switch (GET_CODE (arg))
4325 {
4326 case REG:
4327 /* This is the same as calling equiv_constant; it is duplicated
4328 here for speed. */
4329 if (REGNO_QTY_VALID_P (REGNO (arg))
4330 && qty_const[reg_qty[REGNO (arg)]] != 0
4331 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
4332 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
4333 const_arg
4334 = gen_lowpart_if_possible (GET_MODE (arg),
4335 qty_const[reg_qty[REGNO (arg)]]);
4336 break;
4337
4338 case CONST:
4339 case CONST_INT:
4340 case SYMBOL_REF:
4341 case LABEL_REF:
4342 case CONST_DOUBLE:
4343 const_arg = arg;
4344 break;
4345
4346 #ifdef HAVE_cc0
4347 case CC0:
4348 folded_arg = prev_insn_cc0;
4349 mode_arg = prev_insn_cc0_mode;
4350 const_arg = equiv_constant (folded_arg);
4351 break;
4352 #endif
4353
4354 default:
4355 folded_arg = fold_rtx (arg, insn);
4356 const_arg = equiv_constant (folded_arg);
4357 }
4358
4359 /* For the first three operands, see if the operand
4360 is constant or equivalent to a constant. */
4361 switch (i)
4362 {
4363 case 0:
4364 folded_arg0 = folded_arg;
4365 const_arg0 = const_arg;
4366 mode_arg0 = mode_arg;
4367 break;
4368 case 1:
4369 folded_arg1 = folded_arg;
4370 const_arg1 = const_arg;
4371 break;
4372 case 2:
4373 const_arg2 = const_arg;
4374 break;
4375 }
4376
4377 /* Pick the least expensive of the folded argument and an
4378 equivalent constant argument. */
4379 if (const_arg == 0 || const_arg == folded_arg
4380 || COST (const_arg) > COST (folded_arg))
4381 cheap_arg = folded_arg, expensive_arg = const_arg;
4382 else
4383 cheap_arg = const_arg, expensive_arg = folded_arg;
4384
4385 /* Try to replace the operand with the cheapest of the two
4386 possibilities. If it doesn't work and this is either of the first
4387 two operands of a commutative operation, try swapping them.
4388 If THAT fails, try the more expensive, provided it is cheaper
4389 than what is already there. */
4390
4391 if (cheap_arg == XEXP (x, i))
4392 continue;
4393
4394 if (insn == 0 && ! copied)
4395 {
4396 x = copy_rtx (x);
4397 copied = 1;
4398 }
4399
4400 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
4401 for (j = 0;
4402 j < 2 && replacements[j]
4403 && COST (replacements[j]) < COST (XEXP (x, i));
4404 j++)
4405 {
4406 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
4407 break;
4408
4409 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
4410 {
4411 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
4412 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
4413
4414 if (apply_change_group ())
4415 {
4416 /* Swap them back to be invalid so that this loop can
4417 continue and flag them to be swapped back later. */
4418 rtx tem;
4419
4420 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
4421 XEXP (x, 1) = tem;
4422 must_swap = 1;
4423 break;
4424 }
4425 }
4426 }
4427 }
4428
4429 else if (fmt[i] == 'E')
4430 /* Don't try to fold inside a vector of expressions.
4431 Doing nothing is harmless. */
4432 ;
4433
4434 /* If a commutative operation, place a constant integer as the second
4435 operand unless the first operand is also a constant integer. Otherwise,
4436 place any constant second unless the first operand is also a constant. */
4437
4438 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4439 {
4440 if (must_swap || (const_arg0
4441 && (const_arg1 == 0
4442 || (GET_CODE (const_arg0) == CONST_INT
4443 && GET_CODE (const_arg1) != CONST_INT))))
4444 {
4445 register rtx tem = XEXP (x, 0);
4446
4447 if (insn == 0 && ! copied)
4448 {
4449 x = copy_rtx (x);
4450 copied = 1;
4451 }
4452
4453 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
4454 validate_change (insn, &XEXP (x, 1), tem, 1);
4455 if (apply_change_group ())
4456 {
4457 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
4458 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
4459 }
4460 }
4461 }
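   /* For example (hypothetical pseudo), (plus (const_int 3) (reg 65))
      is rewritten here as (plus (reg 65) (const_int 3)), so that later
      hash lookups see each commutative expression in a single canonical
      operand order.  */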
4462
4463 /* If X is an arithmetic operation, see if we can simplify it. */
4464
4465 switch (GET_RTX_CLASS (code))
4466 {
4467 case '1':
4468 new = simplify_unary_operation (code, mode,
4469 const_arg0 ? const_arg0 : folded_arg0,
4470 mode_arg0);
4471 break;
4472
4473 case '<':
4474 /* See what items are actually being compared and set FOLDED_ARG[01]
4475 to those values and CODE to the actual comparison code. If any are
4476 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
4477 do anything if both operands are already known to be constant. */
4478
4479 if (const_arg0 == 0 || const_arg1 == 0)
4480 {
4481 struct table_elt *p0, *p1;
4482
4483 code = find_comparison_args (code, &folded_arg0, &folded_arg1);
4484 const_arg0 = equiv_constant (folded_arg0);
4485 const_arg1 = equiv_constant (folded_arg1);
4486
4487 /* Get a mode from the values actually being compared, or from the
4488 old value of MODE_ARG0 if both are constants. If the resulting
4489 mode is VOIDmode or a MODE_CC mode, we don't know what kinds
4490 of things are being compared, so we can't do anything with this
4491 comparison. */
4492
4493 if (GET_MODE (folded_arg0) != VOIDmode
4494 && GET_MODE_CLASS (GET_MODE (folded_arg0)) != MODE_CC)
4495 mode_arg0 = GET_MODE (folded_arg0);
4496
4497 else if (GET_MODE (folded_arg1) != VOIDmode
4498 && GET_MODE_CLASS (GET_MODE (folded_arg1)) != MODE_CC)
4499 mode_arg0 = GET_MODE (folded_arg1);
4500
4501 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4502 break;
4503
4504 /* If we do not now have two constants being compared, see if we
4505 can nevertheless deduce some things about the comparison. */
4506 if (const_arg0 == 0 || const_arg1 == 0)
4507 {
4508 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
4509 constant? These aren't zero, but we don't know their sign. */
4510 if (const_arg1 == const0_rtx
4511 && (NONZERO_BASE_PLUS_P (folded_arg0)
4512 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
4513 come out as 0. */
4514 || GET_CODE (folded_arg0) == SYMBOL_REF
4515 #endif
4516 || GET_CODE (folded_arg0) == LABEL_REF
4517 || GET_CODE (folded_arg0) == CONST))
4518 {
4519 if (code == EQ)
4520 return const0_rtx;
4521 else if (code == NE)
4522 return const_true_rtx;
4523 }
4524
4525 /* See if the two operands are the same. We don't do this
4526 for IEEE floating-point since we can't assume x == x
4527 since x might be a NaN. */
4528
4529 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4530 || GET_MODE_CLASS (mode_arg0) != MODE_FLOAT)
4531 && (folded_arg0 == folded_arg1
4532 || (GET_CODE (folded_arg0) == REG
4533 && GET_CODE (folded_arg1) == REG
4534 && (reg_qty[REGNO (folded_arg0)]
4535 == reg_qty[REGNO (folded_arg1)]))
4536 || ((p0 = lookup (folded_arg0,
4537 (safe_hash (folded_arg0, mode_arg0)
4538 % NBUCKETS), mode_arg0))
4539 && (p1 = lookup (folded_arg1,
4540 (safe_hash (folded_arg1, mode_arg0)
4541 % NBUCKETS), mode_arg0))
4542 && p0->first_same_value == p1->first_same_value)))
4543 return ((code == EQ || code == LE || code == GE
4544 || code == LEU || code == GEU)
4545 ? const_true_rtx : const0_rtx);
4546
4547 /* If FOLDED_ARG0 is a register, see if the comparison we are
4548 doing now is either the same as we did before or the reverse
4549 (we only check the reverse if not floating-point). */
4550 else if (GET_CODE (folded_arg0) == REG)
4551 {
4552 int qty = reg_qty[REGNO (folded_arg0)];
4553
4554 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
4555 && (comparison_dominates_p (qty_comparison_code[qty], code)
4556 || (comparison_dominates_p (qty_comparison_code[qty],
4557 reverse_condition (code))
4558 && GET_MODE_CLASS (mode_arg0) == MODE_INT))
4559 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
4560 || (const_arg1
4561 && rtx_equal_p (qty_comparison_const[qty],
4562 const_arg1))
4563 || (GET_CODE (folded_arg1) == REG
4564 && (reg_qty[REGNO (folded_arg1)]
4565 == qty_comparison_qty[qty]))))
4566 return (comparison_dominates_p (qty_comparison_code[qty],
4567 code)
4568 ? const_true_rtx : const0_rtx);
4569 }
4570 }
4571 }
4572
4573 /* If we are comparing against zero, see if the first operand is
4574 equivalent to an IOR with a constant. If so, we may be able to
4575 determine the result of this comparison. */
4576
4577 if (const_arg1 == const0_rtx)
4578 {
4579 rtx y = lookup_as_function (folded_arg0, IOR);
4580 rtx inner_const;
4581
4582 if (y != 0
4583 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4584 && GET_CODE (inner_const) == CONST_INT
4585 && INTVAL (inner_const) != 0)
4586 {
4587 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4588 int has_sign = (HOST_BITS_PER_INT >= sign_bitnum
4589 && (INTVAL (inner_const) & (1 << sign_bitnum)));
4590
4591 switch (code)
4592 {
4593 case EQ:
4594 return const0_rtx;
4595 case NE:
4596 return const_true_rtx;
4597 case LT: case LE:
4598 if (has_sign)
4599 return const_true_rtx;
4600 break;
4601 case GT: case GE:
4602 if (has_sign)
4603 return const0_rtx;
4604 break;
4605 }
4606 }
4607 }
4608
4609 new = simplify_relational_operation (code, mode_arg0,
4610 const_arg0 ? const_arg0 : folded_arg0,
4611 const_arg1 ? const_arg1 : folded_arg1);
4612 break;
4613
4614 case '2':
4615 case 'c':
4616 switch (code)
4617 {
4618 case PLUS:
4619 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4620 with that LABEL_REF as its second operand. If so, the result is
4621 the first operand of that MINUS. This handles switches with an
4622 ADDR_DIFF_VEC table. */
4623 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4624 {
4625 rtx y = lookup_as_function (folded_arg0, MINUS);
4626
4627 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4628 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4629 return XEXP (y, 0);
4630 }
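 	  /* Concretely (L1 and L2 are hypothetical labels): if
 	     folded_arg0 is known equivalent to
 		(minus (label_ref L2) (label_ref L1))
 	     and const_arg1 is (label_ref L1), the sum telescopes to
 	     (label_ref L2), the jump-table entry itself.  */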
4631
4632 /* ... fall through ... */
4633
4634 case MINUS:
4635 case SMIN: case SMAX: case UMIN: case UMAX:
4636 case IOR: case AND: case XOR:
4637 case MULT: case DIV: case UDIV:
4638 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4639 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4640 is known to be of similar form, we may be able to replace the
4641 operation with a combined operation. This may eliminate the
4642 intermediate operation if every use is simplified in this way.
4643 Note that the similar optimization done by combine.c only works
4644 if the intermediate operation's result has only one reference. */
4645
4646 if (GET_CODE (folded_arg0) == REG
4647 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4648 {
4649 int is_shift
4650 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4651 rtx y = lookup_as_function (folded_arg0, code);
4652 rtx inner_const;
4653 enum rtx_code associate_code;
4654 rtx new_const;
4655
4656 if (y == 0
4657 || 0 == (inner_const
4658 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4659 || GET_CODE (inner_const) != CONST_INT
4660 /* If we have compiled a statement like
4661 "if (x == (x & mask1))", and now are looking at
4662 "x & mask2", we will have a case where the first operand
4663 of Y is the same as our first operand. Unless we detect
4664 this case, an infinite loop will result. */
4665 || XEXP (y, 0) == folded_arg0)
4666 break;
4667
4668 /* Don't associate these operations if they are a PLUS with the
4669 same constant and it is a power of two. These might be doable
4670 with a pre- or post-increment. Similarly for two subtracts of
4671 identical powers of two with post decrement. */
4672
4673 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4674 && (0
4675 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
4676 || exact_log2 (INTVAL (const_arg1)) >= 0
4677 #endif
4678 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
4679 || exact_log2 (- INTVAL (const_arg1)) >= 0
4680 #endif
4681 ))
4682 break;
4683
4684 /* Compute the code used to compose the constants. For example,
4685 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4686
4687 associate_code
4688 = (code == MULT || code == DIV || code == UDIV ? MULT
4689 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4690
4691 new_const = simplify_binary_operation (associate_code, mode,
4692 const_arg1, inner_const);
4693
4694 if (new_const == 0)
4695 break;
4696
4697 /* If we are associating shift operations, don't let this
4698 produce a shift larger than the object. This could
4699 occur when we follow a sign-extend with a right shift on
4700 a machine that does a sign-extend as a pair of shifts. */
4701
4702 if (is_shift && GET_CODE (new_const) == CONST_INT
4703 && INTVAL (new_const) > GET_MODE_BITSIZE (mode))
4704 break;
4705
4706 y = copy_rtx (XEXP (y, 0));
4707
4708 /* If Y contains our first operand (the most common way this
4709 can happen is if Y is a MEM), we would go into an infinite
4710 loop if we tried to fold it. So don't in that case. */
4711
4712 if (! reg_mentioned_p (folded_arg0, y))
4713 y = fold_rtx (y, insn);
4714
4715 new = simplify_binary_operation (code, mode, y, new_const);
4716 if (new)
4717 return new;
4718
4719 return gen_rtx (code, mode, y, new_const);
4720 }
4721 }
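   /* For example (hypothetical pseudos): if (reg 65) is known to be
      (plus (reg 64) (const_int 4)), then folding
 	 (plus (reg 65) (const_int 3))
      composes the two constants with associate_code == PLUS and yields
 	 (plus (reg 64) (const_int 7)),
      so the intermediate addition can be deleted if this was its only
      use.  */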
4722
4723 new = simplify_binary_operation (code, mode,
4724 const_arg0 ? const_arg0 : folded_arg0,
4725 const_arg1 ? const_arg1 : folded_arg1);
4726 break;
4727
4728 case 'o':
4729 /* (lo_sum (high X) X) is simply X. */
4730 if (code == LO_SUM && const_arg0 != 0
4731 && GET_CODE (const_arg0) == HIGH
4732 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4733 return const_arg1;
4734 break;
4735
4736 case '3':
4737 case 'b':
4738 new = simplify_ternary_operation (code, mode, mode_arg0,
4739 const_arg0 ? const_arg0 : folded_arg0,
4740 const_arg1 ? const_arg1 : folded_arg1,
4741 const_arg2 ? const_arg2 : XEXP (x, 2));
4742 break;
4743 }
4744
4745 return new ? new : x;
4746 }
4747 \f
4748 /* Return a constant value currently equivalent to X.
4749 Return 0 if we don't know one. */
4750
4751 static rtx
4752 equiv_constant (x)
4753 rtx x;
4754 {
4755 if (GET_CODE (x) == REG
4756 && REGNO_QTY_VALID_P (REGNO (x))
4757 && qty_const[reg_qty[REGNO (x)]])
4758 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
4759
4760 if (x != 0 && CONSTANT_P (x))
4761 return x;
4762
4763 return 0;
4764 }
4765 \f
4766 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4767 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4768 least-significant part of X.
4769 MODE specifies how big a part of X to return.
4770
4771 If the requested operation cannot be done, 0 is returned.
4772
4773 This is similar to gen_lowpart in emit-rtl.c. */
4774
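 /* A usage sketch (the MEM operand is hypothetical): asking for the low
    QImode part of a SImode memory reference,

 	rtx low = gen_lowpart_if_possible (QImode, si_mem);

    returns either a narrower MEM whose address has been adjusted for the
    target's endianness, or 0 when the adjusted address is not valid, so
    callers must always be prepared for failure.  */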
4775 rtx
4776 gen_lowpart_if_possible (mode, x)
4777 enum machine_mode mode;
4778 register rtx x;
4779 {
4780 rtx result = gen_lowpart_common (mode, x);
4781
4782 if (result)
4783 return result;
4784 else if (GET_CODE (x) == MEM)
4785 {
4786 /* This is the only other case we handle. */
4787 register int offset = 0;
4788 rtx new;
4789
4790 #if WORDS_BIG_ENDIAN
4791 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4792 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4793 #endif
4794 #if BYTES_BIG_ENDIAN
4795 /* Adjust the address so that the address-after-the-data
4796 is unchanged. */
4797 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4798 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4799 #endif
4800 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
4801 if (! memory_address_p (mode, XEXP (new, 0)))
4802 return 0;
4803 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
4804 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4805 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
4806 return new;
4807 }
4808 else
4809 return 0;
4810 }
4811 \f
4812 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4813 branch. It will be zero if not.
4814
4815 In certain cases, this can cause us to add an equivalence. For example,
4816 if we are following the taken case of
4817 if (i == 2)
4818 we can add the fact that `i' and `2' are now equivalent.
4819
4820 In any case, we can record that this comparison was passed. If the same
4821 comparison is seen later, we will know its value. */
4822
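 /* Conversely, when following the fall-through (not-taken) arm of
 	if (i != 2)
    the condition is known false, reverse_condition turns NE into EQ,
    and `i' and `2' can again be placed in one equivalence class.  */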
4823 static void
4824 record_jump_equiv (insn, taken)
4825 rtx insn;
4826 int taken;
4827 {
4828 int cond_known_true;
4829 rtx op0, op1;
4830 enum machine_mode mode;
4831 int reversed_nonequality = 0;
4832 enum rtx_code code;
4833
4834 /* Ensure this is the right kind of insn. */
4835 if (! condjump_p (insn) || simplejump_p (insn))
4836 return;
4837
4838 /* See if this jump condition is known true or false. */
4839 if (taken)
4840 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4841 else
4842 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4843
4844 /* Get the type of comparison being done and the operands being compared.
4845 If we had to reverse a non-equality condition, record that fact so we
4846 know that it isn't valid for floating-point. */
4847 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4848 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4849 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4850
4851 code = find_comparison_args (code, &op0, &op1);
4852 if (! cond_known_true)
4853 {
4854 reversed_nonequality = (code != EQ && code != NE);
4855 code = reverse_condition (code);
4856 }
4857
4858 /* The mode is the mode of the non-constant. */
4859 mode = GET_MODE (op0);
4860 if (mode == VOIDmode) mode = GET_MODE (op1);
4861
4862 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4863 }
4864
4865 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4866 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4867 Make any useful entries we can with that information. Called from
4868 the function above and called recursively. */
4869
4870 static void
4871 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4872 enum rtx_code code;
4873 enum machine_mode mode;
4874 rtx op0, op1;
4875 int reversed_nonequality;
4876 {
4877 int op0_hash_code, op1_hash_code;
4878 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
4879 struct table_elt *op0_elt, *op1_elt;
4880
4881 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4882 we know that they are also equal in the smaller mode (this is also
4883 true for all smaller modes whether or not there is a SUBREG, but
4884 is not worth testing for with no SUBREG). */
4885
4886 if (code == EQ && GET_CODE (op0) == SUBREG
4887 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
4888 {
4889 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4890 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4891
4892 record_jump_cond (code, mode, SUBREG_REG (op0),
4893 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
4894 reversed_nonequality);
4895 }
4896
4897 if (code == EQ && GET_CODE (op1) == SUBREG
4898 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
4899 {
4900 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4901 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4902
4903 record_jump_cond (code, mode, SUBREG_REG (op1),
4904 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
4905 reversed_nonequality);
4906 }
4907
4908 /* Similarly, if this is an NE comparison, and either is a SUBREG
4909 making a smaller mode, we know the whole thing is also NE. */
4910
4911 if (code == NE && GET_CODE (op0) == SUBREG
4912 && subreg_lowpart_p (op0)
4913 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
4914 {
4915 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4916 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4917
4918 record_jump_cond (code, mode, SUBREG_REG (op0),
4919 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
4920 reversed_nonequality);
4921 }
4922
4923 if (code == NE && GET_CODE (op1) == SUBREG
4924 && subreg_lowpart_p (op1)
4925 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
4926 {
4927 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4928 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4929
4930 record_jump_cond (code, mode, SUBREG_REG (op1),
4931 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
4932 reversed_nonequality);
4933 }
4934
4935 /* Hash both operands. */
4936
4937 do_not_record = 0;
4938 hash_arg_in_memory = 0;
4939 hash_arg_in_struct = 0;
4940 op0_hash_code = HASH (op0, mode);
4941 op0_in_memory = hash_arg_in_memory;
4942 op0_in_struct = hash_arg_in_struct;
4943
4944 if (do_not_record)
4945 return;
4946
4947 do_not_record = 0;
4948 hash_arg_in_memory = 0;
4949 hash_arg_in_struct = 0;
4950 op1_hash_code = HASH (op1, mode);
4951 op1_in_memory = hash_arg_in_memory;
4952 op1_in_struct = hash_arg_in_struct;
4953
4954 if (do_not_record)
4955 return;
4956
4957 /* Look up both operands. */
4958 op0_elt = lookup (op0, op0_hash_code, mode);
4959 op1_elt = lookup (op1, op1_hash_code, mode);
4960
4961 /* If we aren't setting two things equal, all we can do is save this
4962 comparison. */
4963 if (code != EQ)
4964 {
4965 /* If we reversed a floating-point comparison, if OP0 is not a
4966 register, or if OP1 is neither a register nor a constant, we can't
4967 do anything. */
4968
4969 if (GET_CODE (op1) != REG)
4970 op1 = equiv_constant (op1);
4971
4972 if ((reversed_nonequality && GET_MODE_CLASS (mode) != MODE_INT)
4973 || GET_CODE (op0) != REG || op1 == 0)
4974 return;
4975
4976 /* Put OP0 in the hash table if it isn't already. This gives it a
4977 new quantity number. */
4978 if (op0_elt == 0)
4979 {
4980 if (insert_regs (op0, 0, 0))
4981 {
4982 rehash_using_reg (op0);
4983 op0_hash_code = HASH (op0, mode);
4984 }
4985
4986 op0_elt = insert (op0, 0, op0_hash_code, mode);
4987 op0_elt->in_memory = op0_in_memory;
4988 op0_elt->in_struct = op0_in_struct;
4989 }
4990
4991 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
4992 if (GET_CODE (op1) == REG)
4993 {
4994 /* Put OP1 in the hash table so it gets a new quantity number. */
4995 if (op1_elt == 0)
4996 {
4997 if (insert_regs (op1, 0, 0))
4998 {
4999 rehash_using_reg (op1);
5000 op1_hash_code = HASH (op1, mode);
5001 }
5002
5003 op1_elt = insert (op1, 0, op1_hash_code, mode);
5004 op1_elt->in_memory = op1_in_memory;
5005 op1_elt->in_struct = op1_in_struct;
5006 }
5007
5008 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5009 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5010 }
5011 else
5012 {
5013 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5014 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5015 }
5016
5017 return;
5018 }
5019
5020 /* If both are equivalent, merge the two classes. Save this class for
5021 `cse_set_around_loop'. */
5022 if (op0_elt && op1_elt)
5023 {
5024 merge_equiv_classes (op0_elt, op1_elt);
5025 last_jump_equiv_class = op0_elt;
5026 }
5027
5028 /* For whichever side doesn't have an equivalence, make one. */
5029 if (op0_elt == 0)
5030 {
5031 if (insert_regs (op0, op1_elt, 0))
5032 {
5033 rehash_using_reg (op0);
5034 op0_hash_code = HASH (op0, mode);
5035 }
5036
5037 op0_elt = insert (op0, op1_elt, op0_hash_code, mode);
5038 op0_elt->in_memory = op0_in_memory;
5039 op0_elt->in_struct = op0_in_struct;
5040 last_jump_equiv_class = op0_elt;
5041 }
5042
5043 if (op1_elt == 0)
5044 {
5045 if (insert_regs (op1, op0_elt, 0))
5046 {
5047 rehash_using_reg (op1);
5048 op1_hash_code = HASH (op1, mode);
5049 }
5050
5051 op1_elt = insert (op1, op0_elt, op1_hash_code, mode);
5052 op1_elt->in_memory = op1_in_memory;
5053 op1_elt->in_struct = op1_in_struct;
5054 last_jump_equiv_class = op1_elt;
5055 }
5056 }
5057 \f
5058 /* CSE processing for one instruction.
5059 First simplify sources and addresses of all assignments
5060 in the instruction, using previously-computed equivalent values.
5061 Then install the new sources and destinations in the table
5062 of available values.
5063
5064 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5065 the insn. */
5066
5067 /* Data on one SET contained in the instruction. */
5068
5069 struct set
5070 {
5071 /* The SET rtx itself. */
5072 rtx rtl;
5073 /* The SET_SRC of the rtx (the original value, if it is changing). */
5074 rtx src;
5075 /* The hash-table element for the SET_SRC of the SET. */
5076 struct table_elt *src_elt;
5077 /* Hash code for the SET_SRC. */
5078 int src_hash_code;
5079 /* Hash code for the SET_DEST. */
5080 int dest_hash_code;
5081 /* The SET_DEST, with SUBREG, etc., stripped. */
5082 rtx inner_dest;
5083 /* Place where the pointer to the INNER_DEST was found. */
5084 rtx *inner_dest_loc;
5085 /* Nonzero if the SET_SRC is in memory. */
5086 char src_in_memory;
5087 /* Nonzero if the SET_SRC is in a structure. */
5088 char src_in_struct;
5089 /* Nonzero if the SET_SRC contains something
5090 whose value cannot be predicted and understood. */
5091 char src_volatile;
5092 /* Original machine mode, in case it becomes a CONST_INT. */
5093 enum machine_mode mode;
5094 /* A constant equivalent for SET_SRC, if any. */
5095 rtx src_const;
5096 /* Hash code of constant equivalent for SET_SRC. */
5097 int src_const_hash_code;
5098 /* Table entry for constant equivalent for SET_SRC, if any. */
5099 struct table_elt *src_const_elt;
5100 };
5101
5102 static void
5103 cse_insn (insn, in_libcall_block)
5104 rtx insn;
5105 int in_libcall_block;
5106 {
5107 register rtx x = PATTERN (insn);
5108 rtx tem;
5109 register int i;
5110 register int n_sets = 0;
5111
5112 /* Records what this insn does to set CC0. */
5113 rtx this_insn_cc0 = 0;
5114 enum machine_mode this_insn_cc0_mode;
5115 struct write_data writes_memory;
5116 static struct write_data init = {0, 0, 0, 0};
5117
5118 rtx src_eqv = 0;
5119 struct table_elt *src_eqv_elt = 0;
5120 int src_eqv_volatile;
5121 int src_eqv_in_memory;
5122 int src_eqv_in_struct;
5123 int src_eqv_hash_code;
5124
5125 struct set *sets;
5126
5127 this_insn = insn;
5128 writes_memory = init;
5129
5130 /* Find all the SETs and CLOBBERs in this instruction.
5131 Record all the SETs in the array `set' and count them.
5132 Also determine whether there is a CLOBBER that invalidates
5133 all memory references, or all references at varying addresses. */
5134
5135 if (GET_CODE (x) == SET)
5136 {
5137 sets = (struct set *) alloca (sizeof (struct set));
5138 sets[0].rtl = x;
5139
5140 /* Ignore SETs that are unconditional jumps.
5141 They never need cse processing, so this does not hurt.
5142 The reason is not efficiency but rather
5143 so that we can test at the end for instructions
5144 that have been simplified to unconditional jumps
5145 and not be misled by unchanged instructions
5146 that were unconditional jumps to begin with. */
5147 if (SET_DEST (x) == pc_rtx
5148 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5149 ;
5150
5151 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5152 The hard function value register is used only once, to copy to
5153 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5154 Ensure we invalidate the destination register. On the 80386 no
5155 other code would invalidate it since it is a fixed_reg. */
5156
5157 else if (GET_CODE (SET_SRC (x)) == CALL)
5158 {
5159 canon_reg (SET_SRC (x), insn);
5160 fold_rtx (SET_SRC (x), insn);
5161 invalidate (SET_DEST (x));
5162 }
5163 else
5164 n_sets = 1;
5165 }
5166 else if (GET_CODE (x) == PARALLEL)
5167 {
5168 register int lim = XVECLEN (x, 0);
5169
5170 sets = (struct set *) alloca (lim * sizeof (struct set));
5171
5172 /* Find all regs explicitly clobbered in this insn,
5173 and ensure they are not replaced with any other regs
5174 elsewhere in this insn.
5175 When a reg that is clobbered is also used for input,
5176 we should presume that that is for a reason,
5177 and we should not substitute some other register
5178 which is not supposed to be clobbered.
5179 Therefore, this loop cannot be merged into the one below
5180 because a CALL may precede a CLOBBER and refer to the
5181 value clobbered. We must not let a canonicalization do
5182 anything in that case. */
5183 for (i = 0; i < lim; i++)
5184 {
5185 register rtx y = XVECEXP (x, 0, i);
5186 if (GET_CODE (y) == CLOBBER && GET_CODE (XEXP (y, 0)) == REG)
5187 invalidate (XEXP (y, 0));
5188 }
5189
5190 for (i = 0; i < lim; i++)
5191 {
5192 register rtx y = XVECEXP (x, 0, i);
5193 if (GET_CODE (y) == SET)
5194 {
5195 /* As above, we ignore unconditional jumps and call-insns. */
5196 if (GET_CODE (SET_SRC (y)) == CALL)
5197 {
5198 canon_reg (SET_SRC (y), insn);
5199 fold_rtx (SET_SRC (y), insn);
5200 invalidate (SET_DEST (y));
5201 }
5202 else if (SET_DEST (y) == pc_rtx
5203 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5204 ;
5205 else
5206 sets[n_sets++].rtl = y;
5207 }
5208 else if (GET_CODE (y) == CLOBBER)
5209 {
5210 /* If we clobber memory, take note of that,
5211 and canon the address.
5212 This does nothing when a register is clobbered
5213 because we have already invalidated the reg. */
5214 if (GET_CODE (XEXP (y, 0)) == MEM)
5215 {
5216 canon_reg (XEXP (y, 0), 0);
5217 note_mem_written (XEXP (y, 0), &writes_memory);
5218 }
5219 }
5220 else if (GET_CODE (y) == USE
5221 && ! (GET_CODE (XEXP (y, 0)) == REG
5222 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
5223 canon_reg (y, 0);
5224 else if (GET_CODE (y) == CALL)
5225 {
5226 canon_reg (y, insn);
5227 fold_rtx (y, insn);
5228 }
5229 }
5230 }
5231 else if (GET_CODE (x) == CLOBBER)
5232 {
5233 if (GET_CODE (XEXP (x, 0)) == MEM)
5234 {
5235 canon_reg (XEXP (x, 0), 0);
5236 note_mem_written (XEXP (x, 0), &writes_memory);
5237 }
5238 }
5239
5240 /* Canonicalize a USE of a pseudo register or memory location. */
5241 else if (GET_CODE (x) == USE
5242 && ! (GET_CODE (XEXP (x, 0)) == REG
5243 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5244 canon_reg (XEXP (x, 0), 0);
5245 else if (GET_CODE (x) == CALL)
5246 {
5247 canon_reg (x, insn);
5248 fold_rtx (x, insn);
5249 }
5250
5251 if (n_sets == 1 && REG_NOTES (insn) != 0)
5252 {
5253 /* Store the equivalent value in SRC_EQV, if different. */
5254 rtx tem = find_reg_note (insn, REG_EQUAL, 0);
5255
5256 if (tem && ! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
5257 src_eqv = canon_reg (XEXP (tem, 0), 0);
5258 }
5259
5260 /* Canonicalize sources and addresses of destinations.
5261 We do this in a separate pass to avoid problems when a MATCH_DUP is
5262 present in the insn pattern. In that case, we want to ensure that
5263 we don't break the duplicate nature of the pattern. So we will replace
5264 both operands at the same time. Otherwise, we would fail to find an
5265 equivalent substitution in the loop calling validate_change below.
5266 (We also speed up that loop when a canonicalization was done since
5267 recog_memoized need not be called for just a canonicalization unless
5268 a pseudo register is being replaced by a hard reg or vice versa.)
5269
5270 We used to suppress canonicalization of DEST if it appears in SRC,
5271 but we don't do this any more.
5272
5273 ??? The way this code is written now, if we have a MATCH_DUP between
5274 two operands that are pseudos and we would want to canonicalize them
5275 to a hard register, we won't do that. The only time this would happen
5276 is if the hard reg was a fixed register, and this should be rare.
5277
5278 ??? This won't work if there is a MATCH_DUP between an input and an
5279 output, but these never worked and must be declared invalid. */
5280
5281 for (i = 0; i < n_sets; i++)
5282 {
5283 rtx dest = SET_DEST (sets[i].rtl);
5284 rtx src = SET_SRC (sets[i].rtl);
5285 rtx new = canon_reg (src, insn);
5286
5287 if (GET_CODE (new) == REG && GET_CODE (src) == REG
5288 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5289 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5290 validate_change (insn, &SET_SRC (sets[i].rtl), new, 0);
5291 else
5292 SET_SRC (sets[i].rtl) = new;
5293
5294 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5295 {
5296 validate_change (insn, &XEXP (dest, 1),
5297 canon_reg (XEXP (dest, 1), insn), 0);
5298 validate_change (insn, &XEXP (dest, 2),
5299 canon_reg (XEXP (dest, 2), insn), 0);
5300 }
5301
5302 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5303 || GET_CODE (dest) == ZERO_EXTRACT
5304 || GET_CODE (dest) == SIGN_EXTRACT)
5305 dest = XEXP (dest, 0);
5306
5307 if (GET_CODE (dest) == MEM)
5308 canon_reg (dest, insn);
5309 }
5310
5311 /* Set sets[i].src_elt to the class each source belongs to.
5312 Detect assignments from or to volatile things
5313 and set sets[i] to zero so they will be ignored
5314 in the rest of this function.
5315
5316 Nothing in this loop changes the hash table or the register chains. */
5317
5318 for (i = 0; i < n_sets; i++)
5319 {
5320 register rtx src, dest;
5321 register rtx src_folded;
5322 register struct table_elt *elt = 0, *p;
5323 enum machine_mode mode;
5324 rtx src_eqv_here;
5325 rtx src_const = 0;
5326 rtx src_related = 0;
5327 struct table_elt *src_const_elt = 0;
5328 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
5329 int src_related_cost = 10000, src_elt_cost = 10000;
5330 /* Set non-zero if we need to call force_const_mem on the
5331 contents of src_folded before using it. */
5332 int src_folded_force_flag = 0;
5333
5334 dest = SET_DEST (sets[i].rtl);
5335 src = SET_SRC (sets[i].rtl);
5336
5337 /* If SRC is a constant that has no machine mode,
5338 hash it with the destination's machine mode.
5339 This way we can keep different modes separate. */
5340
5341 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5342 sets[i].mode = mode;
5343
5344 if (src_eqv)
5345 {
5346 enum machine_mode eqvmode = mode;
5347 if (GET_CODE (dest) == STRICT_LOW_PART)
5348 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5349 do_not_record = 0;
5350 hash_arg_in_memory = 0;
5351 hash_arg_in_struct = 0;
5352 src_eqv = fold_rtx (src_eqv, insn);
5353 src_eqv_hash_code = HASH (src_eqv, eqvmode);
5354
5355 /* Find the equivalence class for the equivalent expression. */
5356
5357 if (!do_not_record)
5358 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, eqvmode);
5359
5360 src_eqv_volatile = do_not_record;
5361 src_eqv_in_memory = hash_arg_in_memory;
5362 src_eqv_in_struct = hash_arg_in_struct;
5363 }
5364
5365 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5366 value of the INNER register, not the destination. So it is not
5367 a legal substitution for the source. But save it for later. */
5368 if (GET_CODE (dest) == STRICT_LOW_PART)
5369 src_eqv_here = 0;
5370 else
5371 src_eqv_here = src_eqv;
5372
5373 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5374 simplified result, which may not necessarily be valid. */
5375 src_folded = fold_rtx (src, insn);
5376
5377 /* If storing a constant in a bitfield, pre-truncate the constant
5378 so we will be able to record it later. */
5379 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5380 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5381 {
5382 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5383
5384 if (GET_CODE (src) == CONST_INT
5385 && GET_CODE (width) == CONST_INT
5386 && INTVAL (width) < HOST_BITS_PER_INT
5387 && (INTVAL (src) & ((-1) << INTVAL (width))))
5388 src_folded = gen_rtx (CONST_INT, VOIDmode,
5389 INTVAL (src) & ((1 << INTVAL (width)) - 1));
5390 }
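 	  /* A worked example (hypothetical values): storing
 	     (const_int 0x1ff) into an 8-bit ZERO_EXTRACT trips the test
 	     above, so src_folded becomes
 		(const_int 0xff)	(0x1ff & ((1 << 8) - 1))
 	     which is the value the bitfield will actually hold.  */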
5391
5392 /* Compute SRC's hash code, and also notice if it
5393 should not be recorded at all. In that case,
5394 prevent any further processing of this assignment. */
5395 do_not_record = 0;
5396 hash_arg_in_memory = 0;
5397 hash_arg_in_struct = 0;
5398
5399 sets[i].src = src;
5400 sets[i].src_hash_code = HASH (src, mode);
5401 sets[i].src_volatile = do_not_record;
5402 sets[i].src_in_memory = hash_arg_in_memory;
5403 sets[i].src_in_struct = hash_arg_in_struct;
5404
5405 /* If source is a perverse subreg (such as QI treated as an SI),
5406 treat it as volatile. It may do the work of an SI in one context
5407 where the extra bits are not being used, but cannot replace an SI
5408 in general. */
5409 if (GET_CODE (src) == SUBREG
5410 && (GET_MODE_SIZE (GET_MODE (src))
5411 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5412 sets[i].src_volatile = 1;
5413
5414 /* Locate all possible equivalent forms for SRC. Try to replace
5415 SRC in the insn with each cheaper equivalent.
5416
5417 We have the following types of equivalents: SRC itself, a folded
5418 version, a value given in a REG_EQUAL note, or a value related
5419 to a constant.
5420
5421 Each of these equivalents may be part of an additional class
5422 of equivalents (if more than one is in the table, they must be in
5423 the same class; we check for this).
5424
5425 If the source is volatile, we don't do any table lookups.
5426
5427 We note any constant equivalent for possible later use in a
5428 REG_NOTE. */
5429
5430 if (!sets[i].src_volatile)
5431 elt = lookup (src, sets[i].src_hash_code, mode);
5432
5433 sets[i].src_elt = elt;
5434
5435 if (elt && src_eqv_here && src_eqv_elt)
5436 {
5437 if (elt->first_same_value != src_eqv_elt->first_same_value)
5438 {
5439 /* The REG_EQUAL is indicating that two formerly distinct
5440 classes are now equivalent. So merge them. */
5441 merge_equiv_classes (elt, src_eqv_elt);
5442 src_eqv_hash_code = HASH (src_eqv, elt->mode);
5443 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, elt->mode);
5444 }
5445
5446 src_eqv_here = 0;
5447 }
5448
5449 else if (src_eqv_elt)
5450 elt = src_eqv_elt;
5451
5452 /* Try to find a constant somewhere and record it in `src_const'.
5453 Record its table element, if any, in `src_const_elt'. Look in
5454 any known equivalences first. (If the constant is not in the
5455 table, also set `sets[i].src_const_hash_code'). */
5456 if (elt)
5457 for (p = elt->first_same_value; p; p = p->next_same_value)
5458 if (p->is_const)
5459 {
5460 src_const = p->exp;
5461 src_const_elt = elt;
5462 break;
5463 }
5464
5465 if (src_const == 0
5466 && (CONSTANT_P (src_folded)
5467 /* Consider (minus (label_ref L1) (label_ref L2)) as
5468 "constant" here so we will record it. This allows us
5469 to fold switch statements when an ADDR_DIFF_VEC is used. */
5470 || (GET_CODE (src_folded) == MINUS
5471 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5472 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5473 src_const = src_folded, src_const_elt = elt;
5474 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5475 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5476
5477 /* If we don't know if the constant is in the table, get its
5478 hash code and look it up. */
5479 if (src_const && src_const_elt == 0)
5480 {
5481 sets[i].src_const_hash_code = HASH (src_const, mode);
5482 src_const_elt = lookup (src_const, sets[i].src_const_hash_code,
5483 mode);
5484 }
5485
5486 sets[i].src_const = src_const;
5487 sets[i].src_const_elt = src_const_elt;
5488
5489 /* If the constant and our source are both in the table, mark them as
5490 equivalent. Otherwise, if a constant is in the table but the source
5491 isn't, set ELT to it. */
5492 if (src_const_elt && elt
5493 && src_const_elt->first_same_value != elt->first_same_value)
5494 merge_equiv_classes (elt, src_const_elt);
5495 else if (src_const_elt && elt == 0)
5496 elt = src_const_elt;
5497
5498 /* See if there is a register linearly related to a constant
5499 equivalent of SRC. */
5500 if (src_const
5501 && (GET_CODE (src_const) == CONST
5502 || (src_const_elt && src_const_elt->related_value != 0)))
5503 {
5504 src_related = use_related_value (src_const, src_const_elt);
5505 if (src_related)
5506 {
5507 struct table_elt *src_related_elt
5508 = lookup (src_related, HASH (src_related, mode), mode);
5509 if (src_related_elt && elt)
5510 {
5511 if (elt->first_same_value
5512 != src_related_elt->first_same_value)
5513 /* This can occur when we previously saw a CONST
5514 involving a SYMBOL_REF and then see the SYMBOL_REF
5515 twice. Merge the involved classes. */
5516 merge_equiv_classes (elt, src_related_elt);
5517
5518 src_related = 0;
5519 src_related_elt = 0;
5520 }
5521 else if (src_related_elt && elt == 0)
5522 elt = src_related_elt;
5523 }
5524 }
5525
5526 /* Another possibility is that we have an AND with a constant in
5527 a mode narrower than a word. If so, it might have been generated
5528 as part of an "if" which would narrow the AND. If we already
5529 have done the AND in a wider mode, we can use a SUBREG of that
5530 value. */
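/* For example (hypothetical registers): if we have already computed
(set (reg:SI 102) (and:SI (reg:SI 101) (const_int 15))) and now see
(and:QI (subreg:QI (reg:SI 101) 0) (const_int 15)), the low byte of
reg 102 already holds the desired result, so the loop below can find
(subreg:QI (reg:SI 102) 0) to use as SRC_RELATED. */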
5531
5532 if (flag_expensive_optimizations && ! src_related
5533 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5534 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5535 {
5536 enum machine_mode tmode;
5537 rtx new_and = gen_rtx (AND, VOIDmode, 0, XEXP (src, 1));
5538
5539 for (tmode = GET_MODE_WIDER_MODE (mode);
5540 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5541 tmode = GET_MODE_WIDER_MODE (tmode))
5542 {
5543 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5544 struct table_elt *larger_elt;
5545
5546 if (inner)
5547 {
5548 PUT_MODE (new_and, tmode);
5549 XEXP (new_and, 0) = inner;
5550 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5551 if (larger_elt == 0)
5552 continue;
5553
5554 for (larger_elt = larger_elt->first_same_value;
5555 larger_elt; larger_elt = larger_elt->next_same_value)
5556 if (GET_CODE (larger_elt->exp) == REG)
5557 {
5558 src_related
5559 = gen_lowpart_if_possible (mode, larger_elt->exp);
5560 break;
5561 }
5562
5563 if (src_related)
5564 break;
5565 }
5566 }
5567 }
5568
5569 if (src == src_folded)
5570 src_folded = 0;
5571
5572 /* At this point, ELT, if non-zero, points to a class of expressions
5573 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5574 and SRC_RELATED, if non-zero, each contain additional equivalent
5575 expressions. Prune these latter expressions by deleting expressions
5576 already in the equivalence class.
5577
5578 Check for an equivalent identical to the destination. If found,
5579 this is the preferred equivalent since it will likely lead to
5580 elimination of the insn. Indicate this by placing it in
5581 `src_related'. */
5582
5583 if (elt) elt = elt->first_same_value;
5584 for (p = elt; p; p = p->next_same_value)
5585 {
5586 enum rtx_code code = GET_CODE (p->exp);
5587
5588 /* If the expression is not valid, ignore it. Then we do not
5589 have to check for validity below. In most cases, we can use
5590 `rtx_equal_p', since canonicalization has already been done. */
5591 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5592 continue;
5593
5594 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5595 src = 0;
5596 else if (src_folded && GET_CODE (src_folded) == code
5597 && rtx_equal_p (src_folded, p->exp))
5598 src_folded = 0;
5599 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5600 && rtx_equal_p (src_eqv_here, p->exp))
5601 src_eqv_here = 0;
5602 else if (src_related && GET_CODE (src_related) == code
5603 && rtx_equal_p (src_related, p->exp))
5604 src_related = 0;
5605
5606 /* If this is the same as the destination of the insn, we want
5607 to prefer it. Copy it to src_related; the code below will
5608 then give it a negative cost. */
5609 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5610 src_related = dest;
5611
5612 }
5613
5614 /* Find the cheapest valid equivalent, trying all the available
5615 possibilities. Prefer items not in the hash table to ones
5616 that are, when their costs are equal. Note that we can never
5617 worsen an insn as the current contents will also succeed.
5618 If we find an equivalent identical to the source, use it as best,
5619 since this insn will probably be eliminated in that case. */
5620 if (src)
5621 {
5622 if (rtx_equal_p (src, dest))
5623 src_cost = -1;
5624 else
5625 src_cost = COST (src);
5626 }
5627
5628 if (src_eqv_here)
5629 {
5630 if (rtx_equal_p (src_eqv_here, dest))
5631 src_eqv_cost = -1;
5632 else
5633 src_eqv_cost = COST (src_eqv_here);
5634 }
5635
5636 if (src_folded)
5637 {
5638 if (rtx_equal_p (src_folded, dest))
5639 src_folded_cost = -1;
5640 else
5641 src_folded_cost = COST (src_folded);
5642 }
5643
5644 if (src_related)
5645 {
5646 if (rtx_equal_p (src_related, dest))
5647 src_related_cost = -1;
5648 else
5649 src_related_cost = COST (src_related);
5650 }
5651
5652 /* If this was an indirect jump insn, a known label will really be
5653 cheaper even though it looks more expensive. */
5654 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5655 src_folded = src_const, src_folded_cost = -1;
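/* E.g. (a hypothetical case): for (set (pc) (reg:SI 40)) where reg 40
is known equivalent to (label_ref 23), substituting the LABEL_REF
turns the indirect jump into a direct one, which is preferable even
though COST rates the register as cheaper. */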
5656
5657 /* Loop until a replacement is made. The loop must terminate, since
5658 the current contents will be tested and will always be valid. */
5659 while (1)
5660 {
5661 rtx trial;
5662
5663 /* Skip invalid entries. */
5664 while (elt && GET_CODE (elt->exp) != REG
5665 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5666 elt = elt->next_same_value;
5667
5668 if (elt) src_elt_cost = elt->cost;
5669
5670 /* Find cheapest and skip it for the next time. For items
5671 of equal cost, use this order:
5672 src_folded, src, src_eqv, src_related and hash table entry. */
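/* (The assignments of 10000 below act as an "already tried" marker:
10000 is assumed to exceed any real COST value, so a rejected
alternative is never chosen again.) */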
5673 if (src_folded_cost <= src_cost
5674 && src_folded_cost <= src_eqv_cost
5675 && src_folded_cost <= src_related_cost
5676 && src_folded_cost <= src_elt_cost)
5677 {
5678 trial = src_folded, src_folded_cost = 10000;
5679 if (src_folded_force_flag)
5680 trial = force_const_mem (mode, trial);
5681 }
5682 else if (src_cost <= src_eqv_cost
5683 && src_cost <= src_related_cost
5684 && src_cost <= src_elt_cost)
5685 trial = src, src_cost = 10000;
5686 else if (src_eqv_cost <= src_related_cost
5687 && src_eqv_cost <= src_elt_cost)
5688 trial = src_eqv_here, src_eqv_cost = 10000;
5689 else if (src_related_cost <= src_elt_cost)
5690 trial = src_related, src_related_cost = 10000;
5691 else
5692 {
5693 trial = canon_reg (copy_rtx (elt->exp), 0);
5694 elt = elt->next_same_value;
5695 src_elt_cost = 10000;
5696 }
5697
5698 /* We don't normally have an insn matching (set (pc) (pc)), so
5699 check for this separately here. We will delete such an
5700 insn below.
5701
5702 Tablejump insns contain a USE of the table, so simply replacing
5703 the operand with the constant won't match. This is simply an
5704 unconditional branch, however, and is therefore valid. Just
5705 insert the substitution here and we will delete and re-emit
5706 the insn later. */
5707
5708 if (n_sets == 1 && dest == pc_rtx
5709 && (trial == pc_rtx
5710 || (GET_CODE (trial) == LABEL_REF
5711 && ! condjump_p (insn))))
5712 {
5713 /* If TRIAL is a label in front of a jump table, we are
5714 really falling through the switch (this is how casesi
5715 insns work), so we must branch around the table. */
5716 if (GET_CODE (trial) == CODE_LABEL
5717 && NEXT_INSN (trial) != 0
5718 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
5719 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
5720 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
5721
5722 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
5723
5724 SET_SRC (sets[i].rtl) = trial;
5725 break;
5726 }
5727
5728 /* Look for a substitution that makes a valid insn. */
5729 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5730 break;
5731
5732 /* If we previously found constant pool entries for
5733 constants and this is a constant, try making a
5734 pool entry. Put it in src_folded unless we have already done
5735 so, since that is where it likely came from. */
5736
5737 else if (constant_pool_entries_cost
5738 && CONSTANT_P (trial)
5739 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
5740 && GET_MODE_CLASS (mode) != MODE_CC)
5741 {
5742 src_folded_force_flag = 1;
5743 src_folded = trial;
5744 src_folded_cost = constant_pool_entries_cost;
5745 }
5746 }
5747
5748 src = SET_SRC (sets[i].rtl);
5749
5750 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5751 However, there is an important exception: If both are registers
5752 that are not the head of their equivalence class, replace SET_SRC
5753 with the head of the class. If we do not do this, we will have
5754 both registers live over a portion of the basic block. This way,
5755 their lifetimes will likely abut instead of overlapping. */
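/* An illustrative sketch (pseudo numbers invented): if regs 102 and
105 share a quantity whose first register is 102, a set that has
become (set (reg 105) (reg 105)) is rewritten here as
(set (reg 105) (reg 102)), a real copy from the head of the class. */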
5756 if (GET_CODE (dest) == REG
5757 && REGNO_QTY_VALID_P (REGNO (dest))
5758 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
5759 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
5760 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5761 /* Don't do this if the original insn had a hard reg as
5762 SET_SRC. */
5763 && (GET_CODE (sets[i].src) != REG
5764 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
5765 /* We can't call canon_reg here because it won't do anything if
5766 SRC is a hard register. */
5767 {
5768 int first = qty_first_reg[reg_qty[REGNO (src)]];
5769
5770 src = SET_SRC (sets[i].rtl)
5771 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
5772 : gen_rtx (REG, GET_MODE (src), first);
5773
5774 /* If we had a constant that is cheaper than what we are now
5775 setting SRC to, use that constant. We ignored it when we
5776 thought we could make this into a no-op. */
5777 if (src_const && COST (src_const) < COST (src)
5778 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
5779 src = src_const;
5780 }
5781
5782 /* If we made a change, recompute SRC values. */
5783 if (src != sets[i].src)
5784 {
5785 do_not_record = 0;
5786 hash_arg_in_memory = 0;
5787 hash_arg_in_struct = 0;
5788 sets[i].src = src;
5789 sets[i].src_hash_code = HASH (src, mode);
5790 sets[i].src_volatile = do_not_record;
5791 sets[i].src_in_memory = hash_arg_in_memory;
5792 sets[i].src_in_struct = hash_arg_in_struct;
5793 sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
5794 }
5795
5796 /* If this is a single SET, we are setting a register, and we have an
5797 equivalent constant, we want to add a REG_NOTE. We don't want
5798 to write a REG_EQUAL note for a constant pseudo since verifying that
5799 that pseudo hasn't been eliminated is a pain. Such a note also
5800 won't help anything. */
5801 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5802 && GET_CODE (src_const) != REG)
5803 {
5804 rtx tem = find_reg_note (insn, REG_EQUAL, 0);
5805
5806 /* Record the actual constant value in a REG_EQUAL note, making
5807 a new one if one does not already exist. */
5808 if (tem)
5809 XEXP (tem, 0) = src_const;
5810 else
5811 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
5812 src_const, REG_NOTES (insn));
5813
5814 /* If storing a constant value in a register that
5815 previously held the constant value 0,
5816 record this fact with a REG_WAS_0 note on this insn.
5817
5818 Note that the *register* is required to have previously held 0,
5819 not just any register in the quantity, and we must point to the
5820 insn that set that register to zero.
5821
5822 Rather than track each register individually, we just see if
5823 the last set for this quantity was for this register. */
5824
5825 if (REGNO_QTY_VALID_P (REGNO (dest))
5826 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
5827 {
5828 /* See if we previously had a REG_WAS_0 note. */
5829 rtx note = find_reg_note (insn, REG_WAS_0, 0);
5830 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
5831
5832 if ((tem = single_set (const_insn)) != 0
5833 && rtx_equal_p (SET_DEST (tem), dest))
5834 {
5835 if (note)
5836 XEXP (note, 0) = const_insn;
5837 else
5838 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
5839 const_insn, REG_NOTES (insn));
5840 }
5841 }
5842 }
5843
5844 /* Now deal with the destination. */
5845 do_not_record = 0;
5846 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
5847
5848 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5849 to the MEM or REG within it. */
5850 while (GET_CODE (dest) == SIGN_EXTRACT
5851 || GET_CODE (dest) == ZERO_EXTRACT
5852 || GET_CODE (dest) == SUBREG
5853 || GET_CODE (dest) == STRICT_LOW_PART)
5854 {
5855 sets[i].inner_dest_loc = &XEXP (dest, 0);
5856 dest = XEXP (dest, 0);
5857 }
5858
5859 sets[i].inner_dest = dest;
5860
5861 if (GET_CODE (dest) == MEM)
5862 {
5863 dest = fold_rtx (dest, insn);
5864
5865 /* Decide whether we invalidate everything in memory,
5866 or just things at non-fixed places.
5867 Writing a large aggregate must invalidate everything
5868 because we don't know how long it is. */
5869 note_mem_written (dest, &writes_memory);
5870 }
5871
5872 /* Compute the hash code of the destination now,
5873 before the effects of this instruction are recorded,
5874 since the register values used in the address computation
5875 are those before this instruction. */
5876 sets[i].dest_hash_code = HASH (dest, mode);
5877
5878 /* Don't enter a bit-field in the hash table
5879 because the value in it after the store
5880 may not equal what was stored, due to truncation. */
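/* For instance, storing (const_int 9) into a 3-bit ZERO_EXTRACT
leaves the field holding 1, not 9, so 9 must not be recorded;
(const_int 5) fits in 3 bits and is handled by the exception below. */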
5881
5882 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5883 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5884 {
5885 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5886
5887 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5888 && GET_CODE (width) == CONST_INT
5889 && INTVAL (width) < HOST_BITS_PER_INT
5890 && ! (INTVAL (src_const) & ((-1) << INTVAL (width))))
5891 /* Exception: if the value is constant,
5892 and it won't be truncated, record it. */
5893 ;
5894 else
5895 {
5896 /* This is chosen so that the destination will be invalidated
5897 but no new value will be recorded.
5898 We must invalidate because sometimes constant
5899 values can be recorded for bitfields. */
5900 sets[i].src_elt = 0;
5901 sets[i].src_volatile = 1;
5902 src_eqv = 0;
5903 src_eqv_elt = 0;
5904 }
5905 }
5906
5907 /* If this is the only SET in a JUMP_INSN and it is now a no-op, we
5908 can delete the insn. */
5909 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5910 {
5911 PUT_CODE (insn, NOTE);
5912 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5913 NOTE_SOURCE_FILE (insn) = 0;
5914 cse_jumps_altered = 1;
5915 /* One less use of the label this insn used to jump to. */
5916 --LABEL_NUSES (JUMP_LABEL (insn));
5917 /* No more processing for this set. */
5918 sets[i].rtl = 0;
5919 }
5920
5921 /* If this SET is now setting PC to a label, we know it used to
5922 be a conditional or computed branch. So we see if we can follow
5923 it. If it was a computed branch, delete it and re-emit. */
5924 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5925 {
5926 rtx p;
5927
5928 /* If this is not in the format for a simple branch and
5929 this is the only SET in the insn, re-emit it. */
5930 if (! simplejump_p (insn) && n_sets == 1)
5931 {
5932 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5933 JUMP_LABEL (new) = XEXP (src, 0);
5934 LABEL_NUSES (XEXP (src, 0))++;
5935 delete_insn (insn);
5936 insn = new;
5937 }
5938
5939 /* Now that we've converted this jump to an unconditional jump,
5940 there is dead code after it. Delete the dead code until we
5941 reach a BARRIER, the end of the function, or a label. Do
5942 not delete NOTEs except for NOTE_INSN_DELETED since later
5943 phases assume these notes are retained. */
5944
5945 p = insn;
5946
5947 while (NEXT_INSN (p) != 0
5948 && GET_CODE (NEXT_INSN (p)) != BARRIER
5949 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
5950 {
5951 if (GET_CODE (NEXT_INSN (p)) != NOTE
5952 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
5953 delete_insn (NEXT_INSN (p));
5954 else
5955 p = NEXT_INSN (p);
5956 }
5957
5958 /* If we don't have a BARRIER immediately after INSN, put one there.
5959 Much code assumes that there are no NOTEs between a JUMP_INSN and
5960 BARRIER. */
5961
5962 if (NEXT_INSN (insn) == 0
5963 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5964 emit_barrier_after (insn);
5965
5966 /* We might have two BARRIERs separated by notes. Delete the second
5967 one if so. */
5968
5969 if (p != insn && NEXT_INSN (p) != 0
5970 && GET_CODE (NEXT_INSN (p)) == BARRIER)
5971 delete_insn (NEXT_INSN (p));
5972
5973 cse_jumps_altered = 1;
5974 sets[i].rtl = 0;
5975 }
5976
5977 /* No further processing for this assignment if destination
5978 is volatile. */
5979
5980 else if (do_not_record)
5981 sets[i].rtl = 0;
5982
5983 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5984 sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
5985
5986 #ifdef HAVE_cc0
5987 /* If setting CC0, record what it was set to, or a constant, if it
5988 is equivalent to a constant. If it is being set to a floating-point
5989 value, make a COMPARE with the appropriate constant of 0. If we
5990 don't do this, later code can interpret this as a test against
5991 const0_rtx, which can cause problems if we try to put it into an
5992 insn as a floating-point operand. */
5993 if (dest == cc0_rtx)
5994 {
5995 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5996 this_insn_cc0_mode = mode;
5997 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5998 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
5999 CONST0_RTX (mode));
6000 }
6001 #endif
6002 }
6003
6004 /* Now enter all non-volatile source expressions in the hash table
6005 if they are not already present.
6006 Record their equivalence classes in src_elt.
6007 This way we can insert the corresponding destinations into
6008 the same classes even if the actual sources are no longer in them
6009 (having been invalidated). */
6010
6011 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6012 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6013 {
6014 register struct table_elt *elt;
6015 register struct table_elt *classp = sets[0].src_elt;
6016 rtx dest = SET_DEST (sets[0].rtl);
6017 enum machine_mode eqvmode = GET_MODE (dest);
6018
6019 if (GET_CODE (dest) == STRICT_LOW_PART)
6020 {
6021 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6022 classp = 0;
6023 }
6024 if (insert_regs (src_eqv, classp, 0))
6025 src_eqv_hash_code = HASH (src_eqv, eqvmode);
6026 elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
6027 elt->in_memory = src_eqv_in_memory;
6028 elt->in_struct = src_eqv_in_struct;
6029 src_eqv_elt = elt;
6030 }
6031
6032 for (i = 0; i < n_sets; i++)
6033 if (sets[i].rtl && ! sets[i].src_volatile
6034 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6035 {
6036 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6037 {
6038 /* REG_EQUAL in setting a STRICT_LOW_PART
6039 gives an equivalent for the entire destination register,
6040 not just for the subreg being stored in now.
6041 This is a more interesting equivalence, so we arrange later
6042 to treat the entire reg as the destination. */
6043 sets[i].src_elt = src_eqv_elt;
6044 sets[i].src_hash_code = src_eqv_hash_code;
6045 }
6046 else
6047 {
6048 /* Insert source and constant equivalent into hash table, if not
6049 already present. */
6050 register struct table_elt *classp = src_eqv_elt;
6051 register rtx src = sets[i].src;
6052 register rtx dest = SET_DEST (sets[i].rtl);
6053 enum machine_mode mode
6054 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6055
6056 if (sets[i].src_elt == 0)
6057 {
6058 register struct table_elt *elt;
6059
6060 /* Note that these insert_regs calls cannot remove
6061 any of the src_elt's, because they would have failed to
6062 match if not still valid. */
6063 if (insert_regs (src, classp, 0))
6064 sets[i].src_hash_code = HASH (src, mode);
6065 elt = insert (src, classp, sets[i].src_hash_code, mode);
6066 elt->in_memory = sets[i].src_in_memory;
6067 elt->in_struct = sets[i].src_in_struct;
6068 sets[i].src_elt = classp = elt;
6069 }
6070
6071 if (sets[i].src_const && sets[i].src_const_elt == 0
6072 && src != sets[i].src_const
6073 && ! rtx_equal_p (sets[i].src_const, src))
6074 sets[i].src_elt = insert (sets[i].src_const, classp,
6075 sets[i].src_const_hash_code, mode);
6076 }
6077 }
6078 else if (sets[i].src_elt == 0)
6079 /* If we did not insert the source into the hash table (e.g., it was
6080 volatile), note the equivalence class for the REG_EQUAL value, if any,
6081 so that the destination goes into that class. */
6082 sets[i].src_elt = src_eqv_elt;
6083
6084 invalidate_from_clobbers (&writes_memory, x);
6085 /* Memory, and some registers, are invalidated by subroutine calls. */
6086 if (GET_CODE (insn) == CALL_INSN)
6087 {
6088 static struct write_data everything = {0, 1, 1, 1};
6089 invalidate_memory (&everything);
6090 invalidate_for_call ();
6091 }
6092
6093 /* Now invalidate everything set by this instruction.
6094 If a SUBREG or other funny destination is being set,
6095 sets[i].rtl is still nonzero, so here we invalidate the reg
6096 a part of which is being set. */
6097
6098 for (i = 0; i < n_sets; i++)
6099 if (sets[i].rtl)
6100 {
6101 register rtx dest = sets[i].inner_dest;
6102
6103 /* Needed for registers to remove the register from its
6104 previous quantity's chain.
6105 Needed for memory if this is a nonvarying address, unless
6106 we have just done an invalidate_memory that covers even those. */
6107 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6108 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6109 invalidate (dest);
6110 }
6111
6112 /* Make sure registers mentioned in destinations
6113 are safe for use in an expression to be inserted.
6114 This removes from the hash table
6115 any invalid entry that refers to one of these registers.
6116
6117 We don't care about the return value from mention_regs because
6118 we are going to hash the SET_DEST values unconditionally. */
6119
6120 for (i = 0; i < n_sets; i++)
6121 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
6122 mention_regs (SET_DEST (sets[i].rtl));
6123
6124 /* We may have just removed some of the src_elt's from the hash table.
6125 So replace each one with the current head of the same class. */
6126
6127 for (i = 0; i < n_sets; i++)
6128 if (sets[i].rtl)
6129 {
6130 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6131 /* If elt was removed, find current head of same class,
6132 or 0 if nothing remains of that class. */
6133 {
6134 register struct table_elt *elt = sets[i].src_elt;
6135
6136 while (elt && elt->prev_same_value)
6137 elt = elt->prev_same_value;
6138
6139 while (elt && elt->first_same_value == 0)
6140 elt = elt->next_same_value;
6141 sets[i].src_elt = elt ? elt->first_same_value : 0;
6142 }
6143 }
6144
6145 /* Now insert the destinations into their equivalence classes. */
6146
6147 for (i = 0; i < n_sets; i++)
6148 if (sets[i].rtl)
6149 {
6150 register rtx dest = SET_DEST (sets[i].rtl);
6151 register struct table_elt *elt;
6152
6153 /* Don't record value if we are not supposed to risk allocating
6154 floating-point values in registers that might be wider than
6155 memory. */
6156 if ((flag_float_store
6157 && GET_CODE (dest) == MEM
6158 && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
6159 /* Don't record values of destinations set inside a libcall block
6160 since we might delete the libcall. Things should have been set
6161 up so we won't want to reuse such a value, but we play it safe
6162 here. */
6163 || in_libcall_block
6164 /* If we didn't put a REG_EQUAL value or a source into the hash
6165 table, there is no point in recording DEST. */
6166 || sets[i].src_elt == 0)
6167 continue;
6168
6169 /* STRICT_LOW_PART isn't part of the value BEING set,
6170 and neither is the SUBREG inside it.
6171 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6172 if (GET_CODE (dest) == STRICT_LOW_PART)
6173 dest = SUBREG_REG (XEXP (dest, 0));
6174
6175 if (GET_CODE (dest) == REG)
6176 /* Registers must also be inserted into chains for quantities. */
6177 if (insert_regs (dest, sets[i].src_elt, 1))
6178 /* If `insert_regs' changes something, the hash code must be
6179 recalculated. */
6180 sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));
6181
6182 elt = insert (dest, sets[i].src_elt,
6183 sets[i].dest_hash_code, GET_MODE (dest));
6184 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
6185 if (elt->in_memory)
6186 {
6187 /* This implicitly assumes a whole struct
6188 need not have MEM_IN_STRUCT_P.
6189 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6190 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
6191 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
6192 }
6193
6194 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is wider
6195 than M2, and both M1 and M2 are a single word, we are also doing
6196 (set (reg:m2 foo) (subreg:m2 (bar:m1 0))) so make that equivalence
6197 as well.
6198
6199 However, BAR may have equivalences for which gen_lowpart_if_possible
6200 produces a simpler value than it would when applied to BAR itself
6201 (e.g., if BAR was ZERO_EXTENDed from M2), so we scan all of
6202 BAR's equivalences. If we don't get a simplified form, make
6203 the SUBREG. It will not be used in an equivalence, but will
6204 cause two similar assignments to be detected.
6205
6206 Note the loop below will find SUBREG_REG (DEST) since we have
6207 already entered SRC and DEST of the SET in the table. */
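/* Concretely (modes and register numbers are illustrative): from
(set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) we may also record
(subreg:HI (reg:SI 101) 0) as equivalent to (reg:HI 100). */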
6208
6209 if (GET_CODE (dest) == SUBREG
6210 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
6211 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
6212 && (GET_MODE_SIZE (GET_MODE (dest))
6213 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6214 && sets[i].src_elt != 0)
6215 {
6216 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6217 struct table_elt *elt, *classp = 0;
6218
6219 for (elt = sets[i].src_elt->first_same_value; elt;
6220 elt = elt->next_same_value)
6221 {
6222 rtx new_src = 0;
6223 int src_hash;
6224 struct table_elt *src_elt;
6225
6226 /* Ignore invalid entries. */
6227 if (GET_CODE (elt->exp) != REG
6228 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6229 continue;
6230
6231 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6232 if (new_src == 0)
6233 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
6234
6235 src_hash = HASH (new_src, new_mode);
6236 src_elt = lookup (new_src, src_hash, new_mode);
6237
6238 /* Put the new source in the hash table if it isn't
6239 there already. */
6240 if (src_elt == 0)
6241 {
6242 if (insert_regs (new_src, classp, 0))
6243 src_hash = HASH (new_src, new_mode);
6244 src_elt = insert (new_src, classp, src_hash, new_mode);
6245 src_elt->in_memory = elt->in_memory;
6246 src_elt->in_struct = elt->in_struct;
6247 }
6248 else if (classp && classp != src_elt->first_same_value)
6249 /* Show that two things that we've seen before are
6250 actually the same. */
6251 merge_equiv_classes (src_elt, classp);
6252
6253 classp = src_elt->first_same_value;
6254 }
6255 }
6256 }
6257
6258 /* Special handling for (set REG0 REG1)
6259 where REG0 is the "cheapest", cheaper than REG1.
6260 After cse, REG1 will probably not be used in the sequel,
6261 so (if easily done) change this insn to (set REG1 REG0) and
6262 replace REG1 with REG0 in the previous insn that computed their value.
6263 Then REG1 will become a dead store and won't cloud the situation
6264 for later optimizations.
6265
6266 Do not make this change if REG1 is a hard register, because it will
6267 then be used in the sequel and we may be changing a two-operand insn
6268 into a three-operand insn.
6269
6270 Also do not do this if we are operating on a copy of INSN. */
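/* An illustrative sketch (pseudo numbers invented): the pair
(set (reg 101) (plus (reg 103) (reg 104)))
(set (reg 100) (reg 101))
where reg 100 is the cheapest in its class, becomes
(set (reg 100) (plus (reg 103) (reg 104)))
(set (reg 101) (reg 100))
and the copy into reg 101 is now a likely-dead store. */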
6271
6272 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6273 && NEXT_INSN (PREV_INSN (insn)) == insn
6274 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6275 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6276 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
6277 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
6278 == REGNO (SET_DEST (sets[0].rtl))))
6279 {
6280 rtx prev = PREV_INSN (insn);
6281 while (prev && GET_CODE (prev) == NOTE)
6282 prev = PREV_INSN (prev);
6283
6284 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
6285 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
6286 {
6287 rtx dest = SET_DEST (sets[0].rtl);
6288 rtx note = find_reg_note (prev, REG_EQUIV, 0);
6289
6290 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
6291 validate_change (insn, & SET_DEST (sets[0].rtl),
6292 SET_SRC (sets[0].rtl), 1);
6293 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
6294 apply_change_group ();
6295
6296 /* If REG1 was equivalent to a constant, REG0 is not. */
6297 if (note)
6298 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6299
6300 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6301 any REG_WAS_0 note on INSN to PREV. */
6302 note = find_reg_note (prev, REG_WAS_0, 0);
6303 if (note)
6304 remove_note (prev, note);
6305
6306 note = find_reg_note (insn, REG_WAS_0, 0);
6307 if (note)
6308 {
6309 remove_note (insn, note);
6310 XEXP (note, 1) = REG_NOTES (prev);
6311 REG_NOTES (prev) = note;
6312 }
6313 }
6314 }
6315
6316 /* If this is a conditional jump insn, record any known equivalences due to
6317 the condition being tested. */
6318
6319 last_jump_equiv_class = 0;
6320 if (GET_CODE (insn) == JUMP_INSN
6321 && n_sets == 1 && GET_CODE (x) == SET
6322 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6323 record_jump_equiv (insn, 0);
6324
6325 #ifdef HAVE_cc0
6326 /* If the previous insn set CC0 and this insn no longer references CC0,
6327 delete the previous insn. Here we use the fact that nothing expects CC0
6328 to be valid over an insn, which is true until the final pass. */
6329 if (prev_insn && GET_CODE (prev_insn) == INSN
6330 && (tem = single_set (prev_insn)) != 0
6331 && SET_DEST (tem) == cc0_rtx
6332 && ! reg_mentioned_p (cc0_rtx, x))
6333 {
6334 PUT_CODE (prev_insn, NOTE);
6335 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6336 NOTE_SOURCE_FILE (prev_insn) = 0;
6337 }
6338
6339 prev_insn_cc0 = this_insn_cc0;
6340 prev_insn_cc0_mode = this_insn_cc0_mode;
6341 #endif
6342
6343 prev_insn = insn;
6344 }
6345 \f
6346 /* Store 1 in *WRITES_PTR for those categories of memory ref
6347 that must be invalidated when the expression WRITTEN is stored in.
6348 If WRITTEN is null, say everything must be invalidated. */
6349
6350 static void
6351 note_mem_written (written, writes_ptr)
6352 rtx written;
6353 struct write_data *writes_ptr;
6354 {
6355 static struct write_data everything = {0, 1, 1, 1};
6356
6357 if (written == 0)
6358 *writes_ptr = everything;
6359 else if (GET_CODE (written) == MEM)
6360 {
6361 /* Pushing or popping the stack invalidates just the stack pointer. */
6362 rtx addr = XEXP (written, 0);
6363 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6364 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6365 && GET_CODE (XEXP (addr, 0)) == REG
6366 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6367 {
6368 writes_ptr->sp = 1;
6369 return;
6370 }
6371 else if (GET_MODE (written) == BLKmode)
6372 *writes_ptr = everything;
6373 else if (cse_rtx_addr_varies_p (written))
6374 {
6375 /* A varying address that is a sum indicates an array element,
6376 and that's just as good as a structure element
6377 in implying that we need not invalidate scalar variables. */
6378 if (!(MEM_IN_STRUCT_P (written)
6379 || GET_CODE (XEXP (written, 0)) == PLUS))
6380 writes_ptr->all = 1;
6381 writes_ptr->nonscalar = 1;
6382 }
6383 writes_ptr->var = 1;
6384 }
6385 }
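/* For example, a push such as (set (mem (pre_dec (reg sp))) X) sets
only the `sp' flag above, while a BLKmode store (say, a structure
assignment of unknown length) forces the "everything" answer. */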
6386
6387 /* Perform invalidation on the basis of everything about an insn
6388 except for invalidating the actual places that are SET in it.
6389 This includes the places CLOBBERed, and anything that might
6390 alias with something that is SET or CLOBBERed.
6391
6392 W points to the writes_memory for this insn, a struct write_data
6393 saying which kinds of memory references must be invalidated.
6394 X is the pattern of the insn. */
6395
6396 static void
6397 invalidate_from_clobbers (w, x)
6398 struct write_data *w;
6399 rtx x;
6400 {
6401 /* If W->var is not set, W specifies no action.
6402 If W->all is set, this step gets all memory refs
6403 so they can be ignored in the rest of this function. */
6404 if (w->var)
6405 invalidate_memory (w);
6406
6407 if (w->sp)
6408 {
6409 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
6410 reg_tick[STACK_POINTER_REGNUM]++;
6411
6412 /* This should be *very* rare. */
6413 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6414 invalidate (stack_pointer_rtx);
6415 }
6416
6417 if (GET_CODE (x) == CLOBBER)
6418 {
6419 rtx ref = XEXP (x, 0);
6420 if (ref
6421 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6422 || (GET_CODE (ref) == MEM && ! w->all)))
6423 invalidate (ref);
6424 }
6425 else if (GET_CODE (x) == PARALLEL)
6426 {
6427 register int i;
6428 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6429 {
6430 register rtx y = XVECEXP (x, 0, i);
6431 if (GET_CODE (y) == CLOBBER)
6432 {
6433 rtx ref = XEXP (y, 0);
6434 if (ref
6435 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6436 || (GET_CODE (ref) == MEM && ! w->all)))
6437 invalidate (ref);
6438 }
6439 }
6440 }
6441 }
6442 \f
6443 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6444 and replace any registers in them with either an equivalent constant
6445 or the canonical form of the register. If we are inside an address,
6446 only do this if the address remains valid.
6447
6448 OBJECT is 0 except when within a MEM in which case it is the MEM.
6449
6450 Return the replacement for X. */
6451
6452 static rtx
6453 cse_process_notes (x, object)
6454 rtx x;
6455 rtx object;
6456 {
6457 enum rtx_code code = GET_CODE (x);
6458 char *fmt = GET_RTX_FORMAT (code);
6459 int qty;
6460 int i;
6461
6462 switch (code)
6463 {
6464 case CONST_INT:
6465 case CONST:
6466 case SYMBOL_REF:
6467 case LABEL_REF:
6468 case CONST_DOUBLE:
6469 case PC:
6470 case CC0:
6471 case LO_SUM:
6472 return x;
6473
6474 case MEM:
6475 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6476 return x;
6477
6478 case EXPR_LIST:
6479 case INSN_LIST:
6480 if (REG_NOTE_KIND (x) == REG_EQUAL)
6481 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), 0);
6482 if (XEXP (x, 1))
6483 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), 0);
6484 return x;
6485
6486 case REG:
6487 i = reg_qty[REGNO (x)];
6488
6489 /* Return a constant or a constant register. */
6490 if (REGNO_QTY_VALID_P (REGNO (x))
6491 && qty_const[i] != 0
6492 && (CONSTANT_P (qty_const[i])
6493 || GET_CODE (qty_const[i]) == REG))
6494 {
6495 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
6496 if (new)
6497 return new;
6498 }
6499
6500 /* Otherwise, canonicalize this register. */
6501 return canon_reg (x, 0);
6502 }
6503
6504 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6505 if (fmt[i] == 'e')
6506 validate_change (object, &XEXP (x, i),
6507 cse_process_notes (XEXP (x, i), object), 0);
6508
6509 return x;
6510 }
6511 \f
6512 /* Find common subexpressions between the end test of a loop and the beginning
6513 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6514
6515 Often we have a loop where an expression in the exit test is used
6516 in the body of the loop. For example "while (*p) *q++ = *p++;".
6517 Because of the way we duplicate the loop exit test in front of the loop,
6518 however, we don't detect that common subexpression. This will be caught
6519 when global cse is implemented, but this is a quite common case.
6520
6521 This function handles the most common cases of these common expressions.
6522 It is called after we have processed the basic block ending with the
6523 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6524 jumps to a label used only once. */
6525
6526 static void
6527 cse_around_loop (loop_start)
6528 rtx loop_start;
6529 {
6530 rtx insn;
6531 int i;
6532 struct table_elt *p;
6533
6534 /* If the jump at the end of the loop doesn't go to the start, we don't
6535 do anything. */
6536 for (insn = PREV_INSN (loop_start);
6537 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6538 insn = PREV_INSN (insn))
6539 ;
6540
6541 if (insn == 0
6542 || GET_CODE (insn) != NOTE
6543 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6544 return;
6545
6546 /* If the last insn of the loop (the end test) was an NE comparison,
6547 we will interpret it as an EQ comparison, since we fell through
6548 the loop. Any equivalences resulting from that comparison are
6549 therefore not valid and must be invalidated. */
6550 if (last_jump_equiv_class)
6551 for (p = last_jump_equiv_class->first_same_value; p;
6552 p = p->next_same_value)
6553 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6554 || GET_CODE (p->exp) == SUBREG)
6555 invalidate (p->exp);
6556
6557 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6558 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6559
6560 The only thing we do with SET_DEST is invalidate entries, so we
6561 can safely process each SET in order. It is slightly less efficient
6562 to do so, but we only want to handle the most common cases. */
6563
6564 for (insn = NEXT_INSN (loop_start);
6565 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6566 && ! (GET_CODE (insn) == NOTE
6567 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6568 insn = NEXT_INSN (insn))
6569 {
6570 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6571 && (GET_CODE (PATTERN (insn)) == SET
6572 || GET_CODE (PATTERN (insn)) == CLOBBER))
6573 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6574 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6575 && GET_CODE (PATTERN (insn)) == PARALLEL)
6576 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6577 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6578 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6579 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6580 loop_start);
6581 }
6582 }
6583 \f
6584 /* Variable used for communication between the next two routines. */
6585
6586 static struct write_data skipped_writes_memory;
6587
6588 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6589 since they are done elsewhere. This function is called via note_stores. */
6590
6591 static void
6592 invalidate_skipped_set (dest, set)
6593 rtx dest;
6594 rtx set;
6595 {
6596 if (GET_CODE (set) == CLOBBER
6597 #ifdef HAVE_cc0
6598 || dest == cc0_rtx
6599 #endif
6600 || dest == pc_rtx)
6601 return;
6602
6603 if (GET_CODE (dest) == MEM)
6604 note_mem_written (dest, &skipped_writes_memory);
6605
6606 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6607 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6608 invalidate (dest);
6609 }
6610
6611 /* Invalidate everything stored by the insns from START up to the end of
6612 the function or the next label. This is called when we wish to CSE around a block that is
6613 conditionally executed. */
6614
6615 static void
6616 invalidate_skipped_block (start)
6617 rtx start;
6618 {
6619 rtx insn;
6620 int i;
6621 static struct write_data init = {0, 0, 0, 0};
6622 static struct write_data everything = {0, 1, 1, 1};
6623
6624 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6625 insn = NEXT_INSN (insn))
6626 {
6627 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6628 continue;
6629
6630 skipped_writes_memory = init;
6631
6632 if (GET_CODE (insn) == CALL_INSN)
6633 {
6634 invalidate_for_call ();
6635 skipped_writes_memory = everything;
6636 }
6637
6638 note_stores (PATTERN (insn), invalidate_skipped_set);
6639 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
6640 }
6641 }
6642 \f
6643 /* Used for communication between the following two routines; contains a
6644 value to be checked for modification. */
6645
6646 static rtx cse_check_loop_start_value;
6647
6648 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
6649 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
6650
6651 static void
6652 cse_check_loop_start (x, set)
6653 rtx x;
6654 rtx set;
6655 {
6656 if (cse_check_loop_start_value == 0
6657 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6658 return;
6659
6660 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
6661 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
6662 cse_check_loop_start_value = 0;
6663 }
6664
6665 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6666 a loop that starts with the label at LOOP_START.
6667
6668 If X is a SET, we see if its SET_SRC is currently in our hash table.
6669 If so, we see if it has a value equal to some register used only in the
6670 loop exit code (as marked by jump.c).
6671
6672 If those two conditions are true, we search backwards from the start of
6673 the loop to see if that same value was loaded into a register that still
6674 retains its value at the start of the loop.
6675
6676 If so, we insert an insn after the load to copy the destination of that
6677 load into the equivalent register and (try to) replace our SET_SRC with that
6678 register.
6679
6680 In any event, we invalidate whatever this SET or CLOBBER modifies. */
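/* A sketch of the transformation (register numbers hypothetical): if
the exit test uses (reg 120), marked REG_LOOP_TEST_P, and before the
loop we find (set (reg 99) EXPR) where EXPR matches our SET_SRC, we
emit (set (reg 120) (reg 99)) after that insn and replace our SET_SRC
with (reg 120), exposing the common subexpression. */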
6681
6682 static void
6683 cse_set_around_loop (x, insn, loop_start)
6684 rtx x;
6685 rtx insn;
6686 rtx loop_start;
6687 {
6688 rtx p;
6689 struct table_elt *src_elt;
6690 static struct write_data init = {0, 0, 0, 0};
6691 struct write_data writes_memory;
6692
6693 writes_memory = init;
6694
6695 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6696 are setting PC or CC0 or whose SET_SRC is already a register. */
6697 if (GET_CODE (x) == SET
6698 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6699 && GET_CODE (SET_SRC (x)) != REG)
6700 {
6701 src_elt = lookup (SET_SRC (x),
6702 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6703 GET_MODE (SET_DEST (x)));
6704
6705 if (src_elt)
6706 for (src_elt = src_elt->first_same_value; src_elt;
6707 src_elt = src_elt->next_same_value)
6708 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6709 && COST (src_elt->exp) < COST (SET_SRC (x)))
6710 {
6711 rtx p, set;
6712
6713 /* Look for an insn in front of LOOP_START that sets
6714 something in the desired mode to SET_SRC (x) before we hit
6715 a label or CALL_INSN. */
6716
6717 for (p = prev_nonnote_insn (loop_start);
6718 p && GET_CODE (p) != CALL_INSN
6719 && GET_CODE (p) != CODE_LABEL;
6720 p = prev_nonnote_insn (p))
6721 if ((set = single_set (p)) != 0
6722 && GET_CODE (SET_DEST (set)) == REG
6723 && GET_MODE (SET_DEST (set)) == src_elt->mode
6724 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6725 {
6726 /* We now have to ensure that nothing between P
6727 and LOOP_START modified anything referenced in
6728 SET_SRC (x). We know that nothing within the loop
6729 can modify it, or we would have invalidated it in
6730 the hash table. */
6731 rtx q;
6732
6733 cse_check_loop_start_value = SET_SRC (x);
6734 for (q = p; q != loop_start; q = NEXT_INSN (q))
6735 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6736 note_stores (PATTERN (q), cse_check_loop_start);
6737
6738 /* If nothing was changed and we can replace our
6739 SET_SRC, add an insn after P to copy its destination
6740 to what we will be replacing SET_SRC with. */
6741 if (cse_check_loop_start_value
6742 && validate_change (insn, &SET_SRC (x),
6743 src_elt->exp, 0))
6744 emit_insn_after (gen_move_insn (src_elt->exp,
6745 SET_DEST (set)),
6746 p);
6747 break;
6748 }
6749 }
6750 }
6751
6752 /* Now invalidate anything modified by X. */
6753 note_mem_written (SET_DEST (x), &writes_memory);
6754
6755 if (writes_memory.var)
6756 invalidate_memory (&writes_memory);
6757
6758 /* See comment on similar code in cse_insn for explanation of these tests. */
6759 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6760 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
6761 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
6762 invalidate (SET_DEST (x));
6763 }
6764 \f
6765 /* Find the end of INSN's basic block and return its range,
6766 the total number of SETs in all the insns of the block, the last insn of the
6767 block, and the branch path.
6768
6769 The branch path indicates which branches should be followed. If a non-zero
6770 path size is specified, the block should be rescanned and a different set
6771 of branches will be taken. The branch path is only used if
6772 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6773
6774 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6775 used to describe the block. It is filled in with the information about
6776 the current block. The incoming structure's branch path, if any, is used
6777 to construct the output branch path. */
6778
6779 /* Define maximum length of a branch path. */
6780
6781 #define PATHLENGTH 20
6782
6783 struct cse_basic_block_data {
6784 /* Lowest CUID value of insns in block. */
6785 int low_cuid;
6786 /* Highest CUID value of insns in block. */
6787 int high_cuid;
6788 /* Total number of SETs in block. */
6789 int nsets;
6790 /* Last insn in the block. */
6791 rtx last;
6792 /* Size of current branch path, if any. */
6793 int path_size;
6794 /* Current branch path, indicating which branches will be taken. */
6795 struct branch_path {
6796 /* The branch insn. */
6797 rtx branch;
6798 /* Whether it should be taken or not. AROUND is the same as taken
6799 except that it is used when the destination label is not preceded
6800 by a BARRIER. */
6801 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
6802 } path[PATHLENGTH];
6803 };
6804
6805 void
6806 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6807 rtx insn;
6808 struct cse_basic_block_data *data;
6809 int follow_jumps;
6810 int after_loop;
6811 int skip_blocks;
6812 {
6813 rtx p = insn, q;
6814 int nsets = 0;
6815 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6816 int path_size = data->path_size;
6817 int path_entry = 0;
6818 int i;
6819
6820 /* Update the previous branch path, if any. If the last branch was
6821 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6822 shorten the path by one and look at the previous branch. We know that
6823 at least one branch must have been taken if PATH_SIZE is non-zero. */
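/* E.g. (a hypothetical two-branch path): TAKEN/TAKEN becomes
TAKEN/NOT_TAKEN on the next call; once that is exhausted the path
shortens to just NOT_TAKEN, so the alternatives are tried in turn. */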
6824 while (path_size > 0)
6825 {
6826 if (data->path[path_size - 1].status != NOT_TAKEN)
6827 {
6828 data->path[path_size - 1].status = NOT_TAKEN;
6829 break;
6830 }
6831 else
6832 path_size--;
6833 }
6834
6835 /* Scan to end of this basic block. */
6836 while (p && GET_CODE (p) != CODE_LABEL)
6837 {
6838 /* Don't cse out the end of a loop. This makes a difference
6839 only for the unusual loops that always execute at least once;
6840 all other loops have labels there so we will stop in any case.
6841 Cse'ing out the end of the loop is dangerous because it
6842 might cause an invariant expression inside the loop
6843 to be reused after the end of the loop. This would make it
6844 hard to move the expression out of the loop in loop.c,
6845 especially if it is one of several equivalent expressions
6846 and loop.c would like to eliminate it.
6847
6848 If we are running after loop.c has finished, we can ignore
6849 the NOTE_INSN_LOOP_END. */
6850
6851 if (! after_loop && GET_CODE (p) == NOTE
6852 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6853 break;
6854
6855 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
6856 the regs restored by the longjmp come from
6857 a later time than the setjmp. */
6858 if (GET_CODE (p) == NOTE
6859 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6860 break;
6861
6862 /* A PARALLEL can have lots of SETs in it,
6863 especially if it is really an ASM_OPERANDS. */
6864 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6865 && GET_CODE (PATTERN (p)) == PARALLEL)
6866 nsets += XVECLEN (PATTERN (p), 0);
6867 else if (GET_CODE (p) != NOTE)
6868 nsets += 1;
6869
6870 if (INSN_CUID (p) > high_cuid)
6871 high_cuid = INSN_CUID (p);
6872 if (INSN_CUID (p) < low_cuid)
6873 low_cuid = INSN_CUID (p);
6874
6875 /* See if this insn is in our branch path. If it is and we are to
6876 take it, do so. */
6877 if (path_entry < path_size && data->path[path_entry].branch == p)
6878 {
6879 if (data->path[path_entry].status != NOT_TAKEN)
6880 p = JUMP_LABEL (p);
6881
6882 /* Point to next entry in path, if any. */
6883 path_entry++;
6884 }
6885
6886 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6887 was specified, we haven't reached our maximum path length, there are
6888 insns following the target of the jump, this is the only use of the
6889 jump label, and the target label is preceded by a BARRIER.
6890
6891 Alternatively, we can follow the jump if it branches around a
6892 block of code and there are no other branches into the block.
6893 In this case invalidate_skipped_block will be called to invalidate any
6894 registers set in the block when following the jump. */
6895
6896 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6897 && GET_CODE (p) == JUMP_INSN
6898 && GET_CODE (PATTERN (p)) == SET
6899 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6900 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6901 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6902 {
6903 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6904 if ((GET_CODE (q) != NOTE
6905 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6906 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6907 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6908 break;
6909
6910 /* If we ran into a BARRIER, this code is an extension of the
6911 basic block when the branch is taken. */
6912 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6913 {
6914 /* Don't allow ourselves to keep walking around an
6915 always-executed loop. */
6916 if (next_real_insn (q) == next_real_insn (insn))
6917 break;
6918
6919 /* Similarly, don't put a branch in our path more than once. */
6920 for (i = 0; i < path_entry; i++)
6921 if (data->path[i].branch == p)
6922 break;
6923
6924 if (i != path_entry)
6925 break;
6926
6927 data->path[path_entry].branch = p;
6928 data->path[path_entry++].status = TAKEN;
6929
6930 /* This branch now ends our path. It was possible that we
6931 didn't see this branch the last time around (when the
6932 insn in front of the target was a JUMP_INSN that was
6933 turned into a no-op). */
6934 path_size = path_entry;
6935
6936 p = JUMP_LABEL (p);
6937 /* Mark block so we won't scan it again later. */
6938 PUT_MODE (NEXT_INSN (p), QImode);
6939 }
6940 /* Detect a branch around a block of code. */
6941 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6942 {
6943 register rtx tmp;
6944
6945 if (next_real_insn (q) == next_real_insn (insn))
6946 break;
6947
6948 for (i = 0; i < path_entry; i++)
6949 if (data->path[i].branch == p)
6950 break;
6951
6952 if (i != path_entry)
6953 break;
6954
6955 /* This is no_labels_between_p (p, q) with an added check for
6956 reaching the end of a function (in case Q precedes P). */
6957 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6958 if (GET_CODE (tmp) == CODE_LABEL)
6959 break;
6960
6961 if (tmp == q)
6962 {
6963 data->path[path_entry].branch = p;
6964 data->path[path_entry++].status = AROUND;
6965
6966 path_size = path_entry;
6967
6968 p = JUMP_LABEL (p);
6969 /* Mark block so we won't scan it again later. */
6970 PUT_MODE (NEXT_INSN (p), QImode);
6971 }
6972 }
6973 }
6974 p = NEXT_INSN (p);
6975 }
6976
6977 data->low_cuid = low_cuid;
6978 data->high_cuid = high_cuid;
6979 data->nsets = nsets;
6980 data->last = p;
6981
6982 /* If none of the jumps in the path were taken, set our path length to
6983 zero so a rescan won't be done. */
6984 for (i = path_size - 1; i >= 0; i--)
6985 if (data->path[i].status != NOT_TAKEN)
6986 break;
6987
6988 if (i == -1)
6989 data->path_size = 0;
6990 else
6991 data->path_size = path_size;
6992
6993 /* End the current branch path. */
6994 data->path[path_size].branch = 0;
6995 }
6996 \f
6997 static rtx cse_basic_block ();
6998
6999 /* Perform cse on the instructions of a function.
7000 F is the first instruction.
7001 NREGS is one plus the highest pseudo-reg number used in the instruction.
7002
7003 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7004 (only if -frerun-cse-after-loop).
7005
7006 Returns 1 if jump_optimize should be redone due to simplifications
7007 in conditional jump instructions. */
7008
7009 int
7010 cse_main (f, nregs, after_loop, file)
7011 rtx f;
7012 int nregs;
7013 int after_loop;
7014 FILE *file;
7015 {
7016 struct cse_basic_block_data val;
7017 register rtx insn = f;
7018 register int i;
7019
7020 cse_jumps_altered = 0;
7021 constant_pool_entries_cost = 0;
7022 val.path_size = 0;
7023
7024 init_recog ();
7025
7026 max_reg = nregs;
7027
7028 all_minus_one = (int *) alloca (nregs * sizeof (int));
7029 consec_ints = (int *) alloca (nregs * sizeof (int));
7030
7031 for (i = 0; i < nregs; i++)
7032 {
7033 all_minus_one[i] = -1;
7034 consec_ints[i] = i;
7035 }
7036
7037 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7038 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7039 reg_qty = (int *) alloca (nregs * sizeof (int));
7040 reg_in_table = (int *) alloca (nregs * sizeof (int));
7041 reg_tick = (int *) alloca (nregs * sizeof (int));
7042
7043 /* Discard all the free elements of the previous function
7044 since they are allocated on the temporary obstack. */
7045 bzero (table, sizeof table);
7046 free_element_chain = 0;
7047 n_elements_made = 0;
7048
7049 /* Find the largest uid. */
7050
7051 i = get_max_uid ();
7052 uid_cuid = (short *) alloca ((i + 1) * sizeof (short));
7053 bzero (uid_cuid, (i + 1) * sizeof (short));
7054
7055 /* Compute the mapping from uids to cuids.
7056 CUIDs are numbers assigned to insns, like uids,
7057 except that cuids increase monotonically through the code.
7058 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7059 between two insns is not affected by -g. */
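/* For instance, insns with uids 1, 2, 3 and 4, where uid 3 is a
line-number NOTE, receive cuids 1, 2, 2 and 3: the note shares the
cuid of the insn preceding it. */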
7060
7061 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7062 {
7063 if (GET_CODE (insn) != NOTE
7064 || NOTE_LINE_NUMBER (insn) < 0)
7065 INSN_CUID (insn) = ++i;
7066 else
7067 /* Give a line number note the same cuid as preceding insn. */
7068 INSN_CUID (insn) = i;
7069 }
7070
7071 /* Initialize which registers are clobbered by calls. */
7072
7073 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7074
7075 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7076 if ((call_used_regs[i]
7077 /* Used to check !fixed_regs[i] here, but that isn't safe;
7078 fixed regs are still call-clobbered, and sched can get
7079 confused if they can "live across calls".
7080
7081 The frame pointer is always preserved across calls. The arg
7082 pointer is if it is fixed. The stack pointer usually is, unless
7083 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7084 will be present. If we are generating PIC code, the PIC offset
7085 table register is preserved across calls. */
7086
7087 && i != STACK_POINTER_REGNUM
7088 && i != FRAME_POINTER_REGNUM
7089 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7090 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7091 #endif
7092 #ifdef PIC_OFFSET_TABLE_REGNUM
7093 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7094 #endif
7095 )
7096 || global_regs[i])
7097 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7098
7099 /* Loop over basic blocks.
7100 Compute the maximum number of qty's needed for each basic block
7101 (which is 2 for each SET). */
7102 insn = f;
7103 while (insn)
7104 {
7105 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7106 flag_cse_skip_blocks);
7107
7108 /* If this basic block was already processed or has no sets, skip it. */
7109 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7110 {
7111 PUT_MODE (insn, VOIDmode);
7112 insn = (val.last ? NEXT_INSN (val.last) : 0);
7113 val.path_size = 0;
7114 continue;
7115 }
7116
7117 cse_basic_block_start = val.low_cuid;
7118 cse_basic_block_end = val.high_cuid;
7119 max_qty = val.nsets * 2;
7120
7121 if (file)
7122 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
7123 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7124 val.nsets);
7125
7126 /* Make MAX_QTY bigger to give us room to optimize
7127 past the end of this basic block, if that should prove useful. */
7128 if (max_qty < 500)
7129 max_qty = 500;
7130
7131 max_qty += max_reg;
7132
7133 /* If this basic block is being extended by following certain jumps
7134 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7135 Otherwise, we start after this basic block. */
7136 if (val.path_size > 0)
7137 cse_basic_block (insn, val.last, val.path, 0);
7138 else
7139 {
7140 int old_cse_jumps_altered = cse_jumps_altered;
7141 rtx temp;
7142
7143 /* When cse changes a conditional jump to an unconditional
7144 jump, we want to reprocess the block, since it will give
7145 us a new branch path to investigate. */
7146 cse_jumps_altered = 0;
7147 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7148 if (cse_jumps_altered == 0
7149 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7150 insn = temp;
7151
7152 cse_jumps_altered |= old_cse_jumps_altered;
7153 }
7154
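/* Descriptive note: with the C emulation of alloca (alloca.c), a
   call of alloca (0) allocates nothing and simply reclaims the
   storage of frames that have already exited; this does not apply
   to the builtin alloca. */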
7155 #ifdef USE_C_ALLOCA
7156 alloca (0);
7157 #endif
7158 }
7159
7160 /* Tell refers_to_mem_p that qty_const info is not available. */
7161 qty_const = 0;
7162
7163 if (max_elements_made < n_elements_made)
7164 max_elements_made = n_elements_made;
7165
7166 return cse_jumps_altered;
7167 }
7168
7169 /* Process a single basic block. FROM and TO are the limits of the basic
7170 block. NEXT_BRANCH points to the branch path when following jumps or
7171 a null path when not following jumps.
7172
7173 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7174 loop. This is true when we are being called for the last time on a
7175 block and this CSE pass is before loop.c. */
7176
7177 static rtx
7178 cse_basic_block (from, to, next_branch, around_loop)
7179 register rtx from, to;
7180 struct branch_path *next_branch;
7181 int around_loop;
7182 {
7183 register rtx insn;
7184 int to_usage = 0;
7185 int in_libcall_block = 0;
7186
7187 /* Each of these arrays is undefined before max_reg, so only allocate
7188 the space actually needed and adjust the start below. */
7189
7190 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7191 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7192 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
7193 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7194 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7195 qty_comparison_code
7196 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
7197 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7198 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7199
7200 qty_first_reg -= max_reg;
7201 qty_last_reg -= max_reg;
7202 qty_mode -= max_reg;
7203 qty_const -= max_reg;
7204 qty_const_insn -= max_reg;
7205 qty_comparison_code -= max_reg;
7206 qty_comparison_qty -= max_reg;
7207 qty_comparison_const -= max_reg;
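/* The subtractions above are the usual offset-pointer idiom: only
   max_qty - max_reg entries exist, but after, e.g.,

   int *base = (int *) alloca ((max_qty - max_reg) * sizeof (int));
   qty_first_reg = base - max_reg;

   the valid indexes are exactly the real quantity numbers:
   qty_first_reg[q], for max_reg <= q < max_qty, refers to
   base[q - max_reg]. (A sketch of the idiom only; forming
   base - max_reg points outside the array in strict ISO C, but it
   is what this code relies on.) */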
7208
7209 new_basic_block ();
7210
7211 /* TO might be a label. If so, protect it from being deleted. */
7212 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7213 ++LABEL_NUSES (to);
7214
7215 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7216 {
7217 register enum rtx_code code;
7218
7219 /* See if this is a branch that is part of the path. If so, and it is
7220 to be taken, do so. */
7221 if (next_branch->branch == insn)
7222 {
7223 enum taken status = next_branch++->status;
7224 if (status != NOT_TAKEN)
7225 {
7226 if (status == TAKEN)
7227 record_jump_equiv (insn, 1);
7228 else
7229 invalidate_skipped_block (NEXT_INSN (insn));
7230
7231 /* Set the last insn as the jump insn; it doesn't affect cc0.
7232 Then follow this branch. */
7233 #ifdef HAVE_cc0
7234 prev_insn_cc0 = 0;
7235 #endif
7236 prev_insn = insn;
7237 insn = JUMP_LABEL (insn);
7238 continue;
7239 }
7240 }
7241
7242 code = GET_CODE (insn);
7243 if (GET_MODE (insn) == QImode)
7244 PUT_MODE (insn, VOIDmode);
7245
7246 if (GET_RTX_CLASS (code) == 'i')
7247 {
7248 /* Process notes first so we have all notes in canonical forms when
7249 looking for duplicate operations. */
7250
7251 if (REG_NOTES (insn))
7252 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), 0);
7253
7254 /* Track when we are inside a LIBCALL block. Inside such a block,
7255 we do not want to record destinations. The last insn of a
7256 LIBCALL block is not considered to be part of the block, since
7257 its destination is the result of the block and hence should be
7258 recorded. */
7259
7260 if (find_reg_note (insn, REG_LIBCALL, 0))
7261 in_libcall_block = 1;
7262 else if (find_reg_note (insn, REG_RETVAL, 0))
7263 in_libcall_block = 0;
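/* Schematically (a hand-written sketch, not compiler output), a
   libcall block looks like

   insn 1: ... with a REG_LIBCALL note pointing at insn 3
   insn 2: ...
   insn 3: (set (reg R) ...) with a REG_RETVAL note pointing at insn 1

   so in_libcall_block is 1 while insns 1 and 2 are processed and
   drops back to 0 at insn 3, whose destination R is the result of
   the block and is recorded normally. */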
7264
7265 cse_insn (insn, in_libcall_block);
7266 }
7267
7268 /* If INSN is now an unconditional jump, skip to the end of our
7269 basic block by pretending that we just did the last insn in the
7270 basic block. If we are jumping to the end of our block, show
7271 that we can have one usage of TO. */
7272
7273 if (simplejump_p (insn))
7274 {
7275 if (to == 0)
7276 return 0;
7277
7278 if (JUMP_LABEL (insn) == to)
7279 to_usage = 1;
7280
7281 insn = PREV_INSN (to);
7282 }
7283
7284 /* See if it is ok to keep on going past the label
7285 which used to end our basic block. Remember that we incremented
7286 the count of that label, so we decrement it here. If we made
7287 a jump unconditional, TO_USAGE will be one; in that case, we don't
7288 want to count the use in that jump. */
7289
7290 if (to != 0 && NEXT_INSN (insn) == to
7291 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7292 {
7293 struct cse_basic_block_data val;
7294
7295 insn = NEXT_INSN (to);
7296
7297 if (LABEL_NUSES (to) == 0)
7298 delete_insn (to);
7299
7300 /* Find the end of the following block. Note that we won't be
7301 following branches in this case. If TO was the last insn
7302 in the function, we are done. Similarly, if we deleted the
7303 insn after TO, it must have been because it was preceded by
7304 a BARRIER. In that case, we are done with this block because it
7305 has no continuation. */
7306
7307 if (insn == 0 || INSN_DELETED_P (insn))
7308 return 0;
7309
7310 to_usage = 0;
7311 val.path_size = 0;
7312 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7313
7314 /* If the tables we allocated have enough space left
7315 to handle all the SETs in the next basic block,
7316 continue through it. Otherwise, return,
7317 and that block will be scanned individually. */
7318 if (val.nsets * 2 + next_qty > max_qty)
7319 break;
7320
7321 cse_basic_block_start = val.low_cuid;
7322 cse_basic_block_end = val.high_cuid;
7323 to = val.last;
7324
7325 /* Prevent TO from being deleted if it is a label. */
7326 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7327 ++LABEL_NUSES (to);
7328
7329 /* Back up so we process the first insn in the extension. */
7330 insn = PREV_INSN (insn);
7331 }
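/* Example of the extension case (illustrative): if the block ended
   at a label L whose only remaining use was a jump we just made
   unconditional, L is deleted and scanning falls through into the
   following block, provided the qty tables still have room, i.e.
   val.nsets * 2 + next_qty <= max_qty. */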
7332 }
7333
7334 if (next_qty > max_qty)
7335 abort ();
7336
7337 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7338 the previous insn is the only insn that branches to the head of a loop,
7339 we can cse into the loop. Don't do this if we changed the jump
7340 structure of a loop unless we aren't going to be following jumps. */
7341
7342 if ((cse_jumps_altered == 0
7343 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7344 && around_loop && to != 0
7345 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7346 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7347 && JUMP_LABEL (PREV_INSN (to)) != 0
7348 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7349 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7350
7351 return to ? NEXT_INSN (to) : 0;
7352 }
7353 \f
7354 /* Count the number of times registers are used (not set) in X.
7355 COUNTS is an array in which we accumulate the count, INCR is how much
7356 we count each register usage. */
7357
7358 static void
7359 count_reg_usage (x, counts, incr)
7360 rtx x;
7361 int *counts;
7362 int incr;
7363 {
7364 enum rtx_code code = GET_CODE (x);
7365 char *fmt;
7366 int i, j;
7367
7368 switch (code)
7369 {
7370 case REG:
7371 counts[REGNO (x)] += incr;
7372 return;
7373
7374 case PC:
7375 case CC0:
7376 case CONST:
7377 case CONST_INT:
7378 case CONST_DOUBLE:
7379 case SYMBOL_REF:
7380 case LABEL_REF:
7381 case CLOBBER:
7382 return;
7383
7384 case SET:
7385 /* Unless we are setting a REG, count everything in SET_DEST. */
7386 if (GET_CODE (SET_DEST (x)) != REG)
7387 count_reg_usage (SET_DEST (x), counts, incr);
7388 count_reg_usage (SET_SRC (x), counts, incr);
7389 return;
7390
7391 case INSN:
7392 case JUMP_INSN:
7393 case CALL_INSN:
7394 count_reg_usage (PATTERN (x), counts, incr);
7395
7396 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7397 use them. */
7398
7399 if (REG_NOTES (x))
7400 count_reg_usage (REG_NOTES (x), counts, incr);
7401 return;
7402
7403 case EXPR_LIST:
7404 case INSN_LIST:
7405 if (REG_NOTE_KIND (x) == REG_EQUAL)
7406 count_reg_usage (XEXP (x, 0), counts, incr);
7407 if (XEXP (x, 1))
7408 count_reg_usage (XEXP (x, 1), counts, incr);
7409 return;
7410 }
7411
7412 fmt = GET_RTX_FORMAT (code);
7413 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7414 {
7415 if (fmt[i] == 'e')
7416 count_reg_usage (XEXP (x, i), counts, incr);
7417 else if (fmt[i] == 'E')
7418 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7419 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7420 }
7421 }
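/* Usage sketch (hypothetical caller, mirroring what
   delete_dead_from_cse does below):

   int *counts = (int *) alloca (nreg * sizeof (int));
   bzero (counts, nreg * sizeof (int));
   for (insn = next_real_insn (f); insn; insn = next_real_insn (insn))
     count_reg_usage (insn, counts, 1);

   Afterward counts[r] == 0 means register r is never read; passing
   incr == -1 for a deleted insn retracts its uses. */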
7422 \f
7423 /* Scan all the insns and delete any that are dead; i.e., they store a register
7424 that is never used or they copy a register to itself.
7425
7426 This is used to remove insns made obviously dead by cse. It improves the
7427 heuristics in loop.c since it won't try to move dead invariants out of loops
7428 or make givs for dead quantities. The remaining passes of the compilation
7429 are also sped up. */
7430
7431 void
7432 delete_dead_from_cse (insns, nreg)
7433 rtx insns;
7434 int nreg;
7435 {
7436 int *counts = (int *) alloca (nreg * sizeof (int));
7437 rtx insn;
7438 rtx tem;
7439 int i;
7440
7441 /* First count the number of times each register is used. */
7442 bzero (counts, sizeof (int) * nreg);
7443 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7444 count_reg_usage (insn, counts, 1);
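/* For example (a sketch): if the only SET of pseudo 105 is

   (set (reg 105) (plus (reg 100) (const_int 4)))

   and counts[105] is still 0 after the scan above, the loop below
   turns that insn into a NOTE_INSN_DELETED and decrements
   counts[100], possibly exposing the insn that set reg 100 as dead
   on a later iteration of the backward scan. */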
7445
7446 /* Go from the last insn to the first and delete insns that only set unused
7447 registers or copy a register to itself. As we delete an insn, remove
7448 usage counts for registers it uses. */
7449 for (insn = prev_real_insn (get_last_insn ());
7450 insn; insn = prev_real_insn (insn))
7451 {
7452 int live_insn = 0;
7453
7454 if (GET_CODE (PATTERN (insn)) == SET)
7455 {
7456 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
7457 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
7458 ;
7459
7460 #ifdef HAVE_cc0
7461 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
7462 && ! side_effects_p (SET_SRC (PATTERN (insn)))
7463 && ((tem = next_nonnote_insn (insn)) == 0
7464 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7465 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7466 ;
7467 #endif
7468 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
7469 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
7470 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
7471 || side_effects_p (SET_SRC (PATTERN (insn))))
7472 live_insn = 1;
7473 }
7474 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7475 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7476 {
7477 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7478
7479 if (GET_CODE (elt) == SET)
7480 {
7481 if (GET_CODE (SET_DEST (elt)) == REG
7482 && SET_DEST (elt) == SET_SRC (elt))
7483 ;
7484
7485 #ifdef HAVE_cc0
7486 else if (GET_CODE (SET_DEST (elt)) == CC0
7487 && ! side_effects_p (SET_SRC (elt))
7488 && ((tem = next_nonnote_insn (insn)) == 0
7489 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7490 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7491 ;
7492 #endif
7493 else if (GET_CODE (SET_DEST (elt)) != REG
7494 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
7495 || counts[REGNO (SET_DEST (elt))] != 0
7496 || side_effects_p (SET_SRC (elt)))
7497 live_insn = 1;
7498 }
7499 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7500 live_insn = 1;
7501 }
7502 else
7503 live_insn = 1;
7504
7505 /* If this is a dead insn, delete it and show registers in it aren't
7506 being used. If this is the last insn of a libcall sequence, don't
7507 delete it even if it is dead because we don't know how to do so
7508 here. */
7509
7510 if (! live_insn && ! find_reg_note (insn, REG_RETVAL, 0))
7511 {
7512 count_reg_usage (insn, counts, -1);
7513 PUT_CODE (insn, NOTE);
7514 NOTE_SOURCE_FILE (insn) = 0;
7515 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7516 }
7517 }
7518 }