/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the insn
   that tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn explicitly
   using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
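
/* As an illustration (this example is not part of the original commentary),
   a typical two-insn combination rewrites

	(set (reg:SI 100) (const_int 4))
	(set (reg:SI 101) (plus:SI (reg:SI 99) (reg:SI 100)))

   into the single insn

	(set (reg:SI 101) (plus:SI (reg:SI 99) (const_int 4)))

   provided the machine description accepts the result and reg 100 is not
   needed afterward.  */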

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

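/* For instance (an illustration, not part of the original commentary):
   if pseudo 60 is set only once in the function, reg_n_sets[60] is 1 and
   its entry in reg_last_set_value stays usable even across labels; if it
   is set again after a CODE_LABEL has incremented label_tick, the entry
   is only usable while reg_last_set_label[60] still equals label_tick.  */
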
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;
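
/* For example (illustrative only): on a machine whose QImode loads zero
   extend, every set of pseudo 42 might be such a load, so
   reg_nonzero_bits[42] would be 0xff; a later (and:SI (reg:SI 42)
   (const_int 255)) could then be recognized as redundant.  */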

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set, the
   bits that were known to be zero when it was last set, and the number of
   sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL); \
      if (undobuf.num_undo < MAX_UNDO) \
	{ \
	  undobuf.undo[undobuf.num_undo].is_int = 0; \
	  undobuf.undo[undobuf.num_undo].where.r = &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
	  INTO = _new; \
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
	    undobuf.num_undo++; \
	} \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO) \
	{ \
	  undobuf.undo[undobuf.num_undo].is_int = 1; \
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
	  INTO = NEWVAL; \
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++; \
	} \
    } while (0)
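
/* Illustrative use (not from the original sources): the simplification
   routines typically write

	SUBST (XEXP (x, 0), new_operand);

   where new_operand is a hypothetical replacement expression, so that
   undo_all can later restore XEXP (x, 0) from undobuf if the combination
   is rejected.  */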

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays PROTO((void));
static void setup_incoming_promotions PROTO((void));
static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine PROTO((rtx, rtx, rtx));
static void undo_all PROTO((void));
static rtx *find_split_point PROTO((rtx *, rtx));
static rtx subst PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else PROTO((rtx));
static rtx simplify_set PROTO((rtx));
static rtx simplify_logical PROTO((rtx, int));
static rtx expand_compound_operation PROTO((rtx));
static rtx expand_field_assignment PROTO((rtx));
static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
				  int, int, int));
static rtx extract_left_shift PROTO((rtx, int));
static rtx make_compound_operation PROTO((rtx, enum rtx_code));
static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode PROTO((rtx, enum machine_mode,
				unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment PROTO((rtx));
static rtx apply_distributive_law PROTO((rtx));
static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
				  enum rtx_code, HOST_WIDE_INT,
				  enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
			     rtx, rtx));
static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
			    enum machine_mode, rtx));
static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p PROTO((rtx));
static void update_table_tick PROTO((rtx));
static void record_value_for_reg PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
static void record_dead_and_set_regs PROTO((rtx));
static int get_last_value_validate PROTO((rtx *, int, int));
static rtx get_last_value PROTO((rtx));
static int use_crosses_set_p PROTO((rtx, int));
static void reg_dead_at_p_1 PROTO((rtx, rtx));
static int reg_dead_at_p PROTO((rtx, rtx));
static void move_deaths PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p PROTO((rtx, rtx));
static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
static int insn_cuid PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
794 | \f | |
795 | /* See if INSN can be combined into I3. PRED and SUCC are optionally | |
796 | insns that were previously combined into I3 or that will be combined | |
797 | into the merger of INSN and I3. | |
798 | ||
799 | Return 0 if the combination is not allowed for any reason. | |
800 | ||
801 | If the combination is allowed, *PDEST will be set to the single | |
802 | destination of INSN and *PSRC to the single source, and this function | |
803 | will return 1. */ | |
804 | ||
805 | static int | |
806 | can_combine_p (insn, i3, pred, succ, pdest, psrc) | |
807 | rtx insn; | |
808 | rtx i3; | |
809 | rtx pred, succ; | |
810 | rtx *pdest, *psrc; | |
811 | { | |
812 | int i; | |
813 | rtx set = 0, src, dest; | |
814 | rtx p, link; | |
815 | int all_adjacent = (succ ? (next_active_insn (insn) == succ | |
816 | && next_active_insn (succ) == i3) | |
817 | : next_active_insn (insn) == i3); | |
818 | ||
819 | /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0. | |
820 | or a PARALLEL consisting of such a SET and CLOBBERs. | |
821 | ||
822 | If INSN has CLOBBER parallel parts, ignore them for our processing. | |
823 | By definition, these happen during the execution of the insn. When it | |
824 | is merged with another insn, all bets are off. If they are, in fact, | |
825 | needed and aren't also supplied in I3, they may be added by | |
826 | recog_for_combine. Otherwise, it won't match. | |
827 | ||
828 | We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED | |
829 | note. | |
830 | ||
831 | Get the source and destination of INSN. If more than one, can't | |
832 | combine. */ | |
833 | ||
834 | if (GET_CODE (PATTERN (insn)) == SET) | |
835 | set = PATTERN (insn); | |
836 | else if (GET_CODE (PATTERN (insn)) == PARALLEL | |
837 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
838 | { | |
839 | for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
840 | { | |
841 | rtx elt = XVECEXP (PATTERN (insn), 0, i); | |
842 | ||
843 | switch (GET_CODE (elt)) | |
844 | { | |
845 | /* We can ignore CLOBBERs. */ | |
846 | case CLOBBER: | |
847 | break; | |
848 | ||
849 | case SET: | |
850 | /* Ignore SETs whose result isn't used but not those that | |
851 | have side-effects. */ | |
852 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)) | |
853 | && ! side_effects_p (elt)) | |
854 | break; | |
855 | ||
856 | /* If we have already found a SET, this is a second one and | |
857 | so we cannot combine with this insn. */ | |
858 | if (set) | |
859 | return 0; | |
860 | ||
861 | set = elt; | |
862 | break; | |
863 | ||
864 | default: | |
865 | /* Anything else means we can't combine. */ | |
866 | return 0; | |
867 | } | |
868 | } | |
869 | ||
870 | if (set == 0 | |
871 | /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs, | |
872 | so don't do anything with it. */ | |
873 | || GET_CODE (SET_SRC (set)) == ASM_OPERANDS) | |
874 | return 0; | |
875 | } | |
876 | else | |
877 | return 0; | |
878 | ||
879 | if (set == 0) | |
880 | return 0; | |
881 | ||
882 | set = expand_field_assignment (set); | |
883 | src = SET_SRC (set), dest = SET_DEST (set); | |
884 | ||
885 | /* Don't eliminate a store in the stack pointer. */ | |
886 | if (dest == stack_pointer_rtx | |
230d793d RS |
887 | /* If we couldn't eliminate a field assignment, we can't combine. */ |
888 | || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART | |
889 | /* Don't combine with an insn that sets a register to itself if it has | |
890 | a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */ | |
5f4f0e22 | 891 | || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX)) |
230d793d RS |
892 | /* Can't merge a function call. */ |
893 | || GET_CODE (src) == CALL | |
cd5e8f1f | 894 | /* Don't eliminate a function call argument. */ |
4dca5ec5 RK |
895 | || (GET_CODE (i3) == CALL_INSN |
896 | && (find_reg_fusage (i3, USE, dest) | |
897 | || (GET_CODE (dest) == REG | |
898 | && REGNO (dest) < FIRST_PSEUDO_REGISTER | |
899 | && global_regs[REGNO (dest)]))) | |
230d793d RS |
900 | /* Don't substitute into an incremented register. */ |
901 | || FIND_REG_INC_NOTE (i3, dest) | |
902 | || (succ && FIND_REG_INC_NOTE (succ, dest)) | |
903 | /* Don't combine the end of a libcall into anything. */ | |
5f4f0e22 | 904 | || find_reg_note (insn, REG_RETVAL, NULL_RTX) |
230d793d RS |
905 | /* Make sure that DEST is not used after SUCC but before I3. */ |
906 | || (succ && ! all_adjacent | |
907 | && reg_used_between_p (dest, succ, i3)) | |
908 | /* Make sure that the value that is to be substituted for the register | |
909 | does not use any registers whose values alter in between. However, | |
910 | If the insns are adjacent, a use can't cross a set even though we | |
911 | think it might (this can happen for a sequence of insns each setting | |
912 | the same destination; reg_last_set of that register might point to | |
d81481d3 RK |
913 | a NOTE). If INSN has a REG_EQUIV note, the register is always |
914 | equivalent to the memory so the substitution is valid even if there | |
915 | are intervening stores. Also, don't move a volatile asm or | |
916 | UNSPEC_VOLATILE across any other insns. */ | |
230d793d | 917 | || (! all_adjacent |
d81481d3 RK |
918 | && (((GET_CODE (src) != MEM |
919 | || ! find_reg_note (insn, REG_EQUIV, src)) | |
920 | && use_crosses_set_p (src, INSN_CUID (insn))) | |
a66a10c7 RS |
921 | || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src)) |
922 | || GET_CODE (src) == UNSPEC_VOLATILE)) | |
230d793d RS |
923 | /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get |
924 | better register allocation by not doing the combine. */ | |
925 | || find_reg_note (i3, REG_NO_CONFLICT, dest) | |
926 | || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest)) | |
927 | /* Don't combine across a CALL_INSN, because that would possibly | |
928 | change whether the life span of some REGs crosses calls or not, | |
929 | and it is a pain to update that information. | |
930 | Exception: if source is a constant, moving it later can't hurt. | |
931 | Accept that special case, because it helps -fforce-addr a lot. */ | |
932 | || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src))) | |
933 | return 0; | |
934 | ||
935 | /* DEST must either be a REG or CC0. */ | |
936 | if (GET_CODE (dest) == REG) | |
937 | { | |
938 | /* If register alignment is being enforced for multi-word items in all | |
939 | cases except for parameters, it is possible to have a register copy | |
940 | insn referencing a hard register that is not allowed to contain the | |
941 | mode being copied and which would not be valid as an operand of most | |
942 | insns. Eliminate this problem by not combining with such an insn. | |
943 | ||
944 | Also, on some machines we don't want to extend the life of a hard | |
945 | register. */ | |
946 | ||
947 | if (GET_CODE (src) == REG | |
948 | && ((REGNO (dest) < FIRST_PSEUDO_REGISTER | |
949 | && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest))) | |
c448a43e RK |
950 | /* Don't extend the life of a hard register unless it is |
951 | user variable (if we have few registers) or it can't | |
952 | fit into the desired register (meaning something special | |
953 | is going on). */ | |
230d793d | 954 | || (REGNO (src) < FIRST_PSEUDO_REGISTER |
c448a43e RK |
955 | && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)) |
956 | #ifdef SMALL_REGISTER_CLASSES | |
957 | || ! REG_USERVAR_P (src) | |
230d793d | 958 | #endif |
c448a43e | 959 | )))) |
230d793d RS |
960 | return 0; |
961 | } | |
962 | else if (GET_CODE (dest) != CC0) | |
963 | return 0; | |
964 | ||
5f96750d RS |
965 | /* Don't substitute for a register intended as a clobberable operand. |
966 | Similarly, don't substitute an expression containing a register that | |
967 | will be clobbered in I3. */ | |
230d793d RS |
968 | if (GET_CODE (PATTERN (i3)) == PARALLEL) |
969 | for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--) | |
970 | if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER | |
5f96750d RS |
971 | && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), |
972 | src) | |
973 | || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))) | |
230d793d RS |
974 | return 0; |
975 | ||
976 | /* If INSN contains anything volatile, or is an `asm' (whether volatile | |
977 | or not), reject, unless nothing volatile comes between it and I3, | |
978 | with the exception of SUCC. */ | |
979 | ||
980 | if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src)) | |
981 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) | |
982 | if (GET_RTX_CLASS (GET_CODE (p)) == 'i' | |
983 | && p != succ && volatile_refs_p (PATTERN (p))) | |
984 | return 0; | |
985 | ||
4b2cb4a2 RS |
986 | /* If there are any volatile insns between INSN and I3, reject, because |
987 | they might affect machine state. */ | |
988 | ||
989 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) | |
990 | if (GET_RTX_CLASS (GET_CODE (p)) == 'i' | |
991 | && p != succ && volatile_insn_p (PATTERN (p))) | |
992 | return 0; | |
993 | ||
230d793d RS |
994 | /* If INSN or I2 contains an autoincrement or autodecrement, |
995 | make sure that register is not used between there and I3, | |
996 | and not already used in I3 either. | |
997 | Also insist that I3 not be a jump; if it were one | |
998 | and the incremented register were spilled, we would lose. */ | |
999 | ||
1000 | #ifdef AUTO_INC_DEC | |
1001 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1002 | if (REG_NOTE_KIND (link) == REG_INC | |
1003 | && (GET_CODE (i3) == JUMP_INSN | |
1004 | || reg_used_between_p (XEXP (link, 0), insn, i3) | |
1005 | || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3)))) | |
1006 | return 0; | |
1007 | #endif | |
1008 | ||
1009 | #ifdef HAVE_cc0 | |
1010 | /* Don't combine an insn that follows a CC0-setting insn. | |
1011 | An insn that uses CC0 must not be separated from the one that sets it. | |
1012 | We do, however, allow I2 to follow a CC0-setting insn if that insn | |
1013 | is passed as I1; in that case it will be deleted also. | |
1014 | We also allow combining in this case if all the insns are adjacent | |
1015 | because that would leave the two CC0 insns adjacent as well. | |
1016 | It would be more logical to test whether CC0 occurs inside I1 or I2, | |
1017 | but that would be much slower, and this ought to be equivalent. */ | |
1018 | ||
1019 | p = prev_nonnote_insn (insn); | |
1020 | if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p)) | |
1021 | && ! all_adjacent) | |
1022 | return 0; | |
1023 | #endif | |
1024 | ||
1025 | /* If we get here, we have passed all the tests and the combination is | |
1026 | to be allowed. */ | |
1027 | ||
1028 | *pdest = dest; | |
1029 | *psrc = src; | |
1030 | ||
1031 | return 1; | |
1032 | } | |
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		  || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  /* Number of clobbers of SCRATCH we had to add.  */
  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;
1231 | ||
1232 | /* If any of I1, I2, and I3 isn't really an insn, we can't do anything. | |
1233 | This can occur when flow deletes an insn that it has merged into an | |
1234 | auto-increment address. We also can't do anything if I3 has a | |
1235 | REG_LIBCALL note since we don't want to disrupt the contiguity of a | |
1236 | libcall. */ | |
1237 | ||
1238 | if (GET_RTX_CLASS (GET_CODE (i3)) != 'i' | |
1239 | || GET_RTX_CLASS (GET_CODE (i2)) != 'i' | |
1240 | || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i') | |
5f4f0e22 | 1241 | || find_reg_note (i3, REG_LIBCALL, NULL_RTX)) |
230d793d RS |
1242 | return 0; |
1243 | ||
1244 | combine_attempts++; | |
1245 | ||
1246 | undobuf.num_undo = previous_num_undos = 0; | |
1247 | undobuf.other_insn = 0; | |
1248 | ||
1249 | /* Save the current high-water-mark so we can free storage if we didn't | |
1250 | accept this combination. */ | |
1251 | undobuf.storage = (char *) oballoc (0); | |
1252 | ||
6e25d159 RK |
1253 | /* Reset the hard register usage information. */ |
1254 | CLEAR_HARD_REG_SET (newpat_used_regs); | |
1255 | ||
230d793d RS |
1256 | /* If I1 and I2 both feed I3, they can be in any order. To simplify the |
1257 | code below, set I1 to be the earlier of the two insns. */ | |
1258 | if (i1 && INSN_CUID (i1) > INSN_CUID (i2)) | |
1259 | temp = i1, i1 = i2, i2 = temp; | |
1260 | ||
abe6e52f | 1261 | added_links_insn = 0; |
137e889e | 1262 | |
230d793d RS |
1263 | /* First check for one important special-case that the code below will |
1264 | not handle. Namely, the case where I1 is zero, I2 has multiple sets, | |
1265 | and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case, | |
1266 | we may be able to replace that destination with the destination of I3. | |
1267 | This occurs in the common code where we compute both a quotient and | |
1268 | remainder into a structure, in which case we want to do the computation | |
1269 | directly into the structure to avoid register-register copies. | |
1270 | ||
1271 | We make very conservative checks below and only try to handle the | |
1272 | most common cases of this. For example, we only handle the case | |
1273 | where I2 and I3 are adjacent to avoid making difficult register | |
1274 | usage tests. */ | |
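     /* For example (illustrative; register numbers are arbitrary):
	    I2: (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
			   (set (reg 71) (mod:SI (reg 68) (reg 69)))])
	    I3: (set (mem:SI (reg 65)) (reg 70))   where (reg 70) dies in I3.
	Since I3 merely copies (reg 70) into the structure slot, we can make
	I2's first output store straight into the MEM and delete the copy.  */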
1275 | ||
1276 | if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET | |
1277 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1278 | && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER | |
1279 | #ifdef SMALL_REGISTER_CLASSES | |
1280 | && (GET_CODE (SET_DEST (PATTERN (i3))) != REG | |
c448a43e RK |
1281 | || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER |
1282 | || REG_USERVAR_P (SET_DEST (PATTERN (i3)))) | |
230d793d RS |
1283 | #endif |
1284 | && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3))) | |
1285 | && GET_CODE (PATTERN (i2)) == PARALLEL | |
1286 | && ! side_effects_p (SET_DEST (PATTERN (i3))) | |
5089e22e RS |
1287 | /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code |
1288 | below would need to check what is inside (and reg_overlap_mentioned_p | |
1289 | doesn't support those codes anyway). Don't allow those destinations; | |
1290 | the resulting insn isn't likely to be recognized anyway. */ | |
1291 | && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT | |
1292 | && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART | |
230d793d RS |
1293 | && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), |
1294 | SET_DEST (PATTERN (i3))) | |
1295 | && next_real_insn (i2) == i3) | |
5089e22e RS |
1296 | { |
1297 | rtx p2 = PATTERN (i2); | |
1298 | ||
1299 | /* Make sure that the destination of I3, | |
1300 | which we are going to substitute into one output of I2, | |
1301 | is not used within another output of I2. We must avoid making this: | |
1302 | (parallel [(set (mem (reg 69)) ...) | |
1303 | (set (reg 69) ...)]) | |
1304 | which is not well-defined as to order of actions. | |
1305 | (Besides, reload can't handle output reloads for this.) | |
1306 | ||
1307 | The problem can also happen if the dest of I3 is a memory ref, | |
1308 | if another dest in I2 is an indirect memory ref. */ | |
1309 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
7ca919b7 RK |
1310 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1311 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
5089e22e RS |
1312 | && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)), |
1313 | SET_DEST (XVECEXP (p2, 0, i)))) | |
1314 | break; | |
230d793d | 1315 | |
5089e22e RS |
1316 | if (i == XVECLEN (p2, 0)) |
1317 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
1318 | if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3))) | |
1319 | { | |
1320 | combine_merges++; | |
230d793d | 1321 | |
5089e22e RS |
1322 | subst_insn = i3; |
1323 | subst_low_cuid = INSN_CUID (i2); | |
230d793d | 1324 | |
c4e861e8 | 1325 | added_sets_2 = added_sets_1 = 0; |
5089e22e | 1326 | i2dest = SET_SRC (PATTERN (i3)); |
230d793d | 1327 | |
5089e22e RS |
1328 | /* Replace the dest in I2 with our dest and make the resulting |
1329 | insn the new pattern for I3. Then skip to where we | |
1330 | validate the pattern. Everything was set up above. */ | |
1331 | SUBST (SET_DEST (XVECEXP (p2, 0, i)), | |
1332 | SET_DEST (PATTERN (i3))); | |
1333 | ||
1334 | newpat = p2; | |
176c9e6b | 1335 | i3_subst_into_i2 = 1; |
5089e22e RS |
1336 | goto validate_replacement; |
1337 | } | |
1338 | } | |
230d793d RS |
1339 | |
1340 | #ifndef HAVE_cc0 | |
1341 | /* If we have no I1 and I2 looks like: | |
1342 | (parallel [(set (reg:CC X) (compare:CC OP (const_int 0))) | |
1343 | (set Y OP)]) | |
1344 | make up a dummy I1 that is | |
1345 | (set Y OP) | |
1346 | and change I2 to be | |
1347 | (set (reg:CC X) (compare:CC Y (const_int 0))) | |
1348 | ||
1349 | (We can ignore any trailing CLOBBERs.) | |
1350 | ||
1351 | This undoes a previous combination and allows us to match a branch-and- | |
1352 | decrement insn. */ | |
1353 | ||
1354 | if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL | |
1355 | && XVECLEN (PATTERN (i2), 0) >= 2 | |
1356 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET | |
1357 | && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)))) | |
1358 | == MODE_CC) | |
1359 | && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE | |
1360 | && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx | |
1361 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET | |
1362 | && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG | |
1363 | && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0), | |
1364 | SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))) | |
1365 | { | |
1366 | for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--) | |
1367 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER) | |
1368 | break; | |
1369 | ||
1370 | if (i == 1) | |
1371 | { | |
1372 | /* We make I1 with the same INSN_UID as I2. This gives it | |
1373 | the same INSN_CUID for value tracking. Our fake I1 will | |
1374 | never appear in the insn stream so giving it the same INSN_UID | |
1375 | as I2 will not cause a problem. */ | |
1376 | ||
0d9641d1 JW |
1377 | subst_prev_insn = i1 |
1378 | = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2, | |
1379 | XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0); | |
230d793d RS |
1380 | |
1381 | SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0)); | |
1382 | SUBST (XEXP (SET_SRC (PATTERN (i2)), 0), | |
1383 | SET_DEST (PATTERN (i1))); | |
1384 | } | |
1385 | } | |
1386 | #endif | |
1387 | ||
1388 | /* Verify that I2 and I1 are valid for combining. */ | |
5f4f0e22 CH |
1389 | if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src) |
1390 | || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src))) | |
230d793d RS |
1391 | { |
1392 | undo_all (); | |
1393 | return 0; | |
1394 | } | |
1395 | ||
1396 | /* Record whether I2DEST is used in I2SRC and similarly for the other | |
1397 | cases. Knowing this will help in register status updating below. */ | |
1398 | i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src); | |
1399 | i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src); | |
1400 | i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src); | |
1401 | ||
916f14f1 | 1402 | /* See if I1 directly feeds into I3. It does if I1DEST is not used |
230d793d RS |
1403 | in I2SRC. */ |
1404 | i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src); | |
1405 | ||
1406 | /* Ensure that I3's pattern can be the destination of combines. */ | |
1407 | if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, | |
1408 | i1 && i2dest_in_i1src && i1_feeds_i3, | |
1409 | &i3dest_killed)) | |
1410 | { | |
1411 | undo_all (); | |
1412 | return 0; | |
1413 | } | |
1414 | ||
df7d75de RK |
1415 | /* See if any of the insns is a MULT operation. Unless one is, we will |
1416 | reject a combination that is, since it must be slower. Be conservative | |
1417 | here. */ | |
1418 | if (GET_CODE (i2src) == MULT | |
1419 | || (i1 != 0 && GET_CODE (i1src) == MULT) | |
1420 | || (GET_CODE (PATTERN (i3)) == SET | |
1421 | && GET_CODE (SET_SRC (PATTERN (i3))) == MULT)) | |
1422 | have_mult = 1; | |
1423 | ||
230d793d RS |
1424 | /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd. |
1425 | We used to do this EXCEPT in one case: I3 has a post-inc in an | |
1426 | output operand. However, that exception can give rise to insns like | |
1427 | mov r3,(r3)+ | |
1428 | which is a famous insn on the PDP-11 where the value of r3 used as the | |
5089e22e | 1429 | source was model-dependent. Avoid this sort of thing. */ |
230d793d RS |
1430 | |
1431 | #if 0 | |
1432 | if (!(GET_CODE (PATTERN (i3)) == SET | |
1433 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1434 | && GET_CODE (SET_DEST (PATTERN (i3))) == MEM | |
1435 | && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC | |
1436 | || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC))) | |
1437 | /* It's not the exception. */ | |
1438 | #endif | |
1439 | #ifdef AUTO_INC_DEC | |
1440 | for (link = REG_NOTES (i3); link; link = XEXP (link, 1)) | |
1441 | if (REG_NOTE_KIND (link) == REG_INC | |
1442 | && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2)) | |
1443 | || (i1 != 0 | |
1444 | && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1))))) | |
1445 | { | |
1446 | undo_all (); | |
1447 | return 0; | |
1448 | } | |
1449 | #endif | |
1450 | ||
1451 | /* See if the SETs in I1 or I2 need to be kept around in the merged | |
1452 | instruction: whenever the value set there is still needed past I3. | |
1453 | For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3. | |
1454 | ||
1455 | For the SET in I1, we have two cases: If I1 and I2 independently | |
1456 | feed into I3, the set in I1 needs to be kept around if I1DEST dies | |
1457 | or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set | |
1458 | in I1 needs to be kept around unless I1DEST dies or is set in either | |
1459 | I2 or I3. We can distinguish these cases by seeing if I2SRC mentions | |
1460 | I1DEST. If so, we know I1 feeds into I2. */ | |
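     /* For example (illustrative): if I2 is (set (reg 70) (plus (reg 68) (reg 69)))
	and some insn after I3 still uses (reg 70), then (reg 70) neither dies
	nor is set in I3, so ADDED_SETS_2 is nonzero and the merged insn must
	keep a (set (reg 70) ...) in addition to the combined computation.  */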
1461 | ||
1462 | added_sets_2 = ! dead_or_set_p (i3, i2dest); | |
1463 | ||
1464 | added_sets_1 | |
1465 | = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest) | |
1466 | : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest))); | |
1467 | ||
1468 | /* If the set in I2 needs to be kept around, we must make a copy of | |
1469 | PATTERN (I2), so that when we substitute I1SRC for I1DEST in | |
5089e22e | 1470 | PATTERN (I2), we are only substituting for the original I1DEST, not into |
230d793d RS |
1471 | an already-substituted copy. This also prevents making self-referential |
1472 | rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to | |
1473 | I2DEST. */ | |
1474 | ||
1475 | i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL | |
1476 | ? gen_rtx (SET, VOIDmode, i2dest, i2src) | |
1477 | : PATTERN (i2)); | |
1478 | ||
1479 | if (added_sets_2) | |
1480 | i2pat = copy_rtx (i2pat); | |
1481 | ||
1482 | combine_merges++; | |
1483 | ||
1484 | /* Substitute in the latest insn for the regs set by the earlier ones. */ | |
1485 | ||
1486 | maxreg = max_reg_num (); | |
1487 | ||
1488 | subst_insn = i3; | |
230d793d RS |
1489 | |
1490 | /* It is possible that the source of I2 or I1 may be performing an | |
1491 | unneeded operation, such as a ZERO_EXTEND of something that is known | |
1492 | to have the high part zero. Handle that case by letting subst look at | |
1493 | the innermost one of them. | |
1494 | ||
1495 | Another way to do this would be to have a function that tries to | |
1496 | simplify a single insn instead of merging two or more insns. We don't | |
1497 | do this because of the potential of infinite loops and because | |
1498 | of the potential extra memory required. However, doing it the way | |
1499 | we are is a bit of a kludge and doesn't catch all cases. | |
1500 | ||
1501 | But only do this if -fexpensive-optimizations since it slows things down | |
1502 | and doesn't usually win. */ | |
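     /* For example (illustrative): an I1 source such as
	    (zero_extend:SI (subreg:QI (reg:SI 68) 0))
	where the nonzero-bits information already shows the upper bits of
	(reg 68) to be zero can be simplified to plain (reg:SI 68) here,
	before it is substituted into the later insns.  */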
1503 | ||
1504 | if (flag_expensive_optimizations) | |
1505 | { | |
1506 | /* Pass pc_rtx so no substitutions are done, just simplifications. | |
1507 | The cases that we are interested in here do not involve the few | |
1508 | cases where is_replaced is checked. */ | |
1509 | if (i1) | |
d0ab8cd3 RK |
1510 | { |
1511 | subst_low_cuid = INSN_CUID (i1); | |
1512 | i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0); | |
1513 | } | |
230d793d | 1514 | else |
d0ab8cd3 RK |
1515 | { |
1516 | subst_low_cuid = INSN_CUID (i2); | |
1517 | i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0); | |
1518 | } | |
230d793d RS |
1519 | |
1520 | previous_num_undos = undobuf.num_undo; | |
1521 | } | |
1522 | ||
1523 | #ifndef HAVE_cc0 | |
1524 | /* Many machines that don't use CC0 have insns that can both perform an | |
1525 | arithmetic operation and set the condition code. These operations will | |
1526 | be represented as a PARALLEL with the first element of the vector | |
1527 | being a COMPARE of an arithmetic operation with the constant zero. | |
1528 | The second element of the vector will set some pseudo to the result | |
1529 | of the same arithmetic operation. If we simplify the COMPARE, we won't | |
1530 | match such a pattern and so will generate an extra insn. Here we test | |
1531 | for this case, where both the comparison and the operation result are | |
1532 | needed, and make the PARALLEL by just replacing I2DEST in I3SRC with | |
1533 | I2SRC. Later we will make the PARALLEL that contains I2. */ | |
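     /* For example (illustrative; modes and register numbers are arbitrary):
	    I2: (set (reg 70) (plus:SI (reg 68) (const_int -1)))
	    I3: (set (reg:CC 24) (compare:CC (reg 70) (const_int 0)))
	with (reg 70) still live after I3.  We substitute the PLUS into the
	COMPARE here; the PARALLEL that also keeps (set (reg 70) ...) is built
	further down when ADDED_SETS_2 is handled.  */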
1534 | ||
1535 | if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET | |
1536 | && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE | |
1537 | && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx | |
1538 | && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest)) | |
1539 | { | |
1540 | rtx *cc_use; | |
1541 | enum machine_mode compare_mode; | |
1542 | ||
1543 | newpat = PATTERN (i3); | |
1544 | SUBST (XEXP (SET_SRC (newpat), 0), i2src); | |
1545 | ||
1546 | i2_is_used = 1; | |
1547 | ||
1548 | #ifdef EXTRA_CC_MODES | |
1549 | /* See if a COMPARE with the operand we substituted in should be done | |
1550 | with the mode that is currently being used. If not, do the same | |
1551 | processing we do in `subst' for a SET; namely, if the destination | |
1552 | is used only once, try to replace it with a register of the proper | |
1553 | mode and also replace the COMPARE. */ | |
1554 | if (undobuf.other_insn == 0 | |
1555 | && (cc_use = find_single_use (SET_DEST (newpat), i3, | |
1556 | &undobuf.other_insn)) | |
77fa0940 RK |
1557 | && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), |
1558 | i2src, const0_rtx)) | |
230d793d RS |
1559 | != GET_MODE (SET_DEST (newpat)))) |
1560 | { | |
1561 | int regno = REGNO (SET_DEST (newpat)); | |
1562 | rtx new_dest = gen_rtx (REG, compare_mode, regno); | |
1563 | ||
1564 | if (regno < FIRST_PSEUDO_REGISTER | |
1565 | || (reg_n_sets[regno] == 1 && ! added_sets_2 | |
1566 | && ! REG_USERVAR_P (SET_DEST (newpat)))) | |
1567 | { | |
1568 | if (regno >= FIRST_PSEUDO_REGISTER) | |
1569 | SUBST (regno_reg_rtx[regno], new_dest); | |
1570 | ||
1571 | SUBST (SET_DEST (newpat), new_dest); | |
1572 | SUBST (XEXP (*cc_use, 0), new_dest); | |
1573 | SUBST (SET_SRC (newpat), | |
1574 | gen_rtx_combine (COMPARE, compare_mode, | |
1575 | i2src, const0_rtx)); | |
1576 | } | |
1577 | else | |
1578 | undobuf.other_insn = 0; | |
1579 | } | |
1580 | #endif | |
1581 | } | |
1582 | else | |
1583 | #endif | |
1584 | { | |
1585 | n_occurrences = 0; /* `subst' counts here */ | |
1586 | ||
1587 | /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we | |
1588 | need to make a unique copy of I2SRC each time we substitute it | |
1589 | to avoid self-referential rtl. */ | |
1590 | ||
d0ab8cd3 | 1591 | subst_low_cuid = INSN_CUID (i2); |
230d793d RS |
1592 | newpat = subst (PATTERN (i3), i2dest, i2src, 0, |
1593 | ! i1_feeds_i3 && i1dest_in_i1src); | |
1594 | previous_num_undos = undobuf.num_undo; | |
1595 | ||
1596 | /* Record whether i2's body now appears within i3's body. */ | |
1597 | i2_is_used = n_occurrences; | |
1598 | } | |
1599 | ||
1600 | /* If we already got a failure, don't try to do more. Otherwise, | |
1601 | try to substitute in I1 if we have it. */ | |
1602 | ||
1603 | if (i1 && GET_CODE (newpat) != CLOBBER) | |
1604 | { | |
1605 | /* Before we can do this substitution, we must redo the test done | |
1606 | above (see detailed comments there) that ensures that I1DEST | |
1607 | isn't mentioned in any SETs in NEWPAT that are field assignments. */ | |
1608 | ||
5f4f0e22 CH |
1609 | if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, |
1610 | 0, NULL_PTR)) | |
230d793d RS |
1611 | { |
1612 | undo_all (); | |
1613 | return 0; | |
1614 | } | |
1615 | ||
1616 | n_occurrences = 0; | |
d0ab8cd3 | 1617 | subst_low_cuid = INSN_CUID (i1); |
230d793d RS |
1618 | newpat = subst (newpat, i1dest, i1src, 0, 0); |
1619 | previous_num_undos = undobuf.num_undo; | |
1620 | } | |
1621 | ||
916f14f1 RK |
1622 | /* Fail if an autoincrement side-effect has been duplicated. Be careful |
1623 | to count all the ways that I2SRC and I1SRC can be used. */ | |
5f4f0e22 | 1624 | if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0 |
916f14f1 | 1625 | && i2_is_used + added_sets_2 > 1) |
5f4f0e22 | 1626 | || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0 |
916f14f1 RK |
1627 | && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3) |
1628 | > 1)) | |
230d793d RS |
1629 | /* Fail if we tried to make a new register (we used to abort, but there's |
1630 | really no reason to). */ | |
1631 | || max_reg_num () != maxreg | |
1632 | /* Fail if we couldn't do something and have a CLOBBER. */ | |
df7d75de RK |
1633 | || GET_CODE (newpat) == CLOBBER |
1634 | /* Fail if this new pattern is a MULT and we didn't have one before | |
1635 | at the outer level. */ | |
1636 | || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT | |
1637 | && ! have_mult)) | |
230d793d RS |
1638 | { |
1639 | undo_all (); | |
1640 | return 0; | |
1641 | } | |
1642 | ||
1643 | /* If the actions of the earlier insns must be kept | |
1644 | in addition to substituting them into the latest one, | |
1645 | we must make a new PARALLEL for the latest insn | |
1646 | to hold the additional SETs. */ | |
1647 | ||
1648 | if (added_sets_1 || added_sets_2) | |
1649 | { | |
1650 | combine_extras++; | |
1651 | ||
1652 | if (GET_CODE (newpat) == PARALLEL) | |
1653 | { | |
1654 | rtvec old = XVEC (newpat, 0); | |
1655 | total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2; | |
1656 | newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets)); | |
4c9a05bc | 1657 | bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0), |
230d793d RS |
1658 | sizeof (old->elem[0]) * old->num_elem); |
1659 | } | |
1660 | else | |
1661 | { | |
1662 | rtx old = newpat; | |
1663 | total_sets = 1 + added_sets_1 + added_sets_2; | |
1664 | newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets)); | |
1665 | XVECEXP (newpat, 0, 0) = old; | |
1666 | } | |
1667 | ||
1668 | if (added_sets_1) | |
1669 | XVECEXP (newpat, 0, --total_sets) | |
1670 | = (GET_CODE (PATTERN (i1)) == PARALLEL | |
1671 | ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1)); | |
1672 | ||
1673 | if (added_sets_2) | |
1674 | { | |
1675 | /* If there is no I1, use I2's body as is. We used to also not do | |
1676 | the subst call below if I2 was substituted into I3, | |
1677 | but that could lose a simplification. */ | |
1678 | if (i1 == 0) | |
1679 | XVECEXP (newpat, 0, --total_sets) = i2pat; | |
1680 | else | |
1681 | /* See comment where i2pat is assigned. */ | |
1682 | XVECEXP (newpat, 0, --total_sets) | |
1683 | = subst (i2pat, i1dest, i1src, 0, 0); | |
1684 | } | |
1685 | } | |
1686 | ||
1687 | /* We come here when we are replacing a destination in I2 with the | |
1688 | destination of I3. */ | |
1689 | validate_replacement: | |
1690 | ||
6e25d159 RK |
1691 | /* Note which hard regs this insn has as inputs. */ |
1692 | mark_used_regs_combine (newpat); | |
1693 | ||
230d793d | 1694 | /* Is the result of combination a valid instruction? */ |
a29ca9db RK |
1695 | insn_code_number |
1696 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
230d793d RS |
1697 | |
1698 | /* If the result isn't valid, see if it is a PARALLEL of two SETs where | |
1699 | the second SET's destination is a register that is unused. In that case, | |
1700 | we just need the first SET. This can occur when simplifying a divmod | |
1701 | insn. We *must* test for this case here because the code below that | |
1702 | splits two independent SETs doesn't handle this case correctly when it | |
1703 | updates the register status. Also check the case where the first | |
1704 | SET's destination is unused. That would not cause incorrect code, but | |
1705 | does cause an unneeded insn to remain. */ | |
1706 | ||
1707 | if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
1708 | && XVECLEN (newpat, 0) == 2 | |
1709 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
1710 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
1711 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG | |
1712 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1))) | |
1713 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1))) | |
1714 | && asm_noperands (newpat) < 0) | |
1715 | { | |
1716 | newpat = XVECEXP (newpat, 0, 0); | |
a29ca9db RK |
1717 | insn_code_number |
1718 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
230d793d RS |
1719 | } |
1720 | ||
1721 | else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
1722 | && XVECLEN (newpat, 0) == 2 | |
1723 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
1724 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
1725 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG | |
1726 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0))) | |
1727 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0))) | |
1728 | && asm_noperands (newpat) < 0) | |
1729 | { | |
1730 | newpat = XVECEXP (newpat, 0, 1); | |
a29ca9db RK |
1731 | insn_code_number |
1732 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
230d793d RS |
1733 | } |
1734 | ||
1735 | /* If we were combining three insns and the result is a simple SET | |
1736 | with no ASM_OPERANDS that wasn't recognized, try to split it into two | |
916f14f1 RK |
1737 | insns. There are two ways to do this. It can be split using a |
1738 | machine-specific method (like when you have an addition of a large | |
1739 | constant) or by combine in the function find_split_point. */ | |
1740 | ||
230d793d RS |
1741 | if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET |
1742 | && asm_noperands (newpat) < 0) | |
1743 | { | |
916f14f1 | 1744 | rtx m_split, *split; |
42495ca0 | 1745 | rtx ni2dest = i2dest; |
916f14f1 RK |
1746 | |
1747 | /* See if the MD file can split NEWPAT. If it can't, see if letting it | |
42495ca0 RK |
1748 | use I2DEST as a scratch register will help. In the latter case, |
1749 | convert I2DEST to the mode of the source of NEWPAT if we can. */ | |
916f14f1 RK |
1750 | |
1751 | m_split = split_insns (newpat, i3); | |
a70c61d9 JW |
1752 | |
1753 | /* We can only use I2DEST as a scratch reg if it doesn't overlap any | |
1754 | inputs of NEWPAT. */ | |
1755 | ||
1756 | /* ??? If I2DEST is not safe, and I1DEST exists, then it would be | |
1757 | possible to try that as a scratch reg. This would require adding | |
1758 | more code to make it work though. */ | |
1759 | ||
1760 | if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat)) | |
42495ca0 RK |
1761 | { |
1762 | /* If I2DEST is a hard register or the only use of a pseudo, | |
1763 | we can change its mode. */ | |
1764 | if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest) | |
02f4ada4 | 1765 | && GET_MODE (SET_DEST (newpat)) != VOIDmode |
60654f77 | 1766 | && GET_CODE (i2dest) == REG |
42495ca0 RK |
1767 | && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER |
1768 | || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2 | |
1769 | && ! REG_USERVAR_P (i2dest)))) | |
1770 | ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)), | |
1771 | REGNO (i2dest)); | |
1772 | ||
1773 | m_split = split_insns (gen_rtx (PARALLEL, VOIDmode, | |
1774 | gen_rtvec (2, newpat, | |
1775 | gen_rtx (CLOBBER, | |
1776 | VOIDmode, | |
1777 | ni2dest))), | |
1778 | i3); | |
1779 | } | |
916f14f1 RK |
1780 | |
1781 | if (m_split && GET_CODE (m_split) == SEQUENCE | |
3f508eca RK |
1782 | && XVECLEN (m_split, 0) == 2 |
1783 | && (next_real_insn (i2) == i3 | |
1784 | || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)), | |
1785 | INSN_CUID (i2)))) | |
916f14f1 | 1786 | { |
1a26b032 | 1787 | rtx i2set, i3set; |
d0ab8cd3 | 1788 | rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1)); |
916f14f1 | 1789 | newi2pat = PATTERN (XVECEXP (m_split, 0, 0)); |
916f14f1 | 1790 | |
e4ba89be RK |
1791 | i3set = single_set (XVECEXP (m_split, 0, 1)); |
1792 | i2set = single_set (XVECEXP (m_split, 0, 0)); | |
1a26b032 | 1793 | |
42495ca0 RK |
1794 | /* In case we changed the mode of I2DEST, replace it in the |
1795 | pseudo-register table here. We can't do it above in case this | |
1796 | code doesn't get executed and we do a split the other way. */ | |
1797 | ||
1798 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
1799 | SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest); | |
1800 | ||
a29ca9db RK |
1801 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes, |
1802 | &i2_scratches); | |
1a26b032 RK |
1803 | |
1804 | /* If I2 or I3 has multiple SETs, we won't know how to track | |
9cc96794 RK |
1805 | register status, so don't use these insns. If I2's destination |
1806 | is used between I2 and I3, we also can't use these insns. */ | |
1a26b032 | 1807 | |
9cc96794 RK |
1808 | if (i2_code_number >= 0 && i2set && i3set |
1809 | && (next_real_insn (i2) == i3 | |
1810 | || ! reg_used_between_p (SET_DEST (i2set), i2, i3))) | |
a29ca9db RK |
1811 | insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes, |
1812 | &i3_scratches); | |
d0ab8cd3 RK |
1813 | if (insn_code_number >= 0) |
1814 | newpat = newi3pat; | |
1815 | ||
c767f54b | 1816 | /* It is possible that both insns now set the destination of I3. |
22609cbf | 1817 | If so, we must show an extra use of it. */ |
c767f54b | 1818 | |
393de53f RK |
1819 | if (insn_code_number >= 0) |
1820 | { | |
1821 | rtx new_i3_dest = SET_DEST (i3set); | |
1822 | rtx new_i2_dest = SET_DEST (i2set); | |
1823 | ||
1824 | while (GET_CODE (new_i3_dest) == ZERO_EXTRACT | |
1825 | || GET_CODE (new_i3_dest) == STRICT_LOW_PART | |
1826 | || GET_CODE (new_i3_dest) == SUBREG) | |
1827 | new_i3_dest = XEXP (new_i3_dest, 0); | |
1828 | ||
1829 | if (GET_CODE (new_i3_dest) == REG | |
1830 | && GET_CODE (new_i2_dest) == REG | |
1831 | && REGNO (new_i3_dest) == REGNO (new_i2_dest)) | |
1832 | reg_n_sets[REGNO (SET_DEST (i2set))]++; | |
1833 | } | |
916f14f1 | 1834 | } |
230d793d RS |
1835 | |
1836 | /* If we can split it and use I2DEST, go ahead and see if that | |
1837 | helps things be recognized. Verify that none of the registers | |
1838 | are set between I2 and I3. */ | |
d0ab8cd3 | 1839 | if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0 |
230d793d RS |
1840 | #ifdef HAVE_cc0 |
1841 | && GET_CODE (i2dest) == REG | |
1842 | #endif | |
1843 | /* We need I2DEST in the proper mode. If it is a hard register | |
1844 | or the only use of a pseudo, we can change its mode. */ | |
1845 | && (GET_MODE (*split) == GET_MODE (i2dest) | |
1846 | || GET_MODE (*split) == VOIDmode | |
1847 | || REGNO (i2dest) < FIRST_PSEUDO_REGISTER | |
1848 | || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2 | |
1849 | && ! REG_USERVAR_P (i2dest))) | |
1850 | && (next_real_insn (i2) == i3 | |
1851 | || ! use_crosses_set_p (*split, INSN_CUID (i2))) | |
1852 | /* We can't overwrite I2DEST if its value is still used by | |
1853 | NEWPAT. */ | |
1854 | && ! reg_referenced_p (i2dest, newpat)) | |
1855 | { | |
1856 | rtx newdest = i2dest; | |
df7d75de RK |
1857 | enum rtx_code split_code = GET_CODE (*split); |
1858 | enum machine_mode split_mode = GET_MODE (*split); | |
230d793d RS |
1859 | |
1860 | /* Get NEWDEST as a register in the proper mode. We have already | |
1861 | validated that we can do this. */ | |
df7d75de | 1862 | if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) |
230d793d | 1863 | { |
df7d75de | 1864 | newdest = gen_rtx (REG, split_mode, REGNO (i2dest)); |
230d793d RS |
1865 | |
1866 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
1867 | SUBST (regno_reg_rtx[REGNO (i2dest)], newdest); | |
1868 | } | |
1869 | ||
1870 | /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to | |
1871 | an ASHIFT. This can occur if it was inside a PLUS and hence | |
1872 | appeared to be a memory address. This is a kludge. */ | |
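	  /* For example, (mult:SI (reg 68) (const_int 8)) is rewritten here as
	     (ashift:SI (reg 68) (const_int 3)); the MULT form is canonical only
	     inside an address, so it is unlikely to match once it is moved into
	     an insn of its own.  */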
df7d75de | 1873 | if (split_code == MULT |
230d793d RS |
1874 | && GET_CODE (XEXP (*split, 1)) == CONST_INT |
1875 | && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) | |
1dc8a823 JW |
1876 | { |
1877 | SUBST (*split, gen_rtx_combine (ASHIFT, split_mode, | |
1878 | XEXP (*split, 0), GEN_INT (i))); | |
1879 | /* Update split_code because we may not have a multiply | |
1880 | anymore. */ | |
1881 | split_code = GET_CODE (*split); | |
1882 | } | |
230d793d RS |
1883 | |
1884 | #ifdef INSN_SCHEDULING | |
1885 | /* If *SPLIT is a paradoxical SUBREG, when we split it, it should | |
1886 | be written as a ZERO_EXTEND. */ | |
df7d75de RK |
1887 | if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM) |
1888 | SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode, | |
230d793d RS |
1889 | XEXP (*split, 0))); |
1890 | #endif | |
1891 | ||
1892 | newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split); | |
1893 | SUBST (*split, newdest); | |
a29ca9db RK |
1894 | i2_code_number |
1895 | = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches); | |
df7d75de RK |
1896 | |
1897 | /* If the split point was a MULT and we didn't have one before, | |
1898 | don't use one now. */ | |
1899 | if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult)) | |
a29ca9db RK |
1900 | insn_code_number |
1901 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
230d793d RS |
1902 | } |
1903 | } | |
1904 | ||
1905 | /* Check for a case where we loaded from memory in a narrow mode and | |
1906 | then sign extended it, but we need both registers. In that case, | |
1907 | we have a PARALLEL with both loads from the same memory location. | |
1908 | We can split this into a load from memory followed by a register-register | |
1909 | copy. This saves at least one insn, more if register allocation can | |
f0343c74 RK |
1910 | eliminate the copy. |
1911 | ||
1912 | We cannot do this if the destination of the second assignment is | |
1913 | a register that we have already assumed is zero-extended. Similarly | |
1914 | for a SUBREG of such a register. */ | |
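     /* For example (illustrative), a pattern such as
	    (parallel [(set (reg 70) (sign_extend:SI (mem:HI (reg 65))))
		       (set (reg 71) (mem:HI (reg 65)))])
	is handled by letting the new I2 do the extending load and turning I3
	into a copy of the low part of (reg 70) into (reg 71), so only one
	memory reference remains.  */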
230d793d RS |
1915 | |
1916 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
1917 | && GET_CODE (newpat) == PARALLEL | |
1918 | && XVECLEN (newpat, 0) == 2 | |
1919 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
1920 | && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND | |
1921 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
1922 | && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
1923 | XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0)) | |
1924 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
1925 | INSN_CUID (i2)) | |
1926 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
1927 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
f0343c74 RK |
1928 | && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)), |
1929 | (GET_CODE (temp) == REG | |
1930 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
1931 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
1932 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
1933 | && (reg_nonzero_bits[REGNO (temp)] | |
1934 | != GET_MODE_MASK (word_mode)))) | |
1935 | && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG | |
1936 | && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))), | |
1937 | (GET_CODE (temp) == REG | |
1938 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
1939 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
1940 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
1941 | && (reg_nonzero_bits[REGNO (temp)] | |
1942 | != GET_MODE_MASK (word_mode))))) | |
230d793d RS |
1943 | && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)), |
1944 | SET_SRC (XVECEXP (newpat, 0, 1))) | |
1945 | && ! find_reg_note (i3, REG_UNUSED, | |
1946 | SET_DEST (XVECEXP (newpat, 0, 0)))) | |
1947 | { | |
472fbdd1 RK |
1948 | rtx ni2dest; |
1949 | ||
230d793d | 1950 | newi2pat = XVECEXP (newpat, 0, 0); |
472fbdd1 | 1951 | ni2dest = SET_DEST (XVECEXP (newpat, 0, 0)); |
230d793d RS |
1952 | newpat = XVECEXP (newpat, 0, 1); |
1953 | SUBST (SET_SRC (newpat), | |
472fbdd1 | 1954 | gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest)); |
a29ca9db RK |
1955 | i2_code_number |
1956 | = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches); | |
1957 | ||
230d793d | 1958 | if (i2_code_number >= 0) |
a29ca9db RK |
1959 | insn_code_number |
1960 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
5089e22e RS |
1961 | |
1962 | if (insn_code_number >= 0) | |
1963 | { | |
1964 | rtx insn; | |
1965 | rtx link; | |
1966 | ||
1967 | /* If we will be able to accept this, we have made a change to the | |
1968 | destination of I3. This can invalidate a LOG_LINKS pointing | |
1969 | to I3. No other part of combine.c makes such a transformation. | |
1970 | ||
1971 | The new I3 will have a destination that was previously the | |
1972 | destination of I1 or I2 and which was used in I2 or I3. Call | |
1973 | distribute_links to make a LOG_LINK from the next use of | |
1974 | that destination. */ | |
1975 | ||
1976 | PATTERN (i3) = newpat; | |
5f4f0e22 | 1977 | distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX)); |
5089e22e RS |
1978 | |
1979 | /* I3 now uses what used to be its destination and which is | |
1980 | now I2's destination. That means we need a LOG_LINK from | |
1981 | I3 to I2. But we used to have one, so we still will. | |
1982 | ||
1983 | However, some later insn might be using I2's dest and have | |
1984 | a LOG_LINK pointing at I3. We must remove this link. | |
1985 | The simplest way to remove the link is to point it at I1, | |
1986 | which we know will be a NOTE. */ | |
1987 | ||
1988 | for (insn = NEXT_INSN (i3); | |
0d4d42c3 RK |
1989 | insn && (this_basic_block == n_basic_blocks - 1 |
1990 | || insn != basic_block_head[this_basic_block + 1]); | |
5089e22e RS |
1991 | insn = NEXT_INSN (insn)) |
1992 | { | |
1993 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
472fbdd1 | 1994 | && reg_referenced_p (ni2dest, PATTERN (insn))) |
5089e22e RS |
1995 | { |
1996 | for (link = LOG_LINKS (insn); link; | |
1997 | link = XEXP (link, 1)) | |
1998 | if (XEXP (link, 0) == i3) | |
1999 | XEXP (link, 0) = i1; | |
2000 | ||
2001 | break; | |
2002 | } | |
2003 | } | |
2004 | } | |
230d793d RS |
2005 | } |
2006 | ||
2007 | /* Similarly, check for a case where we have a PARALLEL of two independent | |
2008 | SETs but we started with three insns. In this case, we can do the sets | |
2009 | as two separate insns. This case occurs when some SET allows two | |
2010 | other insns to combine, but the destination of that SET is still live. */ | |
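     /* For example (illustrative): if the combination produced
	    (parallel [(set (reg 70) (plus:SI (reg 68) (reg 69)))
		       (set (reg 71) (minus:SI (reg 72) (reg 73)))])
	and neither SET mentions the other's destination, the second SET can
	become the new I2 and the first the new I3, provided nothing between
	I2 and I3 changes the registers the second source uses.  */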
2011 | ||
2012 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2013 | && GET_CODE (newpat) == PARALLEL | |
2014 | && XVECLEN (newpat, 0) == 2 | |
2015 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2016 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT | |
2017 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART | |
2018 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2019 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2020 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
2021 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2022 | INSN_CUID (i2)) | |
2023 | /* Don't pass sets with (USE (MEM ...)) dests to the following. */ | |
2024 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE | |
2025 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE | |
2026 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)), | |
2027 | XVECEXP (newpat, 0, 0)) | |
2028 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)), | |
2029 | XVECEXP (newpat, 0, 1))) | |
2030 | { | |
2031 | newi2pat = XVECEXP (newpat, 0, 1); | |
2032 | newpat = XVECEXP (newpat, 0, 0); | |
2033 | ||
a29ca9db RK |
2034 | i2_code_number |
2035 | = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches); | |
2036 | ||
230d793d | 2037 | if (i2_code_number >= 0) |
a29ca9db RK |
2038 | insn_code_number |
2039 | = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches); | |
230d793d RS |
2040 | } |
2041 | ||
2042 | /* If it still isn't recognized, fail and change things back the way they | |
2043 | were. */ | |
2044 | if ((insn_code_number < 0 | |
2045 | /* Is the result a reasonable ASM_OPERANDS? */ | |
2046 | && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2))) | |
2047 | { | |
2048 | undo_all (); | |
2049 | return 0; | |
2050 | } | |
2051 | ||
2052 | /* If we had to change another insn, make sure it is valid also. */ | |
2053 | if (undobuf.other_insn) | |
2054 | { | |
230d793d RS |
2055 | rtx other_pat = PATTERN (undobuf.other_insn); |
2056 | rtx new_other_notes; | |
2057 | rtx note, next; | |
2058 | ||
6e25d159 RK |
2059 | CLEAR_HARD_REG_SET (newpat_used_regs); |
2060 | ||
a29ca9db RK |
2061 | other_code_number |
2062 | = recog_for_combine (&other_pat, undobuf.other_insn, | |
2063 | &new_other_notes, &other_scratches); | |
230d793d RS |
2064 | |
2065 | if (other_code_number < 0 && ! check_asm_operands (other_pat)) | |
2066 | { | |
2067 | undo_all (); | |
2068 | return 0; | |
2069 | } | |
2070 | ||
2071 | PATTERN (undobuf.other_insn) = other_pat; | |
2072 | ||
2073 | /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they | |
2074 | are still valid. Then add any non-duplicate notes added by | |
2075 | recog_for_combine. */ | |
2076 | for (note = REG_NOTES (undobuf.other_insn); note; note = next) | |
2077 | { | |
2078 | next = XEXP (note, 1); | |
2079 | ||
2080 | if (REG_NOTE_KIND (note) == REG_UNUSED | |
2081 | && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn))) | |
1a26b032 RK |
2082 | { |
2083 | if (GET_CODE (XEXP (note, 0)) == REG) | |
2084 | reg_n_deaths[REGNO (XEXP (note, 0))]--; | |
2085 | ||
2086 | remove_note (undobuf.other_insn, note); | |
2087 | } | |
230d793d RS |
2088 | } |
2089 | ||
1a26b032 RK |
2090 | for (note = new_other_notes; note; note = XEXP (note, 1)) |
2091 | if (GET_CODE (XEXP (note, 0)) == REG) | |
2092 | reg_n_deaths[REGNO (XEXP (note, 0))]++; | |
2093 | ||
230d793d | 2094 | distribute_notes (new_other_notes, undobuf.other_insn, |
5f4f0e22 | 2095 | undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX); |
230d793d RS |
2096 | } |
2097 | ||
2098 | /* We now know that we can do this combination. Merge the insns and | |
2099 | update the status of registers and LOG_LINKS. */ | |
2100 | ||
2101 | { | |
2102 | rtx i3notes, i2notes, i1notes = 0; | |
2103 | rtx i3links, i2links, i1links = 0; | |
2104 | rtx midnotes = 0; | |
230d793d RS |
2105 | register int regno; |
2106 | /* Compute which registers we expect to eliminate. */ | |
2107 | rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src | |
2108 | ? 0 : i2dest); | |
2109 | rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest; | |
2110 | ||
2111 | /* Get the old REG_NOTES and LOG_LINKS from all our insns and | |
2112 | clear them. */ | |
2113 | i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3); | |
2114 | i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2); | |
2115 | if (i1) | |
2116 | i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1); | |
2117 | ||
2118 | /* Ensure that we do not have something that should not be shared but | |
2119 | occurs multiple times in the new insns. Check this by first | |
5089e22e | 2120 | resetting all the `used' flags and then copying anything that is shared. */ |
230d793d RS |
2121 | |
2122 | reset_used_flags (i3notes); | |
2123 | reset_used_flags (i2notes); | |
2124 | reset_used_flags (i1notes); | |
2125 | reset_used_flags (newpat); | |
2126 | reset_used_flags (newi2pat); | |
2127 | if (undobuf.other_insn) | |
2128 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2129 | ||
2130 | i3notes = copy_rtx_if_shared (i3notes); | |
2131 | i2notes = copy_rtx_if_shared (i2notes); | |
2132 | i1notes = copy_rtx_if_shared (i1notes); | |
2133 | newpat = copy_rtx_if_shared (newpat); | |
2134 | newi2pat = copy_rtx_if_shared (newi2pat); | |
2135 | if (undobuf.other_insn) | |
2136 | PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn)); | |
2137 | ||
2138 | INSN_CODE (i3) = insn_code_number; | |
2139 | PATTERN (i3) = newpat; | |
2140 | if (undobuf.other_insn) | |
2141 | INSN_CODE (undobuf.other_insn) = other_code_number; | |
2142 | ||
2143 | /* We had one special case above where I2 had more than one set and | |
2144 | we replaced a destination of one of those sets with the destination | |
2145 | of I3. In that case, we have to update LOG_LINKS of insns later | |
176c9e6b JW |
2146 | in this basic block. Note that this (expensive) case is rare. |
2147 | ||
2148 | Also, in this case, we must pretend that all REG_NOTEs for I2 | |
2149 | actually came from I3, so that REG_UNUSED notes from I2 will be | |
2150 | properly handled. */ | |
2151 | ||
2152 | if (i3_subst_into_i2) | |
2153 | { | |
2154 | for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++) | |
2155 | if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG | |
2156 | && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest | |
2157 | && ! find_reg_note (i2, REG_UNUSED, | |
2158 | SET_DEST (XVECEXP (PATTERN (i2), 0, i)))) | |
2159 | for (temp = NEXT_INSN (i2); | |
2160 | temp && (this_basic_block == n_basic_blocks - 1 | |
2161 | || basic_block_head[this_basic_block + 1] != temp); | |
2162 | temp = NEXT_INSN (temp)) | |
2163 | if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i') | |
2164 | for (link = LOG_LINKS (temp); link; link = XEXP (link, 1)) | |
2165 | if (XEXP (link, 0) == i2) | |
2166 | XEXP (link, 0) = i3; | |
2167 | ||
2168 | if (i3notes) | |
2169 | { | |
2170 | rtx link = i3notes; | |
2171 | while (XEXP (link, 1)) | |
2172 | link = XEXP (link, 1); | |
2173 | XEXP (link, 1) = i2notes; | |
2174 | } | |
2175 | else | |
2176 | i3notes = i2notes; | |
2177 | i2notes = 0; | |
2178 | } | |
230d793d RS |
2179 | |
2180 | LOG_LINKS (i3) = 0; | |
2181 | REG_NOTES (i3) = 0; | |
2182 | LOG_LINKS (i2) = 0; | |
2183 | REG_NOTES (i2) = 0; | |
2184 | ||
2185 | if (newi2pat) | |
2186 | { | |
2187 | INSN_CODE (i2) = i2_code_number; | |
2188 | PATTERN (i2) = newi2pat; | |
2189 | } | |
2190 | else | |
2191 | { | |
2192 | PUT_CODE (i2, NOTE); | |
2193 | NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED; | |
2194 | NOTE_SOURCE_FILE (i2) = 0; | |
2195 | } | |
2196 | ||
2197 | if (i1) | |
2198 | { | |
2199 | LOG_LINKS (i1) = 0; | |
2200 | REG_NOTES (i1) = 0; | |
2201 | PUT_CODE (i1, NOTE); | |
2202 | NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED; | |
2203 | NOTE_SOURCE_FILE (i1) = 0; | |
2204 | } | |
2205 | ||
2206 | /* Get death notes for everything that is now used in either I3 or | |
2207 | I2 and used to die in a previous insn. */ | |
2208 | ||
2209 | move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes); | |
2210 | if (newi2pat) | |
2211 | move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes); | |
2212 | ||
2213 | /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */ | |
2214 | if (i3notes) | |
5f4f0e22 CH |
2215 | distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX, |
2216 | elim_i2, elim_i1); | |
230d793d | 2217 | if (i2notes) |
5f4f0e22 CH |
2218 | distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX, |
2219 | elim_i2, elim_i1); | |
230d793d | 2220 | if (i1notes) |
5f4f0e22 CH |
2221 | distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX, |
2222 | elim_i2, elim_i1); | |
230d793d | 2223 | if (midnotes) |
5f4f0e22 CH |
2224 | distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2225 | elim_i2, elim_i1); | |
230d793d RS |
2226 | |
2227 | /* Distribute any notes added to I2 or I3 by recog_for_combine. We | |
2228 | know these are REG_UNUSED and want them to go to the desired insn, | |
1a26b032 RK |
2229 | so we always pass it as i3. We have not counted the notes in |
2230 | reg_n_deaths yet, so we need to do so now. */ | |
2231 | ||
230d793d | 2232 | if (newi2pat && new_i2_notes) |
1a26b032 RK |
2233 | { |
2234 | for (temp = new_i2_notes; temp; temp = XEXP (temp, 1)) | |
2235 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
2236 | reg_n_deaths[REGNO (XEXP (temp, 0))]++; | |
2237 | ||
2238 | distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX); | |
2239 | } | |
2240 | ||
230d793d | 2241 | if (new_i3_notes) |
1a26b032 RK |
2242 | { |
2243 | for (temp = new_i3_notes; temp; temp = XEXP (temp, 1)) | |
2244 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
2245 | reg_n_deaths[REGNO (XEXP (temp, 0))]++; | |
2246 | ||
2247 | distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX); | |
2248 | } | |
230d793d RS |
2249 | |
2250 | /* If I3DEST was used in I3SRC, it really died in I3. We may need to | |
1a26b032 RK |
2251 | put a REG_DEAD note for it somewhere. Similarly for I2 and I1. |
2252 | Show an additional death due to the REG_DEAD note we make here. If | |
2253 | we discard it in distribute_notes, we will decrement it again. */ | |
d0ab8cd3 | 2254 | |
230d793d | 2255 | if (i3dest_killed) |
1a26b032 RK |
2256 | { |
2257 | if (GET_CODE (i3dest_killed) == REG) | |
2258 | reg_n_deaths[REGNO (i3dest_killed)]++; | |
2259 | ||
2260 | distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed, | |
2261 | NULL_RTX), | |
2262 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | |
2263 | NULL_RTX, NULL_RTX); | |
2264 | } | |
58c8c593 RK |
2265 | |
2266 | /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets | |
2267 | I2DEST or I1DEST, the death must be somewhere before I2, not I3. If | |
2268 | we passed I3 in that case, it might delete I2. */ | |
2269 | ||
230d793d | 2270 | if (i2dest_in_i2src) |
58c8c593 | 2271 | { |
1a26b032 RK |
2272 | if (GET_CODE (i2dest) == REG) |
2273 | reg_n_deaths[REGNO (i2dest)]++; | |
2274 | ||
58c8c593 RK |
2275 | if (newi2pat && reg_set_p (i2dest, newi2pat)) |
2276 | distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX), | |
2277 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); | |
2278 | else | |
2279 | distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX), | |
2280 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | |
2281 | NULL_RTX, NULL_RTX); | |
2282 | } | |
2283 | ||
230d793d | 2284 | if (i1dest_in_i1src) |
58c8c593 | 2285 | { |
1a26b032 RK |
2286 | if (GET_CODE (i1dest) == REG) |
2287 | reg_n_deaths[REGNO (i1dest)]++; | |
2288 | ||
58c8c593 RK |
2289 | if (newi2pat && reg_set_p (i1dest, newi2pat)) |
2290 | distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX), | |
2291 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); | |
2292 | else | |
2293 | distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX), | |
2294 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | |
2295 | NULL_RTX, NULL_RTX); | |
2296 | } | |
230d793d RS |
2297 | |
2298 | distribute_links (i3links); | |
2299 | distribute_links (i2links); | |
2300 | distribute_links (i1links); | |
2301 | ||
2302 | if (GET_CODE (i2dest) == REG) | |
2303 | { | |
d0ab8cd3 RK |
2304 | rtx link; |
2305 | rtx i2_insn = 0, i2_val = 0, set; | |
2306 | ||
2307 | /* The insn that used to set this register doesn't exist, and | |
2308 | this life of the register may not exist either. See if one of | |
2309 | I3's links points to an insn that sets I2DEST. If it does, | |
2310 | that is now the last known value for I2DEST. If we don't update | |
2311 | this and I2 set the register to a value that depended on its old | |
230d793d RS |
2312 | contents, we will get confused. If this insn is used, things | |
2313 | will be set correctly in combine_instructions. */ | |
d0ab8cd3 RK |
2314 | |
2315 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2316 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2317 | && rtx_equal_p (i2dest, SET_DEST (set))) | |
2318 | i2_insn = XEXP (link, 0), i2_val = SET_SRC (set); | |
2319 | ||
2320 | record_value_for_reg (i2dest, i2_insn, i2_val); | |
230d793d RS |
2321 | |
2322 | /* If the reg formerly set in I2 died only once and that was in I3, | |
2323 | zero its use count so it won't make `reload' do any work. */ | |
5af91171 | 2324 | if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src) |
230d793d RS |
2325 | { |
2326 | regno = REGNO (i2dest); | |
2327 | reg_n_sets[regno]--; | |
2328 | if (reg_n_sets[regno] == 0 | |
5f4f0e22 CH |
2329 | && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS] |
2330 | & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS)))) | |
230d793d RS |
2331 | reg_n_refs[regno] = 0; |
2332 | } | |
2333 | } | |
2334 | ||
2335 | if (i1 && GET_CODE (i1dest) == REG) | |
2336 | { | |
d0ab8cd3 RK |
2337 | rtx link; |
2338 | rtx i1_insn = 0, i1_val = 0, set; | |
2339 | ||
2340 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2341 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2342 | && rtx_equal_p (i1dest, SET_DEST (set))) | |
2343 | i1_insn = XEXP (link, 0), i1_val = SET_SRC (set); | |
2344 | ||
2345 | record_value_for_reg (i1dest, i1_insn, i1_val); | |
2346 | ||
230d793d | 2347 | regno = REGNO (i1dest); |
5af91171 | 2348 | if (! added_sets_1 && ! i1dest_in_i1src) |
230d793d RS |
2349 | { |
2350 | reg_n_sets[regno]--; | |
2351 | if (reg_n_sets[regno] == 0 | |
5f4f0e22 CH |
2352 | && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS] |
2353 | & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS)))) | |
230d793d RS |
2354 | reg_n_refs[regno] = 0; |
2355 | } | |
2356 | } | |
2357 | ||
951553af | 2358 | /* Update reg_nonzero_bits et al for any changes that may have been made |
22609cbf RK |
2359 | to this insn. */ |
2360 | ||
951553af | 2361 | note_stores (newpat, set_nonzero_bits_and_sign_copies); |
22609cbf | 2362 | if (newi2pat) |
951553af | 2363 | note_stores (newi2pat, set_nonzero_bits_and_sign_copies); |
22609cbf | 2364 | |
a29ca9db RK |
2365 | /* If we added any (clobber (scratch)), add them to the max for a |
2366 | block. This is a very pessimistic calculation, since we might | |
2367 | have had them already and this might not be the worst block, but | |
2368 | it's not worth doing any better. */ | |
2369 | max_scratch += i3_scratches + i2_scratches + other_scratches; | |
2370 | ||
230d793d RS |
2371 | /* If I3 is now an unconditional jump, ensure that it has a |
2372 | BARRIER following it since it may have initially been a | |
381ee8af | 2373 | conditional jump. It may also be the last nonnote insn. */ |
230d793d RS |
2374 | |
2375 | if ((GET_CODE (newpat) == RETURN || simplejump_p (i3)) | |
381ee8af TW |
2376 | && ((temp = next_nonnote_insn (i3)) == NULL_RTX |
2377 | || GET_CODE (temp) != BARRIER)) | |
230d793d RS |
2378 | emit_barrier_after (i3); |
2379 | } | |
2380 | ||
2381 | combine_successes++; | |
2382 | ||
bcd49eb7 JW |
2383 | /* Clear this here, so that subsequent get_last_value calls are not |
2384 | affected. */ | |
2385 | subst_prev_insn = NULL_RTX; | |
2386 | ||
abe6e52f RK |
2387 | if (added_links_insn |
2388 | && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2)) | |
2389 | && INSN_CUID (added_links_insn) < INSN_CUID (i3)) | |
2390 | return added_links_insn; | |
2391 | else | |
2392 | return newi2pat ? i2 : i3; | |
230d793d RS |
2393 | } |
2394 | \f | |
2395 | /* Undo all the modifications recorded in undobuf. */ | |
2396 | ||
2397 | static void | |
2398 | undo_all () | |
2399 | { | |
2400 | register int i; | |
2401 | if (undobuf.num_undo > MAX_UNDO) | |
2402 | undobuf.num_undo = MAX_UNDO; | |
2403 | for (i = undobuf.num_undo - 1; i >= 0; i--) | |
7c046e4e RK |
2404 | { |
2405 | if (undobuf.undo[i].is_int) | |
2406 | *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i; | |
2407 | else | |
f5393ab9 | 2408 | *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r; |
7c046e4e RK |
2409 | |
2410 | } | |
230d793d RS |
2411 | |
2412 | obfree (undobuf.storage); | |
2413 | undobuf.num_undo = 0; | |
bcd49eb7 JW |
2414 | |
2415 | /* Clear this here, so that subsequent get_last_value calls are not | |
2416 | affected. */ | |
2417 | subst_prev_insn = NULL_RTX; | |
230d793d RS |
2418 | } |
2419 | \f | |
2420 | /* Find the innermost point within the rtx at LOC, possibly LOC itself, | |
d0ab8cd3 RK |
2421 | where we have an arithmetic expression and return that point. LOC will |
2422 | be inside INSN. | |
230d793d RS |
2423 | |
2424 | try_combine will call this function to see if an insn can be split into | |
2425 | two insns. */ | |
2426 | ||
2427 | static rtx * | |
d0ab8cd3 | 2428 | find_split_point (loc, insn) |
230d793d | 2429 | rtx *loc; |
d0ab8cd3 | 2430 | rtx insn; |
230d793d RS |
2431 | { |
2432 | rtx x = *loc; | |
2433 | enum rtx_code code = GET_CODE (x); | |
2434 | rtx *split; | |
2435 | int len = 0, pos, unsignedp; | |
2436 | rtx inner; | |
2437 | ||
2438 | /* First special-case some codes. */ | |
2439 | switch (code) | |
2440 | { | |
2441 | case SUBREG: | |
2442 | #ifdef INSN_SCHEDULING | |
2443 | /* If we are making a paradoxical SUBREG invalid, it becomes a split | |
2444 | point. */ | |
2445 | if (GET_CODE (SUBREG_REG (x)) == MEM) | |
2446 | return loc; | |
2447 | #endif | |
d0ab8cd3 | 2448 | return find_split_point (&SUBREG_REG (x), insn); |
230d793d | 2449 | |
230d793d | 2450 | case MEM: |
916f14f1 | 2451 | #ifdef HAVE_lo_sum |
230d793d RS |
2452 | /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it |
2453 | using LO_SUM and HIGH. */ | |
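	 /* For example, (mem:SI (symbol_ref "x")) becomes
		(mem:SI (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	    and the HIGH term is returned as the split point, so it can be
	    loaded by a separate insn on machines that build addresses in
	    two pieces.  */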
2454 | if (GET_CODE (XEXP (x, 0)) == CONST | |
2455 | || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) | |
2456 | { | |
2457 | SUBST (XEXP (x, 0), | |
2458 | gen_rtx_combine (LO_SUM, Pmode, | |
2459 | gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)), | |
2460 | XEXP (x, 0))); | |
2461 | return &XEXP (XEXP (x, 0), 0); | |
2462 | } | |
230d793d RS |
2463 | #endif |
2464 | ||
916f14f1 RK |
2465 | /* If we have a PLUS whose second operand is a constant and the |
2466 | address is not valid, perhaps we can split it up using |
2467 | the machine-specific way to split large constants. We use | |
ddd5a7c1 | 2468 | the first pseudo-reg (one of the virtual regs) as a placeholder; |
916f14f1 RK |
2469 | it will not remain in the result. */ |
2470 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
2471 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
2472 | && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
2473 | { | |
2474 | rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; | |
2475 | rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)), | |
2476 | subst_insn); | |
2477 | ||
2478 | /* This should have produced two insns, each of which sets our | |
2479 | placeholder. If the source of the second is a valid address, | |
2480 | we can put both sources together and make a split point | |
2481 | in the middle. */ | |
2482 | ||
2483 | if (seq && XVECLEN (seq, 0) == 2 | |
2484 | && GET_CODE (XVECEXP (seq, 0, 0)) == INSN | |
2485 | && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET | |
2486 | && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg | |
2487 | && ! reg_mentioned_p (reg, | |
2488 | SET_SRC (PATTERN (XVECEXP (seq, 0, 0)))) | |
2489 | && GET_CODE (XVECEXP (seq, 0, 1)) == INSN | |
2490 | && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET | |
2491 | && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg | |
2492 | && memory_address_p (GET_MODE (x), | |
2493 | SET_SRC (PATTERN (XVECEXP (seq, 0, 1))))) | |
2494 | { | |
2495 | rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0))); | |
2496 | rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1))); | |
2497 | ||
2498 | /* Replace the placeholder in SRC2 with SRC1. If we can | |
2499 | find where in SRC2 it was placed, that can become our | |
2500 | split point and we can replace this address with SRC2. | |
2501 | Just try two obvious places. */ | |
2502 | ||
2503 | src2 = replace_rtx (src2, reg, src1); | |
2504 | split = 0; | |
2505 | if (XEXP (src2, 0) == src1) | |
2506 | split = &XEXP (src2, 0); | |
2507 | else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e' | |
2508 | && XEXP (XEXP (src2, 0), 0) == src1) | |
2509 | split = &XEXP (XEXP (src2, 0), 0); | |
2510 | ||
2511 | if (split) | |
2512 | { | |
2513 | SUBST (XEXP (x, 0), src2); | |
2514 | return split; | |
2515 | } | |
2516 | } | |
1a26b032 RK |
2517 | |
2518 | /* If that didn't work, perhaps the first operand is complex and | |
2519 | needs to be computed separately, so make a split point there. | |
2520 | This will occur on machines that just support REG + CONST | |
2521 | and have a constant moved through some previous computation. */ | |
2522 | ||
2523 | else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o' | |
2524 | && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG | |
2525 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0)))) | |
2526 | == 'o'))) | |
2527 | return &XEXP (XEXP (x, 0), 0); | |
916f14f1 RK |
2528 | } |
2529 | break; | |
2530 | ||
230d793d RS |
2531 | case SET: |
2532 | #ifdef HAVE_cc0 | |
2533 | /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a | |
2534 | ZERO_EXTRACT, the most likely reason why this doesn't match is that | |
2535 | we need to put the operand into a register. So split at that | |
2536 | point. */ | |
2537 | ||
2538 | if (SET_DEST (x) == cc0_rtx | |
2539 | && GET_CODE (SET_SRC (x)) != COMPARE | |
2540 | && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT | |
2541 | && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o' | |
2542 | && ! (GET_CODE (SET_SRC (x)) == SUBREG | |
2543 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o')) | |
2544 | return &SET_SRC (x); | |
2545 | #endif | |
2546 | ||
2547 | /* See if we can split SET_SRC as it stands. */ | |
d0ab8cd3 | 2548 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2549 | if (split && split != &SET_SRC (x)) |
2550 | return split; | |
2551 | ||
041d7180 JL |
2552 | /* See if we can split SET_DEST as it stands. */ |
2553 | split = find_split_point (&SET_DEST (x), insn); | |
2554 | if (split && split != &SET_DEST (x)) | |
2555 | return split; | |
2556 | ||
230d793d RS |
2557 | /* See if this is a bitfield assignment with everything constant. If |
2558 | so, this is an IOR of an AND, so split it into that. */ | |
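/* A concrete instance of the rewrite done below (assuming BITS_BIG_ENDIAN
   is zero, so POS needs no adjustment): storing the constant 5 into a
   3-bit field at bit 4 of D,

       (set (zero_extract:SI D (const_int 3) (const_int 4)) (const_int 5))

   becomes

       (set D (ior:SI (and:SI D (const_int -113))    ; ~ (7 << 4)
                      (const_int 80)))               ; 5 << 4

   after which the new SET_SRC is handed back to find_split_point.  */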
2559 | if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
2560 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | |
5f4f0e22 | 2561 | <= HOST_BITS_PER_WIDE_INT) |
230d793d RS |
2562 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT |
2563 | && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT | |
2564 | && GET_CODE (SET_SRC (x)) == CONST_INT | |
2565 | && ((INTVAL (XEXP (SET_DEST (x), 1)) | |
2566 | + INTVAL (XEXP (SET_DEST (x), 2))) | |
2567 | <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) | |
2568 | && ! side_effects_p (XEXP (SET_DEST (x), 0))) | |
2569 | { | |
2570 | int pos = INTVAL (XEXP (SET_DEST (x), 2)); | |
2571 | int len = INTVAL (XEXP (SET_DEST (x), 1)); | |
2572 | int src = INTVAL (SET_SRC (x)); | |
2573 | rtx dest = XEXP (SET_DEST (x), 0); | |
2574 | enum machine_mode mode = GET_MODE (dest); | |
5f4f0e22 | 2575 | unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1; |
230d793d | 2576 | |
f76b9db2 ILT |
2577 | if (BITS_BIG_ENDIAN) |
2578 | pos = GET_MODE_BITSIZE (mode) - len - pos; | |
230d793d RS |
2579 | |
2580 | if (src == mask) | |
2581 | SUBST (SET_SRC (x), | |
5f4f0e22 | 2582 | gen_binary (IOR, mode, dest, GEN_INT (src << pos))); |
230d793d RS |
2583 | else |
2584 | SUBST (SET_SRC (x), | |
2585 | gen_binary (IOR, mode, | |
2586 | gen_binary (AND, mode, dest, | |
5f4f0e22 CH |
2587 | GEN_INT (~ (mask << pos) |
2588 | & GET_MODE_MASK (mode))), | |
2589 | GEN_INT (src << pos))); | |
230d793d RS |
2590 | |
2591 | SUBST (SET_DEST (x), dest); | |
2592 | ||
d0ab8cd3 | 2593 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2594 | if (split && split != &SET_SRC (x)) |
2595 | return split; | |
2596 | } | |
2597 | ||
2598 | /* Otherwise, see if this is an operation that we can split into two. | |
2599 | If so, try to split that. */ | |
2600 | code = GET_CODE (SET_SRC (x)); | |
2601 | ||
2602 | switch (code) | |
2603 | { | |
d0ab8cd3 RK |
2604 | case AND: |
2605 | /* If we are AND'ing with a large constant that is only a single | |
2606 | bit and the result is only being used in a context where we | |
2607 | need to know if it is zero or non-zero, replace it with a bit | |
2608 | extraction. This will avoid the large constant, which might | |
2609 | have taken more than one insn to make. If the constant were | |
2610 | not a valid argument to the AND but took only one insn to make, | |
2611 | this is no worse, but if it took more than one insn, it will | |
2612 | be better. */ | |
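/* For example (illustrative only): if r2 is set by
       (set (reg:SI r2) (and:SI (reg:SI r1) (const_int 4096)))
   and its only use is the test (ne (reg:SI r2) (const_int 0)), then
   exact_log2 (4096) = 12 >= 7, so the AND is replaced by a one-bit
   unsigned extraction of bit 12 of r1 (in whatever form make_extraction
   chooses), avoiding the literal 0x1000 constant.  */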
2613 | ||
2614 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
2615 | && GET_CODE (XEXP (SET_SRC (x), 0)) == REG | |
2616 | && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 | |
2617 | && GET_CODE (SET_DEST (x)) == REG | |
2618 | && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0 | |
2619 | && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) | |
2620 | && XEXP (*split, 0) == SET_DEST (x) | |
2621 | && XEXP (*split, 1) == const0_rtx) | |
2622 | { | |
76184def DE |
2623 | rtx extraction = make_extraction (GET_MODE (SET_DEST (x)), |
2624 | XEXP (SET_SRC (x), 0), | |
2625 | pos, NULL_RTX, 1, 1, 0, 0); | |
2626 | if (extraction != 0) | |
2627 | { | |
2628 | SUBST (SET_SRC (x), extraction); | |
2629 | return find_split_point (loc, insn); | |
2630 | } | |
d0ab8cd3 RK |
2631 | } |
2632 | break; | |
2633 | ||
230d793d RS |
2634 | case SIGN_EXTEND: |
2635 | inner = XEXP (SET_SRC (x), 0); | |
2636 | pos = 0; | |
2637 | len = GET_MODE_BITSIZE (GET_MODE (inner)); | |
2638 | unsignedp = 0; | |
2639 | break; | |
2640 | ||
2641 | case SIGN_EXTRACT: | |
2642 | case ZERO_EXTRACT: | |
2643 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
2644 | && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT) | |
2645 | { | |
2646 | inner = XEXP (SET_SRC (x), 0); | |
2647 | len = INTVAL (XEXP (SET_SRC (x), 1)); | |
2648 | pos = INTVAL (XEXP (SET_SRC (x), 2)); | |
2649 | ||
f76b9db2 ILT |
2650 | if (BITS_BIG_ENDIAN) |
2651 | pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; | |
230d793d RS |
2652 | unsignedp = (code == ZERO_EXTRACT); |
2653 | } | |
2654 | break; | |
2655 | } | |
2656 | ||
2657 | if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) | |
2658 | { | |
2659 | enum machine_mode mode = GET_MODE (SET_SRC (x)); | |
2660 | ||
d0ab8cd3 RK |
2661 | /* For unsigned, we have a choice of a shift followed by an |
2662 | AND or two shifts. Use two shifts for field sizes where the | |
2663 | constant might be too large. We assume here that we can | |
2664 | always at least get 8-bit constants in an AND insn, which is | |
2665 | true for every current RISC. */ | |
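/* Concretely, for a LEN-bit field at position POS of INNER, with BITS the
   width of MODE, the two forms built below are

     unsigned and LEN <= 8:
         (and (lshiftrt INNER POS) (const_int 2**LEN - 1))
     otherwise:
         (ashiftrt-or-lshiftrt (ashift INNER (BITS - LEN - POS))
                               (BITS - LEN))

   i.e. shift the field to the top of the word and then shift it back
   down, arithmetically or logically depending on signedness.  */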
2666 | ||
2667 | if (unsignedp && len <= 8) | |
230d793d RS |
2668 | { |
2669 | SUBST (SET_SRC (x), | |
2670 | gen_rtx_combine | |
2671 | (AND, mode, | |
2672 | gen_rtx_combine (LSHIFTRT, mode, | |
2673 | gen_lowpart_for_combine (mode, inner), | |
5f4f0e22 CH |
2674 | GEN_INT (pos)), |
2675 | GEN_INT (((HOST_WIDE_INT) 1 << len) - 1))); | |
230d793d | 2676 | |
d0ab8cd3 | 2677 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2678 | if (split && split != &SET_SRC (x)) |
2679 | return split; | |
2680 | } | |
2681 | else | |
2682 | { | |
2683 | SUBST (SET_SRC (x), | |
2684 | gen_rtx_combine | |
d0ab8cd3 | 2685 | (unsignedp ? LSHIFTRT : ASHIFTRT, mode, |
230d793d RS |
2686 | gen_rtx_combine (ASHIFT, mode, |
2687 | gen_lowpart_for_combine (mode, inner), | |
5f4f0e22 CH |
2688 | GEN_INT (GET_MODE_BITSIZE (mode) |
2689 | - len - pos)), | |
2690 | GEN_INT (GET_MODE_BITSIZE (mode) - len))); | |
230d793d | 2691 | |
d0ab8cd3 | 2692 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2693 | if (split && split != &SET_SRC (x)) |
2694 | return split; | |
2695 | } | |
2696 | } | |
2697 | ||
2698 | /* See if this is a simple operation with a constant as the second | |
2699 | operand. It might be that this constant is out of range and hence | |
2700 | could be used as a split point. */ | |
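/* E.g. (set R (plus R2 (const_int 100000))) may fail to match only
   because the immediate is too wide for the add instruction; returning
   &XEXP (SET_SRC (x), 1) lets try_combine load the constant into a
   register of its own.  */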
2701 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
2702 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
2703 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<') | |
2704 | && CONSTANT_P (XEXP (SET_SRC (x), 1)) | |
2705 | && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o' | |
2706 | || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG | |
2707 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0)))) | |
2708 | == 'o')))) | |
2709 | return &XEXP (SET_SRC (x), 1); | |
2710 | ||
2711 | /* Finally, see if this is a simple operation with its first operand | |
2712 | not in a register. The operation might require this operand in a | |
2713 | register, so return it as a split point. We can always do this | |
2714 | because if the first operand were another operation, we would have | |
2715 | already found it as a split point. */ | |
2716 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
2717 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
2718 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<' | |
2719 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1') | |
2720 | && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode)) | |
2721 | return &XEXP (SET_SRC (x), 0); | |
2722 | ||
2723 | return 0; | |
2724 | ||
2725 | case AND: | |
2726 | case IOR: | |
2727 | /* We write NOR as (and (not A) (not B)), but if we don't have a NOR, | |
2728 | it is better to write this as (not (ior A B)) so we can split it. | |
2729 | Similarly for IOR. */ | |
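/* That is, (and (not A) (not B)) is rewritten below as (not (ior A B)),
   and (ior (not A) (not B)) as (not (and A B)), before retrying the
   split.  */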
2730 | if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT) | |
2731 | { | |
2732 | SUBST (*loc, | |
2733 | gen_rtx_combine (NOT, GET_MODE (x), | |
2734 | gen_rtx_combine (code == IOR ? AND : IOR, | |
2735 | GET_MODE (x), | |
2736 | XEXP (XEXP (x, 0), 0), | |
2737 | XEXP (XEXP (x, 1), 0)))); | |
d0ab8cd3 | 2738 | return find_split_point (loc, insn); |
230d793d RS |
2739 | } |
2740 | ||
2741 | /* Many RISC machines have a large set of logical insns. If the | |
2742 | second operand is a NOT, put it first so we will try to split the | |
2743 | other operand first. */ | |
2744 | if (GET_CODE (XEXP (x, 1)) == NOT) | |
2745 | { | |
2746 | rtx tem = XEXP (x, 0); | |
2747 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
2748 | SUBST (XEXP (x, 1), tem); | |
2749 | } | |
2750 | break; | |
2751 | } | |
2752 | ||
2753 | /* Otherwise, select our actions depending on our rtx class. */ | |
2754 | switch (GET_RTX_CLASS (code)) | |
2755 | { | |
2756 | case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */ | |
2757 | case '3': | |
d0ab8cd3 | 2758 | split = find_split_point (&XEXP (x, 2), insn); |
230d793d RS |
2759 | if (split) |
2760 | return split; | |
2761 | /* ... fall through ... */ | |
2762 | case '2': | |
2763 | case 'c': | |
2764 | case '<': | |
d0ab8cd3 | 2765 | split = find_split_point (&XEXP (x, 1), insn); |
230d793d RS |
2766 | if (split) |
2767 | return split; | |
2768 | /* ... fall through ... */ | |
2769 | case '1': | |
2770 | /* Some machines have (and (shift ...) ...) insns. If X is not | |
2771 | an AND, but XEXP (X, 0) is, use it as our split point. */ | |
2772 | if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND) | |
2773 | return &XEXP (x, 0); | |
2774 | ||
d0ab8cd3 | 2775 | split = find_split_point (&XEXP (x, 0), insn); |
230d793d RS |
2776 | if (split) |
2777 | return split; | |
2778 | return loc; | |
2779 | } | |
2780 | ||
2781 | /* Otherwise, we don't have a split point. */ | |
2782 | return 0; | |
2783 | } | |
2784 | \f | |
2785 | /* Throughout X, replace FROM with TO, and return the result. | |
2786 | The result is TO if X is FROM; | |
2787 | otherwise the result is X, but its contents may have been modified. | |
2788 | If they were modified, a record was made in undobuf so that | |
2789 | undo_all will (among other things) return X to its original state. | |
2790 | ||
2791 | If the number of changes necessary is too large to record for undoing, |
2792 | the excess changes are not made, so the result is invalid. | |
2793 | The changes already made can still be undone. | |
2794 | undobuf.num_undo is incremented for such changes, so by testing that | |
2795 | the caller can tell whether the result is valid. | |
2796 | ||
2797 | `n_occurrences' is incremented each time FROM is replaced. | |
2798 | ||
2799 | IN_DEST is non-zero if we are processing the SET_DEST of a SET. | |
2800 | ||
5089e22e | 2801 | UNIQUE_COPY is non-zero if each substitution must be unique. We do this |
230d793d RS |
2802 | by copying if `n_occurrences' is non-zero. */ |
2803 | ||
2804 | static rtx | |
2805 | subst (x, from, to, in_dest, unique_copy) | |
2806 | register rtx x, from, to; | |
2807 | int in_dest; | |
2808 | int unique_copy; | |
2809 | { | |
f24ad0e4 | 2810 | register enum rtx_code code = GET_CODE (x); |
230d793d | 2811 | enum machine_mode op0_mode = VOIDmode; |
8079805d RK |
2812 | register char *fmt; |
2813 | register int len, i; | |
2814 | rtx new; | |
230d793d RS |
2815 | |
2816 | /* Two expressions are equal if they are identical copies of a shared | |
2817 | RTX or if they are both registers with the same register number | |
2818 | and mode. */ | |
2819 | ||
2820 | #define COMBINE_RTX_EQUAL_P(X,Y) \ | |
2821 | ((X) == (Y) \ | |
2822 | || (GET_CODE (X) == REG && GET_CODE (Y) == REG \ | |
2823 | && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y))) | |
2824 | ||
2825 | if (! in_dest && COMBINE_RTX_EQUAL_P (x, from)) | |
2826 | { | |
2827 | n_occurrences++; | |
2828 | return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to); | |
2829 | } | |
2830 | ||
2831 | /* If X and FROM are the same register but different modes, they will | |
2832 | not have been seen as equal above. However, flow.c will make a | |
2833 | LOG_LINKS entry for that case. If we do nothing, we will try to | |
2834 | rerecognize our original insn and, when it succeeds, we will | |
2835 | delete the feeding insn, which is incorrect. | |
2836 | ||
2837 | So force this insn not to match in this (rare) case. */ | |
2838 | if (! in_dest && code == REG && GET_CODE (from) == REG | |
2839 | && REGNO (x) == REGNO (from)) | |
2840 | return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx); | |
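/* (clobber (const_int 0)) is combine's conventional "cannot match"
   marker; the callers of subst test for it (as does the code further
   down) and abandon the combination.  */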
2841 | ||
2842 | /* If this is an object, we are done unless it is a MEM or LO_SUM, both | |
2843 | of which may contain things that can be combined. */ | |
2844 | if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o') | |
2845 | return x; | |
2846 | ||
2847 | /* It is possible to have a subexpression appear twice in the insn. | |
2848 | Suppose that FROM is a register that appears within TO. | |
2849 | Then, after that subexpression has been scanned once by `subst', | |
2850 | the second time it is scanned, TO may be found. If we were | |
2851 | to scan TO here, we would find FROM within it and create a | |
2852 | self-referent rtl structure which is completely wrong. */ | |
2853 | if (COMBINE_RTX_EQUAL_P (x, to)) | |
2854 | return to; | |
2855 | ||
2856 | len = GET_RTX_LENGTH (code); | |
2857 | fmt = GET_RTX_FORMAT (code); | |
2858 | ||
2859 | /* We don't need to process a SET_DEST that is a register, CC0, or PC, so | |
2860 | set up to skip this common case. All other cases where we want to | |
2861 | suppress replacing something inside a SET_SRC are handled via the | |
2862 | IN_DEST operand. */ | |
2863 | if (code == SET | |
2864 | && (GET_CODE (SET_DEST (x)) == REG | |
2865 | || GET_CODE (SET_DEST (x)) == CC0 | |
2866 | || GET_CODE (SET_DEST (x)) == PC)) | |
2867 | fmt = "ie"; | |
2868 | ||
2869 | /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */ | |
2870 | if (fmt[0] == 'e') | |
2871 | op0_mode = GET_MODE (XEXP (x, 0)); | |
2872 | ||
2873 | for (i = 0; i < len; i++) | |
2874 | { | |
2875 | if (fmt[i] == 'E') | |
2876 | { | |
2877 | register int j; | |
2878 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2879 | { | |
230d793d RS |
2880 | if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from)) |
2881 | { | |
2882 | new = (unique_copy && n_occurrences ? copy_rtx (to) : to); | |
2883 | n_occurrences++; | |
2884 | } | |
2885 | else | |
2886 | { | |
2887 | new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy); | |
2888 | ||
2889 | /* If this substitution failed, this whole thing fails. */ | |
2890 | if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx) | |
2891 | return new; | |
2892 | } | |
2893 | ||
2894 | SUBST (XVECEXP (x, i, j), new); | |
2895 | } | |
2896 | } | |
2897 | else if (fmt[i] == 'e') | |
2898 | { | |
230d793d RS |
2899 | if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from)) |
2900 | { | |
42301240 RK |
2901 | /* In general, don't install a subreg involving two modes not |
2902 | tieable. It can worsen register allocation, and can even | |
2903 | make invalid reload insns, since the reg inside may need to | |
2904 | be copied from in the outside mode, and that may be invalid | |
2905 | if it is an fp reg copied in integer mode. | |
2906 | ||
2907 | We allow two exceptions to this: It is valid if it is inside | |
2908 | another SUBREG and the mode of that SUBREG and the mode of | |
2909 | the inside of TO is tieable and it is valid if X is a SET | |
2910 | that copies FROM to CC0. */ | |
2911 | if (GET_CODE (to) == SUBREG | |
2912 | && ! MODES_TIEABLE_P (GET_MODE (to), | |
2913 | GET_MODE (SUBREG_REG (to))) | |
2914 | && ! (code == SUBREG | |
8079805d RK |
2915 | && MODES_TIEABLE_P (GET_MODE (x), |
2916 | GET_MODE (SUBREG_REG (to)))) | |
42301240 RK |
2917 | #ifdef HAVE_cc0 |
2918 | && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx) | |
2919 | #endif | |
2920 | ) | |
2921 | return gen_rtx (CLOBBER, VOIDmode, const0_rtx); | |
2922 | ||
230d793d RS |
2923 | new = (unique_copy && n_occurrences ? copy_rtx (to) : to); |
2924 | n_occurrences++; | |
2925 | } | |
2926 | else | |
2927 | /* If we are in a SET_DEST, suppress most cases unless we | |
2928 | have gone inside a MEM, in which case we want to | |
2929 | simplify the address. We assume here that things that | |
2930 | are actually part of the destination have their inner | |
2931 | parts in the first expression. This is true for SUBREG, | |
2932 | STRICT_LOW_PART, and ZERO_EXTRACT, which are the only | |
2933 | things aside from REG and MEM that should appear in a | |
2934 | SET_DEST. */ | |
2935 | new = subst (XEXP (x, i), from, to, | |
2936 | (((in_dest | |
2937 | && (code == SUBREG || code == STRICT_LOW_PART | |
2938 | || code == ZERO_EXTRACT)) | |
2939 | || code == SET) | |
2940 | && i == 0), unique_copy); | |
2941 | ||
2942 | /* If we found that we will have to reject this combination, | |
2943 | indicate that by returning the CLOBBER ourselves, rather than | |
2944 | an expression containing it. This will speed things up as | |
2945 | well as prevent accidents where two CLOBBERs are considered | |
2946 | to be equal, thus producing an incorrect simplification. */ | |
2947 | ||
2948 | if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx) | |
2949 | return new; | |
2950 | ||
2951 | SUBST (XEXP (x, i), new); | |
2952 | } | |
2953 | } | |
2954 | ||
8079805d RK |
2955 | /* Try to simplify X. If the simplification changed the code, it is likely |
2956 | that further simplification will help, so loop, but limit the number | |
2957 | of repetitions that will be performed. */ | |
2958 | ||
2959 | for (i = 0; i < 4; i++) | |
2960 | { | |
2961 | /* If X is sufficiently simple, don't bother trying to do anything | |
2962 | with it. */ | |
2963 | if (code != CONST_INT && code != REG && code != CLOBBER) | |
2964 | x = simplify_rtx (x, op0_mode, i == 3, in_dest); | |
d0ab8cd3 | 2965 | |
8079805d RK |
2966 | if (GET_CODE (x) == code) |
2967 | break; | |
d0ab8cd3 | 2968 | |
8079805d | 2969 | code = GET_CODE (x); |
eeb43d32 | 2970 | |
8079805d RK |
2971 | /* We no longer know the original mode of operand 0 since we |
2972 | have changed the form of X.  */ |
2973 | op0_mode = VOIDmode; | |
2974 | } | |
eeb43d32 | 2975 | |
8079805d RK |
2976 | return x; |
2977 | } | |
2978 | \f | |
2979 | /* Simplify X, a piece of RTL. We just operate on the expression at the | |
2980 | outer level; call `subst' to simplify recursively. Return the new | |
2981 | expression. | |
2982 | ||
2983 | OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this | |
2984 | will be the last iteration even if an expression with a code different from |
2985 | X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */ | |
eeb43d32 | 2986 | |
8079805d RK |
2987 | static rtx |
2988 | simplify_rtx (x, op0_mode, last, in_dest) | |
2989 | rtx x; | |
2990 | enum machine_mode op0_mode; | |
2991 | int last; | |
2992 | int in_dest; | |
2993 | { | |
2994 | enum rtx_code code = GET_CODE (x); | |
2995 | enum machine_mode mode = GET_MODE (x); | |
2996 | rtx temp; | |
2997 | int i; | |
d0ab8cd3 | 2998 | |
230d793d RS |
2999 | /* If this is a commutative operation, put a constant last and a complex |
3000 | expression first. We don't need to do this for comparisons here. */ | |
3001 | if (GET_RTX_CLASS (code) == 'c' | |
3002 | && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT) | |
3003 | || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o' | |
3004 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o') | |
3005 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
3006 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o' | |
3007 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'))) | |
3008 | { | |
3009 | temp = XEXP (x, 0); | |
3010 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3011 | SUBST (XEXP (x, 1), temp); | |
3012 | } | |
3013 | ||
22609cbf RK |
3014 | /* If this is a PLUS, MINUS, or MULT, and the first operand is the |
3015 | sign extension of a PLUS with a constant, reverse the order of the sign | |
3016 | extension and the addition. Note that this is not the same as the original |
3017 | code, but overflow is undefined for signed values. Also note that the | |
3018 | PLUS will have been partially moved "inside" the sign-extension, so that | |
3019 | the first operand of X will really look like: | |
3020 | (ashiftrt (plus (ashift A C4) C5) C4). | |
3021 | We convert this to | |
3022 | (plus (ashiftrt (ashift A C4) C4) C6), where C6 is (ashiftrt C5 C4), |
3023 | and replace the first operand of X with that expression. Later parts | |
3024 | of this function may simplify the expression further. | |
3025 | ||
3026 | For example, if we start with (mult (sign_extend (plus A C1)) C2), | |
3027 | we swap the SIGN_EXTEND and PLUS. Later code will apply the | |
3028 | distributive law to produce (plus (mult (sign_extend A) C2) C3), where C3 is C1*C2. |
3029 | ||
3030 | We do this to simplify address expressions. */ | |
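/* A worked instance of the above with illustrative values: with C4 = 16
   (HImode inside SImode), C1 = 2 and C2 = 4, the first operand of
   (mult (sign_extend:SI (plus:HI A (const_int 2))) (const_int 4)) arrives
   here as (ashiftrt (plus (ashift A 16) (const_int 0x20000)) 16); it is
   rewritten to (plus (ashiftrt (ashift A 16) 16) (const_int 2)), and the
   distributive law later gives
   (plus (mult (sign_extend A) (const_int 4)) (const_int 8)).  */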
3031 | ||
3032 | if ((code == PLUS || code == MINUS || code == MULT) | |
3033 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3034 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS | |
3035 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT | |
3036 | && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT | |
3037 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3038 | && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1) | |
3039 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
3040 | && (temp = simplify_binary_operation (ASHIFTRT, mode, | |
3041 | XEXP (XEXP (XEXP (x, 0), 0), 1), | |
3042 | XEXP (XEXP (x, 0), 1))) != 0) | |
3043 | { | |
3044 | rtx new | |
3045 | = simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3046 | XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0), | |
3047 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3048 | ||
3049 | new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new, | |
3050 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3051 | ||
3052 | SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp)); | |
3053 | } | |
3054 | ||
d0ab8cd3 RK |
3055 | /* If this is a simple operation applied to an IF_THEN_ELSE, try |
3056 | applying it to the arms of the IF_THEN_ELSE. This often simplifies | |
abe6e52f RK |
3057 | things. Check for cases where both arms are testing the same |
3058 | condition. | |
3059 | ||
3060 | Don't do anything if all operands are very simple. */ | |
3061 | ||
3062 | if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c' | |
3063 | || GET_RTX_CLASS (code) == '<') | |
3064 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3065 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3066 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3067 | == 'o'))) | |
3068 | || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o' | |
3069 | && ! (GET_CODE (XEXP (x, 1)) == SUBREG | |
3070 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1)))) | |
3071 | == 'o'))))) | |
3072 | || (GET_RTX_CLASS (code) == '1' | |
3073 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3074 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3075 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3076 | == 'o')))))) | |
d0ab8cd3 | 3077 | { |
abe6e52f RK |
3078 | rtx cond, true, false; |
3079 | ||
3080 | cond = if_then_else_cond (x, &true, &false); | |
3081 | if (cond != 0) | |
3082 | { | |
3083 | rtx cop1 = const0_rtx; | |
3084 | enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1); | |
3085 | ||
15448afc RK |
3086 | if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<') |
3087 | return x; | |
3088 | ||
9210df58 RK |
3089 | /* Simplify the alternative arms; this may collapse the true and |
3090 | false arms to store-flag values. */ | |
3091 | true = subst (true, pc_rtx, pc_rtx, 0, 0); | |
3092 | false = subst (false, pc_rtx, pc_rtx, 0, 0); | |
3093 | ||
3094 | /* Restarting if we generate a store-flag expression will cause | |
3095 | us to loop. Just drop through in this case. */ | |
3096 | ||
abe6e52f RK |
3097 | /* If the result values are STORE_FLAG_VALUE and zero, we can |
3098 | just make the comparison operation. */ | |
3099 | if (true == const_true_rtx && false == const0_rtx) | |
3100 | x = gen_binary (cond_code, mode, cond, cop1); | |
3101 | else if (true == const0_rtx && false == const_true_rtx) | |
3102 | x = gen_binary (reverse_condition (cond_code), mode, cond, cop1); | |
3103 | ||
3104 | /* Likewise, we can make the negate of a comparison operation | |
3105 | if the result values are - STORE_FLAG_VALUE and zero. */ | |
3106 | else if (GET_CODE (true) == CONST_INT | |
3107 | && INTVAL (true) == - STORE_FLAG_VALUE | |
3108 | && false == const0_rtx) | |
0c1c8ea6 | 3109 | x = gen_unary (NEG, mode, mode, |
abe6e52f RK |
3110 | gen_binary (cond_code, mode, cond, cop1)); |
3111 | else if (GET_CODE (false) == CONST_INT | |
3112 | && INTVAL (false) == - STORE_FLAG_VALUE | |
3113 | && true == const0_rtx) | |
0c1c8ea6 | 3114 | x = gen_unary (NEG, mode, mode, |
abe6e52f RK |
3115 | gen_binary (reverse_condition (cond_code), |
3116 | mode, cond, cop1)); | |
3117 | else | |
8079805d RK |
3118 | return gen_rtx (IF_THEN_ELSE, mode, |
3119 | gen_binary (cond_code, VOIDmode, cond, cop1), | |
3120 | true, false); | |
5109d49f | 3121 | |
9210df58 RK |
3122 | code = GET_CODE (x); |
3123 | op0_mode = VOIDmode; | |
abe6e52f | 3124 | } |
d0ab8cd3 RK |
3125 | } |
3126 | ||
230d793d RS |
3127 | /* Try to fold this expression in case we have constants that weren't |
3128 | present before. */ | |
3129 | temp = 0; | |
3130 | switch (GET_RTX_CLASS (code)) | |
3131 | { | |
3132 | case '1': | |
3133 | temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode); | |
3134 | break; | |
3135 | case '<': | |
3136 | temp = simplify_relational_operation (code, op0_mode, | |
3137 | XEXP (x, 0), XEXP (x, 1)); | |
77fa0940 RK |
3138 | #ifdef FLOAT_STORE_FLAG_VALUE |
3139 | if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) | |
3140 | temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x)) | |
3141 | : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x))); | |
3142 | #endif | |
230d793d RS |
3143 | break; |
3144 | case 'c': | |
3145 | case '2': | |
3146 | temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); | |
3147 | break; | |
3148 | case 'b': | |
3149 | case '3': | |
3150 | temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0), | |
3151 | XEXP (x, 1), XEXP (x, 2)); | |
3152 | break; | |
3153 | } | |
3154 | ||
3155 | if (temp) | |
d0ab8cd3 | 3156 | x = temp, code = GET_CODE (temp); |
230d793d | 3157 | |
230d793d | 3158 | /* First see if we can apply the inverse distributive law. */ |
224eeff2 RK |
3159 | if (code == PLUS || code == MINUS |
3160 | || code == AND || code == IOR || code == XOR) | |
230d793d RS |
3161 | { |
3162 | x = apply_distributive_law (x); | |
3163 | code = GET_CODE (x); | |
3164 | } | |
3165 | ||
3166 | /* If CODE is an associative operation not otherwise handled, see if we | |
3167 | can associate some operands. This can win if they are constants or | |
3168 | if they are logically related (i.e. (a & b) & a). */ |
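/* For example, (plus (plus X (const_int 4)) (const_int 8)) reassociates
   here to (plus X (const_int 12)), because simplify_binary_operation can
   fold the two constants once they are brought together.  */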
3169 | if ((code == PLUS || code == MINUS | |
3170 | || code == MULT || code == AND || code == IOR || code == XOR | |
3171 | || code == DIV || code == UDIV | |
3172 | || code == SMAX || code == SMIN || code == UMAX || code == UMIN) | |
3ad2180a | 3173 | && INTEGRAL_MODE_P (mode)) |
230d793d RS |
3174 | { |
3175 | if (GET_CODE (XEXP (x, 0)) == code) | |
3176 | { | |
3177 | rtx other = XEXP (XEXP (x, 0), 0); | |
3178 | rtx inner_op0 = XEXP (XEXP (x, 0), 1); | |
3179 | rtx inner_op1 = XEXP (x, 1); | |
3180 | rtx inner; | |
3181 | ||
3182 | /* Make sure we pass the constant operand if any as the second | |
3183 | one if this is a commutative operation. */ | |
3184 | if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c') | |
3185 | { | |
3186 | rtx tem = inner_op0; | |
3187 | inner_op0 = inner_op1; | |
3188 | inner_op1 = tem; | |
3189 | } | |
3190 | inner = simplify_binary_operation (code == MINUS ? PLUS | |
3191 | : code == DIV ? MULT | |
3192 | : code == UDIV ? MULT | |
3193 | : code, | |
3194 | mode, inner_op0, inner_op1); | |
3195 | ||
3196 | /* For commutative operations, try the other pair if that one | |
3197 | didn't simplify. */ | |
3198 | if (inner == 0 && GET_RTX_CLASS (code) == 'c') | |
3199 | { | |
3200 | other = XEXP (XEXP (x, 0), 1); | |
3201 | inner = simplify_binary_operation (code, mode, | |
3202 | XEXP (XEXP (x, 0), 0), | |
3203 | XEXP (x, 1)); | |
3204 | } | |
3205 | ||
3206 | if (inner) | |
8079805d | 3207 | return gen_binary (code, mode, other, inner); |
230d793d RS |
3208 | } |
3209 | } | |
3210 | ||
3211 | /* A little bit of algebraic simplification here. */ | |
3212 | switch (code) | |
3213 | { | |
3214 | case MEM: | |
3215 | /* Ensure that our address has any ASHIFTs converted to MULT in case | |
3216 | address-recognizing predicates are called later. */ | |
3217 | temp = make_compound_operation (XEXP (x, 0), MEM); | |
3218 | SUBST (XEXP (x, 0), temp); | |
3219 | break; | |
3220 | ||
3221 | case SUBREG: | |
3222 | /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG | |
3223 | is paradoxical. If we can't do that safely, then it becomes | |
3224 | something nonsensical so that this combination won't take place. */ | |
3225 | ||
3226 | if (GET_CODE (SUBREG_REG (x)) == MEM | |
3227 | && (GET_MODE_SIZE (mode) | |
3228 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) | |
3229 | { | |
3230 | rtx inner = SUBREG_REG (x); | |
3231 | int endian_offset = 0; | |
3232 | /* Don't change the mode of the MEM | |
3233 | if that would change the meaning of the address. */ | |
3234 | if (MEM_VOLATILE_P (SUBREG_REG (x)) | |
3235 | || mode_dependent_address_p (XEXP (inner, 0))) | |
3236 | return gen_rtx (CLOBBER, mode, const0_rtx); | |
3237 | ||
f76b9db2 ILT |
3238 | if (BYTES_BIG_ENDIAN) |
3239 | { | |
3240 | if (GET_MODE_SIZE (mode) < UNITS_PER_WORD) | |
3241 | endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode); | |
3242 | if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD) | |
3243 | endian_offset -= (UNITS_PER_WORD | |
3244 | - GET_MODE_SIZE (GET_MODE (inner))); | |
3245 | } | |
230d793d RS |
3246 | /* Note if the plus_constant doesn't make a valid address |
3247 | then this combination won't be accepted. */ | |
3248 | x = gen_rtx (MEM, mode, | |
3249 | plus_constant (XEXP (inner, 0), | |
3250 | (SUBREG_WORD (x) * UNITS_PER_WORD | |
3251 | + endian_offset))); | |
3252 | MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner); | |
3253 | RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner); | |
3254 | MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner); | |
3255 | return x; | |
3256 | } | |
3257 | ||
3258 | /* If we are in a SET_DEST, these other cases can't apply. */ | |
3259 | if (in_dest) | |
3260 | return x; | |
3261 | ||
3262 | /* Changing mode twice with SUBREG => just change it once, | |
3263 | or not at all if changing back to starting mode. */ | |
3264 | if (GET_CODE (SUBREG_REG (x)) == SUBREG) | |
3265 | { | |
3266 | if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x))) | |
3267 | && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0) | |
3268 | return SUBREG_REG (SUBREG_REG (x)); | |
3269 | ||
3270 | SUBST_INT (SUBREG_WORD (x), | |
3271 | SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x))); | |
3272 | SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x))); | |
3273 | } | |
3274 | ||
3275 | /* SUBREG of a hard register => just change the register number | |
3276 | and/or mode. If the hard register is not valid in that mode, | |
26ecfc76 RK |
3277 | suppress this combination. If the hard register is the stack, |
3278 | frame, or argument pointer, leave this as a SUBREG. */ | |
230d793d RS |
3279 | |
3280 | if (GET_CODE (SUBREG_REG (x)) == REG | |
26ecfc76 RK |
3281 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER |
3282 | && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM | |
6d7096b0 DE |
3283 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
3284 | && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM | |
3285 | #endif | |
26ecfc76 RK |
3286 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
3287 | && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM | |
3288 | #endif | |
3289 | && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM) | |
230d793d RS |
3290 | { |
3291 | if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x), | |
3292 | mode)) | |
3293 | return gen_rtx (REG, mode, | |
3294 | REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)); | |
3295 | else | |
3296 | return gen_rtx (CLOBBER, mode, const0_rtx); | |
3297 | } | |
3298 | ||
3299 | /* For a constant, try to pick up the part we want. Handle a full | |
a4bde0b1 RK |
3300 | word and low-order part. Only do this if we are narrowing |
3301 | the constant; if it is being widened, we have no idea what | |
3302 | the extra bits will have been set to. */ | |
230d793d RS |
3303 | |
3304 | if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode | |
3305 | && GET_MODE_SIZE (mode) == UNITS_PER_WORD | |
a4bde0b1 | 3306 | && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD |
230d793d RS |
3307 | && GET_MODE_CLASS (mode) == MODE_INT) |
3308 | { | |
3309 | temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x), | |
5f4f0e22 | 3310 | 0, op0_mode); |
230d793d RS |
3311 | if (temp) |
3312 | return temp; | |
3313 | } | |
3314 | ||
19808e22 RS |
3315 | /* If we want a subreg of a constant, at offset 0, |
3316 | take the low bits. On a little-endian machine, that's | |
3317 | always valid. On a big-endian machine, it's valid | |
3318 | only if the constant's mode fits in one word. */ | |
a4bde0b1 | 3319 | if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x) |
f82da7d2 | 3320 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode) |
f76b9db2 ILT |
3321 | && (! WORDS_BIG_ENDIAN |
3322 | || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD)) | |
230d793d RS |
3323 | return gen_lowpart_for_combine (mode, SUBREG_REG (x)); |
3324 | ||
b65c1b5b RK |
3325 | /* A paradoxical SUBREG of a VOIDmode constant is the same constant, |
3326 | since we are saying that the high bits don't matter. */ | |
3327 | if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode | |
3328 | && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode)) | |
3329 | return SUBREG_REG (x); | |
3330 | ||
87e3e0c1 RK |
3331 | /* Note that we cannot do any narrowing for non-constants since |
3332 | we might have been counting on using the fact that some bits were | |
3333 | zero. We now do this in the SET. */ | |
3334 | ||
230d793d RS |
3335 | break; |
3336 | ||
3337 | case NOT: | |
3338 | /* (not (plus X -1)) can become (neg X). */ | |
3339 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3340 | && XEXP (XEXP (x, 0), 1) == constm1_rtx) | |
8079805d | 3341 | return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3342 | |
3343 | /* Similarly, (not (neg X)) is (plus X -1). */ | |
3344 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
8079805d RK |
3345 | return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), |
3346 | constm1_rtx); | |
230d793d | 3347 | |
d0ab8cd3 RK |
3348 | /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */ |
3349 | if (GET_CODE (XEXP (x, 0)) == XOR | |
3350 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3351 | && (temp = simplify_unary_operation (NOT, mode, | |
3352 | XEXP (XEXP (x, 0), 1), | |
3353 | mode)) != 0) | |
787745f5 | 3354 | return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp); |
d0ab8cd3 | 3355 | |
230d793d RS |
3356 | /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands |
3357 | other than 1, but that is not valid. We could do a similar | |
3358 | simplification for (not (lshiftrt C X)) where C is just the sign bit, | |
3359 | but this doesn't seem common enough to bother with. */ | |
3360 | if (GET_CODE (XEXP (x, 0)) == ASHIFT | |
3361 | && XEXP (XEXP (x, 0), 0) == const1_rtx) | |
0c1c8ea6 | 3362 | return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx), |
8079805d | 3363 | XEXP (XEXP (x, 0), 1)); |
230d793d RS |
3364 | |
3365 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
3366 | && subreg_lowpart_p (XEXP (x, 0)) | |
3367 | && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) | |
3368 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0))))) | |
3369 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT | |
3370 | && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx) | |
3371 | { | |
3372 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0))); | |
3373 | ||
3374 | x = gen_rtx (ROTATE, inner_mode, | |
0c1c8ea6 | 3375 | gen_unary (NOT, inner_mode, inner_mode, const1_rtx), |
230d793d | 3376 | XEXP (SUBREG_REG (XEXP (x, 0)), 1)); |
8079805d | 3377 | return gen_lowpart_for_combine (mode, x); |
230d793d RS |
3378 | } |
3379 | ||
3380 | #if STORE_FLAG_VALUE == -1 | |
3381 | /* (not (comparison foo bar)) can be done by reversing the comparison | |
3382 | code if valid. */ | |
3383 | if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
3384 | && reversible_comparison_p (XEXP (x, 0))) | |
3385 | return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))), | |
3386 | mode, XEXP (XEXP (x, 0), 0), | |
3387 | XEXP (XEXP (x, 0), 1)); | |
500c518b RK |
3388 | |
3389 | /* (ashiftrt foo C) where C is the number of bits in FOO minus 1 | |
3390 | is (lt foo (const_int 0)), so we can perform the above | |
3391 | simplification. */ | |
3392 | ||
3393 | if (XEXP (x, 1) == const1_rtx | |
3394 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3395 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3396 | && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1) | |
3397 | return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx); | |
230d793d RS |
3398 | #endif |
3399 | ||
3400 | /* Apply De Morgan's laws to reduce number of patterns for machines | |
3401 | with negating logical insns (and-not, nand, etc.). If result has | |
3402 | only one NOT, put it first, since that is how the patterns are | |
3403 | coded. */ | |
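/* For instance, (not (ior A B)) becomes (and (not A) (not B)) and
   (not (and A B)) becomes (ior (not A) (not B)); a CONST_INT operand is
   complemented directly and masked to the mode, so in QImode
   (not (and A (const_int 5))) becomes (ior (not A) (const_int 250)),
   with the remaining NOT placed first.  */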
3404 | ||
3405 | if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND) | |
3406 | { | |
3407 | rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1); | |
3408 | ||
3409 | if (GET_CODE (in1) == NOT) | |
3410 | in1 = XEXP (in1, 0); | |
3411 | else | |
3412 | in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1); | |
3413 | ||
3414 | if (GET_CODE (in2) == NOT) | |
3415 | in2 = XEXP (in2, 0); | |
3416 | else if (GET_CODE (in2) == CONST_INT | |
5f4f0e22 CH |
3417 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
3418 | in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2)); | |
230d793d RS |
3419 | else |
3420 | in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2); | |
3421 | ||
3422 | if (GET_CODE (in2) == NOT) | |
3423 | { | |
3424 | rtx tem = in2; | |
3425 | in2 = in1; in1 = tem; | |
3426 | } | |
3427 | ||
8079805d RK |
3428 | return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR, |
3429 | mode, in1, in2); | |
230d793d RS |
3430 | } |
3431 | break; | |
3432 | ||
3433 | case NEG: | |
3434 | /* (neg (plus X 1)) can become (not X). */ | |
3435 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3436 | && XEXP (XEXP (x, 0), 1) == const1_rtx) | |
8079805d | 3437 | return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3438 | |
3439 | /* Similarly, (neg (not X)) is (plus X 1). */ | |
3440 | if (GET_CODE (XEXP (x, 0)) == NOT) | |
8079805d | 3441 | return plus_constant (XEXP (XEXP (x, 0), 0), 1); |
230d793d | 3442 | |
230d793d RS |
3443 | /* (neg (minus X Y)) can become (minus Y X). */ |
3444 | if (GET_CODE (XEXP (x, 0)) == MINUS | |
3ad2180a | 3445 | && (! FLOAT_MODE_P (mode) |
230d793d | 3446 | /* x-y != -(y-x) with IEEE floating point. */ |
7e2a0d8e RK |
3447 | || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT |
3448 | || flag_fast_math)) | |
8079805d RK |
3449 | return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1), |
3450 | XEXP (XEXP (x, 0), 0)); | |
230d793d | 3451 | |
d0ab8cd3 RK |
3452 | /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */ |
3453 | if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx | |
951553af | 3454 | && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1) |
8079805d | 3455 | return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx); |
d0ab8cd3 | 3456 | |
230d793d RS |
3457 | /* NEG commutes with ASHIFT since it is multiplication. Only do this |
3458 | if we can then eliminate the NEG (e.g., | |
3459 | if the operand is a constant). */ | |
3460 | ||
3461 | if (GET_CODE (XEXP (x, 0)) == ASHIFT) | |
3462 | { | |
3463 | temp = simplify_unary_operation (NEG, mode, | |
3464 | XEXP (XEXP (x, 0), 0), mode); | |
3465 | if (temp) | |
3466 | { | |
3467 | SUBST (XEXP (XEXP (x, 0), 0), temp); | |
3468 | return XEXP (x, 0); | |
3469 | } | |
3470 | } | |
3471 | ||
3472 | temp = expand_compound_operation (XEXP (x, 0)); | |
3473 | ||
3474 | /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be | |
3475 | replaced by (lshiftrt X C). This will convert | |
3476 | (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ | |
3477 | ||
3478 | if (GET_CODE (temp) == ASHIFTRT | |
3479 | && GET_CODE (XEXP (temp, 1)) == CONST_INT | |
3480 | && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
8079805d RK |
3481 | return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0), |
3482 | INTVAL (XEXP (temp, 1))); | |
230d793d | 3483 | |
951553af | 3484 | /* If X has only a single bit that might be nonzero, say, bit I, convert |
230d793d RS |
3485 | (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of |
3486 | MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to | |
3487 | (sign_extract X 1 Y). But only do this if TEMP isn't a register | |
3488 | or a SUBREG of one since we'd be making the expression more | |
3489 | complex if it was just a register. */ | |
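/* E.g. in SImode with I = 0 (TEMP known to be 0 or 1), (neg TEMP)
   becomes (ashiftrt (ashift TEMP (const_int 31)) (const_int 31)),
   which maps 0 to 0 and 1 to -1 without a separate negation.  */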
3490 | ||
3491 | if (GET_CODE (temp) != REG | |
3492 | && ! (GET_CODE (temp) == SUBREG | |
3493 | && GET_CODE (SUBREG_REG (temp)) == REG) | |
951553af | 3494 | && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0) |
230d793d RS |
3495 | { |
3496 | rtx temp1 = simplify_shift_const | |
5f4f0e22 CH |
3497 | (NULL_RTX, ASHIFTRT, mode, |
3498 | simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, | |
230d793d RS |
3499 | GET_MODE_BITSIZE (mode) - 1 - i), |
3500 | GET_MODE_BITSIZE (mode) - 1 - i); | |
3501 | ||
3502 | /* If all we did was surround TEMP with the two shifts, we | |
3503 | haven't improved anything, so don't use it. Otherwise, | |
3504 | we are better off with TEMP1. */ | |
3505 | if (GET_CODE (temp1) != ASHIFTRT | |
3506 | || GET_CODE (XEXP (temp1, 0)) != ASHIFT | |
3507 | || XEXP (XEXP (temp1, 0), 0) != temp) | |
8079805d | 3508 | return temp1; |
230d793d RS |
3509 | } |
3510 | break; | |
3511 | ||
2ca9ae17 JW |
3512 | case TRUNCATE: |
3513 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
3514 | SUBST (XEXP (x, 0), | |
3515 | force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), | |
3516 | GET_MODE_MASK (mode), NULL_RTX, 0)); | |
3517 | break; | |
3518 | ||
230d793d RS |
3519 | case FLOAT_TRUNCATE: |
3520 | /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */ | |
3521 | if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND | |
3522 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
3523 | return XEXP (XEXP (x, 0), 0); | |
4635f748 RK |
3524 | |
3525 | /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is |
3526 | (OP:SF foo:SF) if OP is NEG or ABS. */ | |
3527 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
3528 | || GET_CODE (XEXP (x, 0)) == NEG) | |
3529 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND | |
3530 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
0c1c8ea6 RK |
3531 | return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode, |
3532 | XEXP (XEXP (XEXP (x, 0), 0), 0)); | |
1d12df72 RK |
3533 | |
3534 | /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0)) | |
3535 | is (float_truncate:SF x). */ | |
3536 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
3537 | && subreg_lowpart_p (XEXP (x, 0)) | |
3538 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE) | |
3539 | return SUBREG_REG (XEXP (x, 0)); | |
230d793d RS |
3540 | break; |
3541 | ||
3542 | #ifdef HAVE_cc0 | |
3543 | case COMPARE: | |
3544 | /* Convert (compare FOO (const_int 0)) to FOO. We only do this when |
3545 | using cc0; on machines without cc0 we leave it as a COMPARE so we |
3546 | can distinguish it from a register-register copy. */ |
3547 | if (XEXP (x, 1) == const0_rtx) | |
3548 | return XEXP (x, 0); | |
3549 | ||
3550 | /* In IEEE floating point, x-0 is not the same as x. */ | |
3551 | if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT | |
7e2a0d8e RK |
3552 | || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))) |
3553 | || flag_fast_math) | |
230d793d RS |
3554 | && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0)))) |
3555 | return XEXP (x, 0); | |
3556 | break; | |
3557 | #endif | |
3558 | ||
3559 | case CONST: | |
3560 | /* (const (const X)) can become (const X). Do it this way rather than | |
3561 | returning the inner CONST since CONST can be shared with a | |
3562 | REG_EQUAL note. */ | |
3563 | if (GET_CODE (XEXP (x, 0)) == CONST) | |
3564 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
3565 | break; | |
3566 | ||
3567 | #ifdef HAVE_lo_sum | |
3568 | case LO_SUM: | |
3569 | /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we | |
3570 | can add in an offset. find_split_point will split this address up | |
3571 | again if it doesn't match. */ | |
3572 | if (GET_CODE (XEXP (x, 0)) == HIGH | |
3573 | && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))) | |
3574 | return XEXP (x, 1); | |
3575 | break; | |
3576 | #endif | |
3577 | ||
3578 | case PLUS: | |
3579 | /* If we have (plus (plus A const) B), associate it so that CONST is |
3580 | outermost. That's because that's the way indexed addresses are | |
3581 | supposed to appear. This code used to check many more cases, but | |
3582 | they are now checked elsewhere. */ | |
3583 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3584 | && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1))) | |
3585 | return gen_binary (PLUS, mode, | |
3586 | gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), | |
3587 | XEXP (x, 1)), | |
3588 | XEXP (XEXP (x, 0), 1)); | |
3589 | ||
3590 | /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>) | |
3591 | when c is (const_int (pow2 + 1) / 2) is a sign extension of a | |
3592 | bit-field and can be replaced by either a sign_extend or a | |
3593 | sign_extract. The `and' may be a zero_extend. */ | |
3594 | if (GET_CODE (XEXP (x, 0)) == XOR | |
3595 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
3596 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3597 | && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1)) | |
3598 | && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 | |
5f4f0e22 | 3599 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
3600 | && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND |
3601 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
3602 | && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) | |
5f4f0e22 | 3603 | == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) |
230d793d RS |
3604 | || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND |
3605 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) | |
3606 | == i + 1)))) | |
8079805d RK |
3607 | return simplify_shift_const |
3608 | (NULL_RTX, ASHIFTRT, mode, | |
3609 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3610 | XEXP (XEXP (XEXP (x, 0), 0), 0), | |
3611 | GET_MODE_BITSIZE (mode) - (i + 1)), | |
3612 | GET_MODE_BITSIZE (mode) - (i + 1)); | |
230d793d | 3613 | |
bc0776c6 RK |
3614 | /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if |
3615 | C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE | |
3616 | is 1. This produces better code than the alternative immediately | |
3617 | below. */ | |
3618 | if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
3619 | && reversible_comparison_p (XEXP (x, 0)) | |
3620 | && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx) | |
3621 | || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))) | |
8079805d | 3622 | return |
0c1c8ea6 | 3623 | gen_unary (NEG, mode, mode, |
8079805d RK |
3624 | gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))), |
3625 | mode, XEXP (XEXP (x, 0), 0), | |
3626 | XEXP (XEXP (x, 0), 1))); | |
bc0776c6 RK |
3627 | |
3628 | /* If only the low-order bit of X is possibly nonzero, (plus x -1) | |
230d793d RS |
3629 | can become (ashiftrt (ashift (xor x 1) C) C) where C is |
3630 | the bitsize of the mode - 1. This allows simplification of | |
3631 | "a = (b & 8) == 0;" */ | |
3632 | if (XEXP (x, 1) == constm1_rtx | |
3633 | && GET_CODE (XEXP (x, 0)) != REG | |
3634 | && ! (GET_CODE (XEXP (x,0)) == SUBREG | |
3635 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG) | |
951553af | 3636 | && nonzero_bits (XEXP (x, 0), mode) == 1) |
8079805d RK |
3637 | return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, |
3638 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3639 | gen_rtx_combine (XOR, mode, | |
3640 | XEXP (x, 0), const1_rtx), | |
3641 | GET_MODE_BITSIZE (mode) - 1), | |
3642 | GET_MODE_BITSIZE (mode) - 1); | |
02f4ada4 RK |
3643 | |
3644 | /* If we are adding two things that have no bits in common, convert | |
3645 | the addition into an IOR. This will often be further simplified, | |
3646 | for example in cases like ((a & 1) + (a & 2)), which can | |
3647 | become a & 3. */ | |
3648 | ||
ac49a949 | 3649 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
951553af RK |
3650 | && (nonzero_bits (XEXP (x, 0), mode) |
3651 | & nonzero_bits (XEXP (x, 1), mode)) == 0) | |
8079805d | 3652 | return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1)); |
230d793d RS |
3653 | break; |
3654 | ||
3655 | case MINUS: | |
5109d49f RK |
3656 | #if STORE_FLAG_VALUE == 1 |
3657 | /* (minus 1 (comparison foo bar)) can be done by reversing the comparison | |
3658 | code if valid. */ | |
3659 | if (XEXP (x, 0) == const1_rtx | |
3660 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<' | |
3661 | && reversible_comparison_p (XEXP (x, 1))) | |
3662 | return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))), | |
3663 | mode, XEXP (XEXP (x, 1), 0), | |
3664 | XEXP (XEXP (x, 1), 1)); | |
3665 | #endif | |
3666 | ||
230d793d RS |
3667 | /* (minus <foo> (and <foo> (const_int -pow2))) becomes |
3668 | (and <foo> (const_int pow2-1)) */ | |
3669 | if (GET_CODE (XEXP (x, 1)) == AND | |
3670 | && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT | |
3671 | && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 | |
3672 | && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) | |
8079805d RK |
3673 | return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), |
3674 | - INTVAL (XEXP (XEXP (x, 1), 1)) - 1); | |
7bef8680 RK |
3675 | |
3676 | /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for | |
3677 | integers. */ | |
3678 | if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)) | |
8079805d RK |
3679 | return gen_binary (MINUS, mode, |
3680 | gen_binary (MINUS, mode, XEXP (x, 0), | |
3681 | XEXP (XEXP (x, 1), 0)), | |
3682 | XEXP (XEXP (x, 1), 1)); | |
230d793d RS |
3683 | break; |
3684 | ||
3685 | case MULT: | |
3686 | /* If we have (mult (plus A B) C), apply the distributive law and then | |
3687 | the inverse distributive law to see if things simplify. This | |
3688 | occurs mostly in addresses, often when unrolling loops. */ | |
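/* For instance, (mult (plus R (const_int 4)) (const_int 8)) is expanded
   below to (plus (mult R (const_int 8)) (const_int 32)), which is
   usually the better shape for address arithmetic.  */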
3689 | ||
3690 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
3691 | { | |
3692 | x = apply_distributive_law | |
3693 | (gen_binary (PLUS, mode, | |
3694 | gen_binary (MULT, mode, | |
3695 | XEXP (XEXP (x, 0), 0), XEXP (x, 1)), | |
3696 | gen_binary (MULT, mode, | |
3697 | XEXP (XEXP (x, 0), 1), XEXP (x, 1)))); | |
3698 | ||
3699 | if (GET_CODE (x) != MULT) | |
8079805d | 3700 | return x; |
230d793d | 3701 | } |
230d793d RS |
3702 | break; |
3703 | ||
3704 | case UDIV: | |
3705 | /* If this is a divide by a power of two, treat it as a shift if | |
3706 | its first operand is a shift. */ | |
3707 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
3708 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 | |
3709 | && (GET_CODE (XEXP (x, 0)) == ASHIFT | |
3710 | || GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
3711 | || GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3712 | || GET_CODE (XEXP (x, 0)) == ROTATE | |
3713 | || GET_CODE (XEXP (x, 0)) == ROTATERT)) | |
8079805d | 3714 | return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i); |
230d793d RS |
3715 | break; |
3716 | ||
3717 | case EQ: case NE: | |
3718 | case GT: case GTU: case GE: case GEU: | |
3719 | case LT: case LTU: case LE: case LEU: | |
3720 | /* If the first operand is a condition code, we can't do anything | |
3721 | with it. */ | |
3722 | if (GET_CODE (XEXP (x, 0)) == COMPARE | |
3723 | || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC | |
3724 | #ifdef HAVE_cc0 | |
3725 | && XEXP (x, 0) != cc0_rtx | |
3726 | #endif | |
3727 | )) | |
3728 | { | |
3729 | rtx op0 = XEXP (x, 0); | |
3730 | rtx op1 = XEXP (x, 1); | |
3731 | enum rtx_code new_code; | |
3732 | ||
3733 | if (GET_CODE (op0) == COMPARE) | |
3734 | op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); | |
3735 | ||
3736 | /* Simplify our comparison, if possible. */ | |
3737 | new_code = simplify_comparison (code, &op0, &op1); | |
3738 | ||
3739 | #if STORE_FLAG_VALUE == 1 | |
3740 | /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X | |
951553af | 3741 | if only the low-order bit is possibly nonzero in X (such as when |
5109d49f RK |
3742 | X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to |
3743 | (xor X 1) or (minus 1 X); we use the former. Finally, if X is | |
3744 | known to be either 0 or -1, NE becomes a NEG and EQ becomes | |
3745 | (plus X 1). | |
3746 | ||
3747 | Remove any ZERO_EXTRACT we made when thinking this was a | |
3748 | comparison. It may now be simpler to use, e.g., an AND. If a | |
3749 | ZERO_EXTRACT is indeed appropriate, it will be placed back by | |
3750 | the call to make_compound_operation in the SET case. */ | |
3751 | ||
3f508eca | 3752 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
230d793d | 3753 | && op1 == const0_rtx |
5109d49f | 3754 | && nonzero_bits (op0, mode) == 1) |
818b11b9 RK |
3755 | return gen_lowpart_for_combine (mode, |
3756 | expand_compound_operation (op0)); | |
5109d49f RK |
3757 | |
3758 | else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
3759 | && op1 == const0_rtx | |
3760 | && (num_sign_bit_copies (op0, mode) | |
3761 | == GET_MODE_BITSIZE (mode))) | |
3762 | { | |
3763 | op0 = expand_compound_operation (op0); | |
0c1c8ea6 | 3764 | return gen_unary (NEG, mode, mode, |
8079805d | 3765 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
3766 | } |
3767 | ||
3f508eca | 3768 | else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT |
230d793d | 3769 | && op1 == const0_rtx |
5109d49f | 3770 | && nonzero_bits (op0, mode) == 1) |
818b11b9 RK |
3771 | { |
3772 | op0 = expand_compound_operation (op0); | |
8079805d RK |
3773 | return gen_binary (XOR, mode, |
3774 | gen_lowpart_for_combine (mode, op0), | |
3775 | const1_rtx); | |
5109d49f | 3776 | } |
818b11b9 | 3777 | |
5109d49f RK |
3778 | else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT |
3779 | && op1 == const0_rtx | |
3780 | && (num_sign_bit_copies (op0, mode) | |
3781 | == GET_MODE_BITSIZE (mode))) | |
3782 | { | |
3783 | op0 = expand_compound_operation (op0); | |
8079805d | 3784 | return plus_constant (gen_lowpart_for_combine (mode, op0), 1); |
818b11b9 | 3785 | } |
230d793d RS |
3786 | #endif |
3787 | ||
3788 | #if STORE_FLAG_VALUE == -1 | |
5109d49f RK |
3789 | /* If STORE_FLAG_VALUE is -1, we have cases similar to |
3790 | those above. */ | |
3f508eca | 3791 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
230d793d | 3792 | && op1 == const0_rtx |
5109d49f RK |
3793 | && (num_sign_bit_copies (op0, mode) |
3794 | == GET_MODE_BITSIZE (mode))) | |
3795 | return gen_lowpart_for_combine (mode, | |
3796 | expand_compound_operation (op0)); | |
3797 | ||
3798 | else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
3799 | && op1 == const0_rtx | |
3800 | && nonzero_bits (op0, mode) == 1) | |
3801 | { | |
3802 | op0 = expand_compound_operation (op0); | |
0c1c8ea6 | 3803 | return gen_unary (NEG, mode, mode, |
8079805d | 3804 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
3805 | } |
3806 | ||
3807 | else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
3808 | && op1 == const0_rtx | |
3809 | && (num_sign_bit_copies (op0, mode) | |
3810 | == GET_MODE_BITSIZE (mode))) | |
230d793d | 3811 | { |
818b11b9 | 3812 | op0 = expand_compound_operation (op0); |
0c1c8ea6 | 3813 | return gen_unary (NOT, mode, mode, |
8079805d | 3814 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
3815 | } |
3816 | ||
3817 | /* If X is 0/1, (eq X 0) is X-1. */ | |
3818 | else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
3819 | && op1 == const0_rtx | |
3820 | && nonzero_bits (op0, mode) == 1) | |
3821 | { | |
3822 | op0 = expand_compound_operation (op0); | |
8079805d | 3823 | return plus_constant (gen_lowpart_for_combine (mode, op0), -1); |
230d793d RS |
3824 | } |
3825 | #endif | |
3826 | ||
3827 | /* If STORE_FLAG_VALUE says to just test the sign bit and X has just | |
951553af RK |
3828 | one bit that might be nonzero, we can convert (ne x 0) to |
3829 | (ashift x c) where C puts the bit in the sign bit. Remove any | |
3830 | AND with STORE_FLAG_VALUE when we are done, since we are only | |
3831 | going to test the sign bit. */ | |
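/* For example, in a 32-bit mode whose STORE_FLAG_VALUE is the sign bit, if
   only bit 3 of X can be nonzero, (ne X 0) becomes (ashift X 28); the shift
   count used below is GET_MODE_BITSIZE (mode) - 1 - I, where I is that bit
   number.  (Illustrative example.)  */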
3f508eca | 3832 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
5f4f0e22 CH |
3833 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
3834 | && (STORE_FLAG_VALUE | |
3835 | == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) | |
230d793d RS |
3836 | && op1 == const0_rtx |
3837 | && mode == GET_MODE (op0) | |
5109d49f | 3838 | && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0) |
230d793d | 3839 | { |
818b11b9 RK |
3840 | x = simplify_shift_const (NULL_RTX, ASHIFT, mode, |
3841 | expand_compound_operation (op0), | |
230d793d RS |
3842 | GET_MODE_BITSIZE (mode) - 1 - i); |
3843 | if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) | |
3844 | return XEXP (x, 0); | |
3845 | else | |
3846 | return x; | |
3847 | } | |
3848 | ||
3849 | /* If the code changed, return a whole new comparison. */ | |
3850 | if (new_code != code) | |
3851 | return gen_rtx_combine (new_code, mode, op0, op1); | |
3852 | ||
3853 | /* Otherwise, keep this operation, but maybe change its operands. | |
3854 | This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */ | |
3855 | SUBST (XEXP (x, 0), op0); | |
3856 | SUBST (XEXP (x, 1), op1); | |
3857 | } | |
3858 | break; | |
3859 | ||
3860 | case IF_THEN_ELSE: | |
8079805d | 3861 | return simplify_if_then_else (x); |
9210df58 | 3862 | |
8079805d RK |
3863 | case ZERO_EXTRACT: |
3864 | case SIGN_EXTRACT: | |
3865 | case ZERO_EXTEND: | |
3866 | case SIGN_EXTEND: | |
3867 | /* If we are processing SET_DEST, we are done. */ | |
3868 | if (in_dest) | |
3869 | return x; | |
d0ab8cd3 | 3870 | |
8079805d | 3871 | return expand_compound_operation (x); |
d0ab8cd3 | 3872 | |
8079805d RK |
3873 | case SET: |
3874 | return simplify_set (x); | |
1a26b032 | 3875 | |
8079805d RK |
3876 | case AND: |
3877 | case IOR: | |
3878 | case XOR: | |
3879 | return simplify_logical (x, last); | |
d0ab8cd3 | 3880 | |
8079805d RK |
3881 | case ABS: |
3882 | /* (abs (neg <foo>)) -> (abs <foo>) */ | |
3883 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
3884 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
1a26b032 | 3885 | |
8079805d RK |
3886 | /* If operand is something known to be positive, ignore the ABS. */ |
3887 | if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS | |
3888 | || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
3889 | <= HOST_BITS_PER_WIDE_INT) | |
3890 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
3891 | & ((HOST_WIDE_INT) 1 | |
3892 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))) | |
3893 | == 0))) | |
3894 | return XEXP (x, 0); | |
1a26b032 | 3895 | |
1a26b032 | 3896 | |
8079805d RK |
3897 | /* If operand is known to be only -1 or 0, convert ABS to NEG. */ |
3898 | if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode)) | |
3899 | return gen_rtx_combine (NEG, mode, XEXP (x, 0)); | |
1a26b032 | 3900 | |
8079805d | 3901 | break; |
1a26b032 | 3902 | |
8079805d RK |
3903 | case FFS: |
3904 | /* (ffs (*_extend <X>)) = (ffs <X>) */ | |
3905 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
3906 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
3907 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
3908 | break; | |
1a26b032 | 3909 | |
8079805d RK |
3910 | case FLOAT: |
3911 | /* (float (sign_extend <X>)) = (float <X>). */ | |
3912 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND) | |
3913 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
3914 | break; | |
1a26b032 | 3915 | |
8079805d RK |
3916 | case ASHIFT: |
3917 | case LSHIFTRT: | |
3918 | case ASHIFTRT: | |
3919 | case ROTATE: | |
3920 | case ROTATERT: | |
3921 | /* If this is a shift by a constant amount, simplify it. */ | |
3922 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
3923 | return simplify_shift_const (x, code, mode, XEXP (x, 0), | |
3924 | INTVAL (XEXP (x, 1))); | |
3925 | ||
3926 | #ifdef SHIFT_COUNT_TRUNCATED | |
3927 | else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG) | |
3928 | SUBST (XEXP (x, 1), | |
3929 | force_to_mode (XEXP (x, 1), GET_MODE (x), | |
3930 | ((HOST_WIDE_INT) 1 | |
3931 | << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x)))) | |
3932 | - 1, | |
3933 | NULL_RTX, 0)); | |
3934 | #endif | |
3935 | ||
3936 | break; | |
3937 | } | |
3938 | ||
3939 | return x; | |
3940 | } | |
3941 | \f | |
3942 | /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */ | |
5109d49f | 3943 | |
8079805d RK |
3944 | static rtx |
3945 | simplify_if_then_else (x) | |
3946 | rtx x; | |
3947 | { | |
3948 | enum machine_mode mode = GET_MODE (x); | |
3949 | rtx cond = XEXP (x, 0); | |
3950 | rtx true = XEXP (x, 1); | |
3951 | rtx false = XEXP (x, 2); | |
3952 | enum rtx_code true_code = GET_CODE (cond); | |
3953 | int comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
3954 | rtx temp; | |
3955 | int i; | |
3956 | ||
3957 | /* Simplify storing of the truth value. */ | |
3958 | if (comparison_p && true == const_true_rtx && false == const0_rtx) | |
3959 | return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1)); | |
3960 | ||
3961 | /* Also when the truth value has to be reversed. */ | |
3962 | if (comparison_p && reversible_comparison_p (cond) | |
3963 | && true == const0_rtx && false == const_true_rtx) | |
3964 | return gen_binary (reverse_condition (true_code), | |
3965 | mode, XEXP (cond, 0), XEXP (cond, 1)); | |
3966 | ||
3967 | /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used | |
3968 | in it is being compared against certain values. Get the true and false | |
3969 | comparisons and see if that says anything about the value of each arm. */ | |
3970 | ||
3971 | if (comparison_p && reversible_comparison_p (cond) | |
3972 | && GET_CODE (XEXP (cond, 0)) == REG) | |
3973 | { | |
3974 | HOST_WIDE_INT nzb; | |
3975 | rtx from = XEXP (cond, 0); | |
3976 | enum rtx_code false_code = reverse_condition (true_code); | |
3977 | rtx true_val = XEXP (cond, 1); | |
3978 | rtx false_val = true_val; | |
3979 | int swapped = 0; | |
9210df58 | 3980 | |
8079805d | 3981 | /* If FALSE_CODE is EQ, swap the codes and arms. */ |
5109d49f | 3982 | |
8079805d | 3983 | if (false_code == EQ) |
1a26b032 | 3984 | { |
8079805d RK |
3985 | swapped = 1, true_code = EQ, false_code = NE; |
3986 | temp = true, true = false, false = temp; | |
3987 | } | |
5109d49f | 3988 | |
8079805d RK |
3989 | /* If we are comparing against zero and the expression being tested has |
3990 | only a single bit that might be nonzero, that is its value when it is | |
3991 | not equal to zero. Similarly if it is known to be -1 or 0. */ | |
3992 | ||
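/* For example, if the condition is (eq X 0) and nonzero_bits says only bit 2
   of X can be set, then in the arm taken when the condition is false X must
   be 4, and known_cond can fold uses of X in that arm accordingly.
   (Illustrative example.)  */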
3993 | if (true_code == EQ && true_val == const0_rtx | |
3994 | && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0) | |
3995 | false_code = EQ, false_val = GEN_INT (nzb); | |
3996 | else if (true_code == EQ && true_val == const0_rtx | |
3997 | && (num_sign_bit_copies (from, GET_MODE (from)) | |
3998 | == GET_MODE_BITSIZE (GET_MODE (from)))) | |
3999 | false_code = EQ, false_val = constm1_rtx; | |
4000 | ||
4001 | /* Now simplify an arm if we know the value of the register in the | |
4002 | branch and it is used in the arm. Be careful due to the potential | |
4003 | of locally-shared RTL. */ | |
4004 | ||
4005 | if (reg_mentioned_p (from, true)) | |
4006 | true = subst (known_cond (copy_rtx (true), true_code, from, true_val), | |
4007 | pc_rtx, pc_rtx, 0, 0); | |
4008 | if (reg_mentioned_p (from, false)) | |
4009 | false = subst (known_cond (copy_rtx (false), false_code, | |
4010 | from, false_val), | |
4011 | pc_rtx, pc_rtx, 0, 0); | |
4012 | ||
4013 | SUBST (XEXP (x, 1), swapped ? false : true); | |
4014 | SUBST (XEXP (x, 2), swapped ? true : false); | |
4015 | ||
4016 | true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond); | |
4017 | } | |
5109d49f | 4018 | |
8079805d RK |
4019 | /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be |
4020 | reversed, do so to avoid needing two sets of patterns for | |
4021 | subtract-and-branch insns. Similarly if we have a constant in the true | |
4022 | arm, the false arm is the same as the first operand of the comparison, or | |
4023 | the false arm is more complicated than the true arm. */ | |
4024 | ||
4025 | if (comparison_p && reversible_comparison_p (cond) | |
4026 | && (true == pc_rtx | |
4027 | || (CONSTANT_P (true) | |
4028 | && GET_CODE (false) != CONST_INT && false != pc_rtx) | |
4029 | || true == const0_rtx | |
4030 | || (GET_RTX_CLASS (GET_CODE (true)) == 'o' | |
4031 | && GET_RTX_CLASS (GET_CODE (false)) != 'o') | |
4032 | || (GET_CODE (true) == SUBREG | |
4033 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o' | |
4034 | && GET_RTX_CLASS (GET_CODE (false)) != 'o') | |
4035 | || reg_mentioned_p (true, false) | |
4036 | || rtx_equal_p (false, XEXP (cond, 0)))) | |
4037 | { | |
4038 | true_code = reverse_condition (true_code); | |
4039 | SUBST (XEXP (x, 0), | |
4040 | gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0), | |
4041 | XEXP (cond, 1))); | |
5109d49f | 4042 | |
8079805d RK |
4043 | SUBST (XEXP (x, 1), false); |
4044 | SUBST (XEXP (x, 2), true); | |
1a26b032 | 4045 | |
8079805d | 4046 | temp = true, true = false, false = temp, cond = XEXP (x, 0); |
bb821298 RK |
4047 | |
4048 | /* It is possible that the conditional has been simplified out. */ | |
4049 | true_code = GET_CODE (cond); | |
4050 | comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
8079805d | 4051 | } |
abe6e52f | 4052 | |
8079805d | 4053 | /* If the two arms are identical, we don't need the comparison. */ |
1a26b032 | 4054 | |
8079805d RK |
4055 | if (rtx_equal_p (true, false) && ! side_effects_p (cond)) |
4056 | return true; | |
1a26b032 | 4057 | |
5be669c7 RK |
4058 | /* Convert a == b ? b : a to "a". */ |
4059 | if (true_code == EQ && ! side_effects_p (cond) | |
4060 | && rtx_equal_p (XEXP (cond, 0), false) | |
4061 | && rtx_equal_p (XEXP (cond, 1), true)) | |
4062 | return false; | |
4063 | else if (true_code == NE && ! side_effects_p (cond) | |
4064 | && rtx_equal_p (XEXP (cond, 0), true) | |
4065 | && rtx_equal_p (XEXP (cond, 1), false)) | |
4066 | return true; | |
4067 | ||
8079805d RK |
4068 | /* Look for cases where we have (abs x) or (neg (abs X)). */ |
4069 | ||
4070 | if (GET_MODE_CLASS (mode) == MODE_INT | |
4071 | && GET_CODE (false) == NEG | |
4072 | && rtx_equal_p (true, XEXP (false, 0)) | |
4073 | && comparison_p | |
4074 | && rtx_equal_p (true, XEXP (cond, 0)) | |
4075 | && ! side_effects_p (true)) | |
4076 | switch (true_code) | |
4077 | { | |
4078 | case GT: | |
4079 | case GE: | |
0c1c8ea6 | 4080 | return gen_unary (ABS, mode, mode, true); |
8079805d RK |
4081 | case LT: |
4082 | case LE: | |
0c1c8ea6 | 4083 | return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true)); |
8079805d RK |
4084 | } |
4085 | ||
4086 | /* Look for MIN or MAX. */ | |
4087 | ||
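/* For example, (if_then_else (gt A B) A B) becomes (smax A B) when A and B
   match the comparison operands; the unsigned comparisons map to UMAX and
   UMIN in the same way.  (Illustrative example.)  */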
34c8be72 | 4088 | if ((! FLOAT_MODE_P (mode) || flag_fast_math) |
8079805d RK |
4089 | && comparison_p |
4090 | && rtx_equal_p (XEXP (cond, 0), true) | |
4091 | && rtx_equal_p (XEXP (cond, 1), false) | |
4092 | && ! side_effects_p (cond)) | |
4093 | switch (true_code) | |
4094 | { | |
4095 | case GE: | |
4096 | case GT: | |
4097 | return gen_binary (SMAX, mode, true, false); | |
4098 | case LE: | |
4099 | case LT: | |
4100 | return gen_binary (SMIN, mode, true, false); | |
4101 | case GEU: | |
4102 | case GTU: | |
4103 | return gen_binary (UMAX, mode, true, false); | |
4104 | case LEU: | |
4105 | case LTU: | |
4106 | return gen_binary (UMIN, mode, true, false); | |
4107 | } | |
4108 | ||
4109 | #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1 | |
4110 | ||
4111 | /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its | |
4112 | second operand is zero, this can be done as (OP Z (mult COND C2)) where | |
4113 | C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or | |
4114 | SIGN_EXTEND as long as Z is already extended (so we don't destroy it). | |
4115 | We can do this kind of thing in some cases when STORE_FLAG_VALUE is | |
d5a4ebdc | 4116 | neither of the above, but it isn't worth checking for. */ |
8079805d RK |
4117 | |
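/* For instance, with STORE_FLAG_VALUE == 1,
   (if_then_else (eq A B) (plus Z (const_int 4)) Z) can become
   (plus Z (mult (eq A B) (const_int 4))): the MULT is 4 when the condition
   holds and 0 otherwise, and PLUS of 0 is an identity.
   (Illustrative example.)  */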
4118 | if (comparison_p && mode != VOIDmode && ! side_effects_p (x)) | |
4119 | { | |
4120 | rtx t = make_compound_operation (true, SET); | |
4121 | rtx f = make_compound_operation (false, SET); | |
4122 | rtx cond_op0 = XEXP (cond, 0); | |
4123 | rtx cond_op1 = XEXP (cond, 1); | |
4124 | enum rtx_code op, extend_op = NIL; | |
4125 | enum machine_mode m = mode; | |
f24ad0e4 | 4126 | rtx z = 0, c1; |
8079805d | 4127 | |
8079805d RK |
4128 | if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS |
4129 | || GET_CODE (t) == IOR || GET_CODE (t) == XOR | |
4130 | || GET_CODE (t) == ASHIFT | |
4131 | || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT) | |
4132 | && rtx_equal_p (XEXP (t, 0), f)) | |
4133 | c1 = XEXP (t, 1), op = GET_CODE (t), z = f; | |
4134 | ||
4135 | /* If an identity-zero op is commutative, check whether there | |
4136 | would be a match if we swapped the operands. */ | |
4137 | else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR | |
4138 | || GET_CODE (t) == XOR) | |
4139 | && rtx_equal_p (XEXP (t, 1), f)) | |
4140 | c1 = XEXP (t, 0), op = GET_CODE (t), z = f; | |
4141 | else if (GET_CODE (t) == SIGN_EXTEND | |
4142 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4143 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4144 | || GET_CODE (XEXP (t, 0)) == IOR | |
4145 | || GET_CODE (XEXP (t, 0)) == XOR | |
4146 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4147 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4148 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4149 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4150 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4151 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4152 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
4153 | > (GET_MODE_BITSIZE (mode) | |
4154 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) | |
4155 | { | |
4156 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4157 | extend_op = SIGN_EXTEND; | |
4158 | m = GET_MODE (XEXP (t, 0)); | |
1a26b032 | 4159 | } |
8079805d RK |
4160 | else if (GET_CODE (t) == SIGN_EXTEND |
4161 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4162 | || GET_CODE (XEXP (t, 0)) == IOR | |
4163 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4164 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4165 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4166 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4167 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
4168 | > (GET_MODE_BITSIZE (mode) | |
4169 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) | |
4170 | { | |
4171 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4172 | extend_op = SIGN_EXTEND; | |
4173 | m = GET_MODE (XEXP (t, 0)); | |
4174 | } | |
4175 | else if (GET_CODE (t) == ZERO_EXTEND | |
4176 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4177 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4178 | || GET_CODE (XEXP (t, 0)) == IOR | |
4179 | || GET_CODE (XEXP (t, 0)) == XOR | |
4180 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4181 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4182 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4183 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4184 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4185 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4186 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4187 | && ((nonzero_bits (f, GET_MODE (f)) | |
4188 | & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0)))) | |
4189 | == 0)) | |
4190 | { | |
4191 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4192 | extend_op = ZERO_EXTEND; | |
4193 | m = GET_MODE (XEXP (t, 0)); | |
4194 | } | |
4195 | else if (GET_CODE (t) == ZERO_EXTEND | |
4196 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4197 | || GET_CODE (XEXP (t, 0)) == IOR | |
4198 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4199 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4200 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4201 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4202 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4203 | && ((nonzero_bits (f, GET_MODE (f)) | |
4204 | & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1)))) | |
4205 | == 0)) | |
4206 | { | |
4207 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4208 | extend_op = ZERO_EXTEND; | |
4209 | m = GET_MODE (XEXP (t, 0)); | |
4210 | } | |
4211 | ||
4212 | if (z) | |
4213 | { | |
4214 | temp = subst (gen_binary (true_code, m, cond_op0, cond_op1), | |
4215 | pc_rtx, pc_rtx, 0, 0); | |
4216 | temp = gen_binary (MULT, m, temp, | |
4217 | gen_binary (MULT, m, c1, const_true_rtx)); | |
4218 | temp = subst (temp, pc_rtx, pc_rtx, 0, 0); | |
4219 | temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp); | |
4220 | ||
4221 | if (extend_op != NIL) | |
0c1c8ea6 | 4222 | temp = gen_unary (extend_op, mode, m, temp); |
8079805d RK |
4223 | |
4224 | return temp; | |
4225 | } | |
4226 | } | |
5109d49f | 4227 | #endif |
224eeff2 | 4228 | |
8079805d RK |
4229 | /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or |
4230 | 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the | |
4231 | negation of a single bit, we can convert this operation to a shift. We | |
4232 | can actually do this more generally, but it doesn't seem worth it. */ | |
4233 | ||
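/* E.g., if A is known to be 0 or 1,
   (if_then_else (ne A 0) (const_int 8) (const_int 0)) becomes (ashift A 3).
   (Illustrative example.)  */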
4234 | if (true_code == NE && XEXP (cond, 1) == const0_rtx | |
4235 | && false == const0_rtx && GET_CODE (true) == CONST_INT | |
4236 | && ((1 == nonzero_bits (XEXP (cond, 0), mode) | |
4237 | && (i = exact_log2 (INTVAL (true))) >= 0) | |
4238 | || ((num_sign_bit_copies (XEXP (cond, 0), mode) | |
4239 | == GET_MODE_BITSIZE (mode)) | |
4240 | && (i = exact_log2 (- INTVAL (true))) >= 0))) | |
4241 | return | |
4242 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4243 | gen_lowpart_for_combine (mode, XEXP (cond, 0)), i); | |
230d793d | 4244 | |
8079805d RK |
4245 | return x; |
4246 | } | |
4247 | \f | |
4248 | /* Simplify X, a SET expression. Return the new expression. */ | |
230d793d | 4249 | |
8079805d RK |
4250 | static rtx |
4251 | simplify_set (x) | |
4252 | rtx x; | |
4253 | { | |
4254 | rtx src = SET_SRC (x); | |
4255 | rtx dest = SET_DEST (x); | |
4256 | enum machine_mode mode | |
4257 | = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest); | |
4258 | rtx other_insn; | |
4259 | rtx *cc_use; | |
4260 | ||
4261 | /* (set (pc) (return)) gets written as (return). */ | |
4262 | if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN) | |
4263 | return src; | |
230d793d | 4264 | |
87e3e0c1 RK |
4265 | /* Now that we know for sure which bits of SRC we are using, see if we can |
4266 | simplify the expression for the object knowing that we only need the | |
4267 | low-order bits. */ | |
4268 | ||
4269 | if (GET_MODE_CLASS (mode) == MODE_INT) | |
4270 | src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0); | |
4271 | ||
8079805d RK |
4272 | /* If we are setting CC0 or if the source is a COMPARE, look for the use of |
4273 | the comparison result and try to simplify it unless we already have used | |
4274 | undobuf.other_insn. */ | |
4275 | if ((GET_CODE (src) == COMPARE | |
230d793d | 4276 | #ifdef HAVE_cc0 |
8079805d | 4277 | || dest == cc0_rtx |
230d793d | 4278 | #endif |
8079805d RK |
4279 | ) |
4280 | && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0 | |
4281 | && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn) | |
4282 | && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<' | |
c0d3ac4d | 4283 | && rtx_equal_p (XEXP (*cc_use, 0), dest)) |
8079805d RK |
4284 | { |
4285 | enum rtx_code old_code = GET_CODE (*cc_use); | |
4286 | enum rtx_code new_code; | |
4287 | rtx op0, op1; | |
4288 | int other_changed = 0; | |
4289 | enum machine_mode compare_mode = GET_MODE (dest); | |
4290 | ||
4291 | if (GET_CODE (src) == COMPARE) | |
4292 | op0 = XEXP (src, 0), op1 = XEXP (src, 1); | |
4293 | else | |
4294 | op0 = src, op1 = const0_rtx; | |
230d793d | 4295 | |
8079805d RK |
4296 | /* Simplify our comparison, if possible. */ |
4297 | new_code = simplify_comparison (old_code, &op0, &op1); | |
230d793d | 4298 | |
c141a106 | 4299 | #ifdef EXTRA_CC_MODES |
8079805d RK |
4300 | /* If this machine has CC modes other than CCmode, check to see if we |
4301 | need to use a different CC mode here. */ | |
4302 | compare_mode = SELECT_CC_MODE (new_code, op0, op1); | |
c141a106 | 4303 | #endif /* EXTRA_CC_MODES */ |
230d793d | 4304 | |
c141a106 | 4305 | #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES) |
8079805d RK |
4306 | /* If the mode changed, we have to change SET_DEST, the mode in the |
4307 | compare, and the mode in the place SET_DEST is used. If SET_DEST is | |
4308 | a hard register, just build new versions with the proper mode. If it | |
4309 | is a pseudo, we lose unless it is the only time we set the pseudo, in | |
4310 | which case we can safely change its mode. */ | |
4311 | if (compare_mode != GET_MODE (dest)) | |
4312 | { | |
4313 | int regno = REGNO (dest); | |
4314 | rtx new_dest = gen_rtx (REG, compare_mode, regno); | |
4315 | ||
4316 | if (regno < FIRST_PSEUDO_REGISTER | |
4317 | || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest))) | |
230d793d | 4318 | { |
8079805d RK |
4319 | if (regno >= FIRST_PSEUDO_REGISTER) |
4320 | SUBST (regno_reg_rtx[regno], new_dest); | |
230d793d | 4321 | |
8079805d RK |
4322 | SUBST (SET_DEST (x), new_dest); |
4323 | SUBST (XEXP (*cc_use, 0), new_dest); | |
4324 | other_changed = 1; | |
230d793d | 4325 | |
8079805d | 4326 | dest = new_dest; |
230d793d | 4327 | } |
8079805d | 4328 | } |
230d793d RS |
4329 | #endif |
4330 | ||
8079805d RK |
4331 | /* If the code changed, we have to build a new comparison in |
4332 | undobuf.other_insn. */ | |
4333 | if (new_code != old_code) | |
4334 | { | |
4335 | unsigned HOST_WIDE_INT mask; | |
4336 | ||
4337 | SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use), | |
4338 | dest, const0_rtx)); | |
4339 | ||
4340 | /* If the only change we made was to change an EQ into an NE or | |
4341 | vice versa, OP0 has only one bit that might be nonzero, and OP1 | |
4342 | is zero, check if changing the user of the condition code will | |
4343 | produce a valid insn. If it won't, we can keep the original code | |
4344 | in that insn by surrounding our operation with an XOR. */ | |
4345 | ||
4346 | if (((old_code == NE && new_code == EQ) | |
4347 | || (old_code == EQ && new_code == NE)) | |
4348 | && ! other_changed && op1 == const0_rtx | |
4349 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
4350 | && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0) | |
230d793d | 4351 | { |
8079805d | 4352 | rtx pat = PATTERN (other_insn), note = 0; |
a29ca9db | 4353 | int scratches; |
230d793d | 4354 | |
a29ca9db | 4355 | if ((recog_for_combine (&pat, other_insn, ¬e, &scratches) < 0 |
8079805d RK |
4356 | && ! check_asm_operands (pat))) |
4357 | { | |
4358 | PUT_CODE (*cc_use, old_code); | |
4359 | other_insn = 0; | |
230d793d | 4360 | |
8079805d | 4361 | op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask)); |
230d793d | 4362 | } |
230d793d RS |
4363 | } |
4364 | ||
8079805d RK |
4365 | other_changed = 1; |
4366 | } | |
4367 | ||
4368 | if (other_changed) | |
4369 | undobuf.other_insn = other_insn; | |
230d793d RS |
4370 | |
4371 | #ifdef HAVE_cc0 | |
8079805d RK |
4372 | /* If we are now comparing against zero, change our source if |
4373 | needed. If we do not use cc0, we always have a COMPARE. */ | |
4374 | if (op1 == const0_rtx && dest == cc0_rtx) | |
4375 | { | |
4376 | SUBST (SET_SRC (x), op0); | |
4377 | src = op0; | |
4378 | } | |
4379 | else | |
230d793d RS |
4380 | #endif |
4381 | ||
8079805d RK |
4382 | /* Otherwise, if we didn't previously have a COMPARE in the |
4383 | correct mode, we need one. */ | |
4384 | if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) | |
4385 | { | |
4386 | SUBST (SET_SRC (x), | |
4387 | gen_rtx_combine (COMPARE, compare_mode, op0, op1)); | |
4388 | src = SET_SRC (x); | |
230d793d RS |
4389 | } |
4390 | else | |
4391 | { | |
8079805d RK |
4392 | /* Otherwise, update the COMPARE if needed. */ |
4393 | SUBST (XEXP (src, 0), op0); | |
4394 | SUBST (XEXP (src, 1), op1); | |
230d793d | 4395 | } |
8079805d RK |
4396 | } |
4397 | else | |
4398 | { | |
4399 | /* Get SET_SRC in a form where we have placed back any | |
4400 | compound expressions. Then do the checks below. */ | |
4401 | src = make_compound_operation (src, SET); | |
4402 | SUBST (SET_SRC (x), src); | |
4403 | } | |
230d793d | 4404 | |
8079805d RK |
4405 | /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation, |
4406 | and X being a REG or (subreg (reg)), we may be able to convert this to | |
4407 | (set (subreg:m2 x) (op)). | |
df62f951 | 4408 | |
8079805d RK |
4409 | We can always do this if M1 is narrower than M2 because that means that |
4410 | we only care about the low bits of the result. | |
df62f951 | 4411 | |
8079805d RK |
4412 | However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot |
4413 | perform a narrower operation than requested since the high-order bits will | |
4414 | be undefined. On machines where it is defined, this transformation is safe | |
4415 | as long as M1 and M2 have the same number of words. */ | |
df62f951 | 4416 | |
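/* For example, with R a QImode REG,
   (set R (subreg:QI (plus:SI A B) 0)) can become
   (set (subreg:SI R 0) (plus:SI A B)), since only the low byte of the PLUS
   result is needed.  (Illustrative example.)  */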
8079805d RK |
4417 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) |
4418 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o' | |
4419 | && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1)) | |
4420 | / UNITS_PER_WORD) | |
4421 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))) | |
4422 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) | |
8baf60bb | 4423 | #ifndef WORD_REGISTER_OPERATIONS |
8079805d RK |
4424 | && (GET_MODE_SIZE (GET_MODE (src)) |
4425 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
df62f951 | 4426 | #endif |
f507a070 RK |
4427 | #ifdef CLASS_CANNOT_CHANGE_SIZE |
4428 | && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER | |
4429 | && (TEST_HARD_REG_BIT | |
4430 | (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE], | |
4431 | REGNO (dest))) | |
4432 | && (GET_MODE_SIZE (GET_MODE (src)) | |
4433 | != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))) | |
4434 | #endif | |
8079805d RK |
4435 | && (GET_CODE (dest) == REG |
4436 | || (GET_CODE (dest) == SUBREG | |
4437 | && GET_CODE (SUBREG_REG (dest)) == REG))) | |
4438 | { | |
4439 | SUBST (SET_DEST (x), | |
4440 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)), | |
4441 | dest)); | |
4442 | SUBST (SET_SRC (x), SUBREG_REG (src)); | |
4443 | ||
4444 | src = SET_SRC (x), dest = SET_DEST (x); | |
4445 | } | |
df62f951 | 4446 | |
8baf60bb | 4447 | #ifdef LOAD_EXTEND_OP |
8079805d RK |
4448 | /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this |
4449 | would require a paradoxical subreg. Replace the subreg with a | |
4450 | zero_extend to avoid the reload that would otherwise be required. */ | |
4451 | ||
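/* For example, on a target whose loads extend (say LOAD_EXTEND_OP (QImode)
   == ZERO_EXTEND), (set R:SI (subreg:SI (mem:QI ADDR) 0)) becomes
   (set R:SI (zero_extend:SI (mem:QI ADDR))).  (Illustrative example; the
   extension code is target-dependent.)  */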
4452 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) | |
4453 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL | |
4454 | && SUBREG_WORD (src) == 0 | |
4455 | && (GET_MODE_SIZE (GET_MODE (src)) | |
4456 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
4457 | && GET_CODE (SUBREG_REG (src)) == MEM) | |
4458 | { | |
4459 | SUBST (SET_SRC (x), | |
4460 | gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))), | |
4461 | GET_MODE (src), XEXP (src, 0))); | |
4462 | ||
4463 | src = SET_SRC (x); | |
4464 | } | |
230d793d RS |
4465 | #endif |
4466 | ||
8079805d RK |
4467 | /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we |
4468 | are comparing an item known to be 0 or -1 against 0, use a logical | |
4469 | operation instead. Check for one of the arms being an IOR of the other | |
4470 | arm with some value. We compute three terms to be IOR'ed together. In | |
4471 | practice, at most two will be nonzero. Then we do the IOR's. */ | |
4472 | ||
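/* In the simplest case, with A known to be 0 or -1 throughout its mode,
   (if_then_else (ne A 0) B C) can be rewritten as
   (ior (and A B) (and (not A) C)).  (Illustrative example.)  */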
4473 | if (GET_CODE (dest) != PC | |
4474 | && GET_CODE (src) == IF_THEN_ELSE | |
36b8d792 | 4475 | && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT |
8079805d RK |
4476 | && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE) |
4477 | && XEXP (XEXP (src, 0), 1) == const0_rtx | |
6dd49058 | 4478 | && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0)) |
ea414472 DE |
4479 | #ifdef HAVE_conditional_move |
4480 | && ! can_conditionally_move_p (GET_MODE (src)) | |
4481 | #endif | |
8079805d RK |
4482 | && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), |
4483 | GET_MODE (XEXP (XEXP (src, 0), 0))) | |
4484 | == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) | |
4485 | && ! side_effects_p (src)) | |
4486 | { | |
4487 | rtx true = (GET_CODE (XEXP (src, 0)) == NE | |
4488 | ? XEXP (src, 1) : XEXP (src, 2)); | |
4489 | rtx false = (GET_CODE (XEXP (src, 0)) == NE | |
4490 | ? XEXP (src, 2) : XEXP (src, 1)); | |
4491 | rtx term1 = const0_rtx, term2, term3; | |
4492 | ||
4493 | if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false)) | |
4494 | term1 = false, true = XEXP (true, 1), false = const0_rtx; | |
4495 | else if (GET_CODE (true) == IOR | |
4496 | && rtx_equal_p (XEXP (true, 1), false)) | |
4497 | term1 = false, true = XEXP (true, 0), false = const0_rtx; | |
4498 | else if (GET_CODE (false) == IOR | |
4499 | && rtx_equal_p (XEXP (false, 0), true)) | |
4500 | term1 = true, false = XEXP (false, 1), true = const0_rtx; | |
4501 | else if (GET_CODE (false) == IOR | |
4502 | && rtx_equal_p (XEXP (false, 1), true)) | |
4503 | term1 = true, false = XEXP (false, 0), true = const0_rtx; | |
4504 | ||
4505 | term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true); | |
4506 | term3 = gen_binary (AND, GET_MODE (src), | |
0c1c8ea6 | 4507 | gen_unary (NOT, GET_MODE (src), GET_MODE (src), |
8079805d RK |
4508 | XEXP (XEXP (src, 0), 0)), |
4509 | false); | |
4510 | ||
4511 | SUBST (SET_SRC (x), | |
4512 | gen_binary (IOR, GET_MODE (src), | |
4513 | gen_binary (IOR, GET_MODE (src), term1, term2), | |
4514 | term3)); | |
4515 | ||
4516 | src = SET_SRC (x); | |
4517 | } | |
230d793d | 4518 | |
246e00f2 RK |
4519 | /* If either SRC or DEST is a CLOBBER of (const_int 0), make this |
4520 | whole thing fail. */ | |
4521 | if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx) | |
4522 | return src; | |
4523 | else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx) | |
4524 | return dest; | |
4525 | else | |
4526 | /* Convert this into a field assignment operation, if possible. */ | |
4527 | return make_field_assignment (x); | |
8079805d RK |
4528 | } |
4529 | \f | |
4530 | /* Simplify X, an AND, IOR, or XOR operation, and return the simplified | |
4531 | result. LAST is nonzero if this is the last retry. */ | |
4532 | ||
4533 | static rtx | |
4534 | simplify_logical (x, last) | |
4535 | rtx x; | |
4536 | int last; | |
4537 | { | |
4538 | enum machine_mode mode = GET_MODE (x); | |
4539 | rtx op0 = XEXP (x, 0); | |
4540 | rtx op1 = XEXP (x, 1); | |
4541 | ||
4542 | switch (GET_CODE (x)) | |
4543 | { | |
230d793d | 4544 | case AND: |
8079805d RK |
4545 | /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single |
4546 | insn (and may simplify more). */ | |
4547 | if (GET_CODE (op0) == XOR | |
4548 | && rtx_equal_p (XEXP (op0, 0), op1) | |
4549 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
4550 | x = gen_binary (AND, mode, |
4551 | gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1); | |
8079805d RK |
4552 | |
4553 | if (GET_CODE (op0) == XOR | |
4554 | && rtx_equal_p (XEXP (op0, 1), op1) | |
4555 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
4556 | x = gen_binary (AND, mode, |
4557 | gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1); | |
8079805d RK |
4558 | |
4559 | /* Similarly for (~ (A ^ B)) & A. */ | |
4560 | if (GET_CODE (op0) == NOT | |
4561 | && GET_CODE (XEXP (op0, 0)) == XOR | |
4562 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1) | |
4563 | && ! side_effects_p (op1)) | |
4564 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1); | |
4565 | ||
4566 | if (GET_CODE (op0) == NOT | |
4567 | && GET_CODE (XEXP (op0, 0)) == XOR | |
4568 | && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1) | |
4569 | && ! side_effects_p (op1)) | |
4570 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1); | |
4571 | ||
4572 | if (GET_CODE (op1) == CONST_INT) | |
230d793d | 4573 | { |
8079805d | 4574 | x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); |
230d793d RS |
4575 | |
4576 | /* If we have (ior (and (X C1) C2)) and the next restart would be | |
4577 | the last, simplify this by making C1 as small as possible | |
4578 | and then exit. */ | |
8079805d RK |
4579 | if (last |
4580 | && GET_CODE (x) == IOR && GET_CODE (op0) == AND | |
4581 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
4582 | && GET_CODE (op1) == CONST_INT) | |
4583 | return gen_binary (IOR, mode, | |
4584 | gen_binary (AND, mode, XEXP (op0, 0), | |
4585 | GEN_INT (INTVAL (XEXP (op0, 1)) | |
4586 | & ~ INTVAL (op1))), op1); | |
230d793d RS |
4587 | |
4588 | if (GET_CODE (x) != AND) | |
8079805d | 4589 | return x; |
0e32506c RK |
4590 | |
4591 | if (GET_RTX_CLASS (GET_CODE (x)) == 'c' | |
4592 | || GET_RTX_CLASS (GET_CODE (x)) == '2') | |
4593 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
230d793d RS |
4594 | } |
4595 | ||
4596 | /* Convert (A | B) & A to A. */ | |
8079805d RK |
4597 | if (GET_CODE (op0) == IOR |
4598 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
4599 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
4600 | && ! side_effects_p (XEXP (op0, 0)) | |
4601 | && ! side_effects_p (XEXP (op0, 1))) | |
4602 | return op1; | |
230d793d | 4603 | |
d0ab8cd3 | 4604 | /* In the following group of tests (and those in case IOR below), |
230d793d RS |
4605 | we start with some combination of logical operations and apply |
4606 | the distributive law followed by the inverse distributive law. | |
4607 | Most of the time, this results in no change. However, if some of | |
4608 | the operands are the same or inverses of each other, simplifications | |
4609 | will result. | |
4610 | ||
4611 | For example, (and (ior A B) (not B)) can occur as the result of | |
4612 | expanding a bit field assignment. When we apply the distributive | |
4613 | law to this, we get (ior (and A (not B)) (and B (not B))), | |
8079805d | 4614 | which then simplifies to (and A (not B)). |
230d793d | 4615 | |
8079805d | 4616 | If we have (and (ior A B) C), apply the distributive law and then |
230d793d RS |
4617 | the inverse distributive law to see if things simplify. */ |
4618 | ||
8079805d | 4619 | if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR) |
230d793d RS |
4620 | { |
4621 | x = apply_distributive_law | |
8079805d RK |
4622 | (gen_binary (GET_CODE (op0), mode, |
4623 | gen_binary (AND, mode, XEXP (op0, 0), op1), | |
4624 | gen_binary (AND, mode, XEXP (op0, 1), op1))); | |
230d793d | 4625 | if (GET_CODE (x) != AND) |
8079805d | 4626 | return x; |
230d793d RS |
4627 | } |
4628 | ||
8079805d RK |
4629 | if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR) |
4630 | return apply_distributive_law | |
4631 | (gen_binary (GET_CODE (op1), mode, | |
4632 | gen_binary (AND, mode, XEXP (op1, 0), op0), | |
4633 | gen_binary (AND, mode, XEXP (op1, 1), op0))); | |
230d793d RS |
4634 | |
4635 | /* Similarly, taking advantage of the fact that | |
4636 | (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */ | |
4637 | ||
8079805d RK |
4638 | if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR) |
4639 | return apply_distributive_law | |
4640 | (gen_binary (XOR, mode, | |
4641 | gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)), | |
4642 | gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1)))); | |
230d793d | 4643 | |
8079805d RK |
4644 | else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR) |
4645 | return apply_distributive_law | |
4646 | (gen_binary (XOR, mode, | |
4647 | gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)), | |
4648 | gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1)))); | |
230d793d RS |
4649 | break; |
4650 | ||
4651 | case IOR: | |
951553af | 4652 | /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ |
8079805d | 4653 | if (GET_CODE (op1) == CONST_INT |
ac49a949 | 4654 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
8079805d RK |
4655 | && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0) |
4656 | return op1; | |
d0ab8cd3 | 4657 | |
230d793d | 4658 | /* Convert (A & B) | A to A. */ |
8079805d RK |
4659 | if (GET_CODE (op0) == AND |
4660 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
4661 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
4662 | && ! side_effects_p (XEXP (op0, 0)) | |
4663 | && ! side_effects_p (XEXP (op0, 1))) | |
4664 | return op1; | |
230d793d RS |
4665 | |
4666 | /* If we have (ior (and A B) C), apply the distributive law and then | |
4667 | the inverse distributive law to see if things simplify. */ | |
4668 | ||
8079805d | 4669 | if (GET_CODE (op0) == AND) |
230d793d RS |
4670 | { |
4671 | x = apply_distributive_law | |
4672 | (gen_binary (AND, mode, | |
8079805d RK |
4673 | gen_binary (IOR, mode, XEXP (op0, 0), op1), |
4674 | gen_binary (IOR, mode, XEXP (op0, 1), op1))); | |
230d793d RS |
4675 | |
4676 | if (GET_CODE (x) != IOR) | |
8079805d | 4677 | return x; |
230d793d RS |
4678 | } |
4679 | ||
8079805d | 4680 | if (GET_CODE (op1) == AND) |
230d793d RS |
4681 | { |
4682 | x = apply_distributive_law | |
4683 | (gen_binary (AND, mode, | |
8079805d RK |
4684 | gen_binary (IOR, mode, XEXP (op1, 0), op0), |
4685 | gen_binary (IOR, mode, XEXP (op1, 1), op0))); | |
230d793d RS |
4686 | |
4687 | if (GET_CODE (x) != IOR) | |
8079805d | 4688 | return x; |
230d793d RS |
4689 | } |
4690 | ||
4691 | /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the | |
4692 | mode size to (rotate A CX). */ | |
4693 | ||
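/* E.g., in a 32-bit mode,
   (ior (ashift A (const_int 8)) (lshiftrt A (const_int 24))) becomes
   (rotate A (const_int 8)).  (Illustrative example.)  */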
8079805d RK |
4694 | if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT) |
4695 | || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT)) | |
4696 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
4697 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
4698 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
4699 | && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1)) | |
230d793d | 4700 | == GET_MODE_BITSIZE (mode))) |
8079805d RK |
4701 | return gen_rtx (ROTATE, mode, XEXP (op0, 0), |
4702 | (GET_CODE (op0) == ASHIFT | |
4703 | ? XEXP (op0, 1) : XEXP (op1, 1))); | |
230d793d | 4704 | |
71923da7 RK |
4705 | /* If OP0 is (ashiftrt (plus ...) C), it might actually be |
4706 | a (sign_extend (plus ...)). If so, and if OP1 is a CONST_INT whose bits | |
4707 | the PLUS does not affect, the IOR can really be done | |
4708 | as a PLUS and we can associate. We do this by seeing if OP1 | |
4709 | can be safely shifted left C bits. */ | |
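/* For example, (ior (ashiftrt (plus X (const_int 0x1000)) (const_int 8))
   (const_int 3)) can become
   (ashiftrt (plus X (const_int 0x1300)) (const_int 8)), provided bits 8 and
   9 of the PLUS result are known to be zero.  (Illustrative example.)  */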
4710 | if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT | |
4711 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
4712 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
4713 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
4714 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT) | |
4715 | { | |
4716 | int count = INTVAL (XEXP (op0, 1)); | |
4717 | HOST_WIDE_INT mask = INTVAL (op1) << count; | |
4718 | ||
4719 | if (mask >> count == INTVAL (op1) | |
4720 | && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0) | |
4721 | { | |
4722 | SUBST (XEXP (XEXP (op0, 0), 1), | |
4723 | GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask)); | |
4724 | return op0; | |
4725 | } | |
4726 | } | |
230d793d RS |
4727 | break; |
4728 | ||
4729 | case XOR: | |
4730 | /* Convert (XOR (NOT x) (NOT y)) to (XOR x y). | |
4731 | Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for | |
4732 | (NOT y). */ | |
4733 | { | |
4734 | int num_negated = 0; | |
230d793d | 4735 | |
8079805d RK |
4736 | if (GET_CODE (op0) == NOT) |
4737 | num_negated++, op0 = XEXP (op0, 0); | |
4738 | if (GET_CODE (op1) == NOT) | |
4739 | num_negated++, op1 = XEXP (op1, 0); | |
230d793d RS |
4740 | |
4741 | if (num_negated == 2) | |
4742 | { | |
8079805d RK |
4743 | SUBST (XEXP (x, 0), op0); |
4744 | SUBST (XEXP (x, 1), op1); | |
230d793d RS |
4745 | } |
4746 | else if (num_negated == 1) | |
0c1c8ea6 | 4747 | return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1)); |
230d793d RS |
4748 | } |
4749 | ||
4750 | /* Convert (xor (and A B) B) to (and (not A) B). The latter may | |
4751 | correspond to a machine insn or result in further simplifications | |
4752 | if B is a constant. */ | |
4753 | ||
8079805d RK |
4754 | if (GET_CODE (op0) == AND |
4755 | && rtx_equal_p (XEXP (op0, 1), op1) | |
4756 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
4757 | return gen_binary (AND, mode, |
4758 | gen_unary (NOT, mode, mode, XEXP (op0, 0)), | |
8079805d | 4759 | op1); |
230d793d | 4760 | |
8079805d RK |
4761 | else if (GET_CODE (op0) == AND |
4762 | && rtx_equal_p (XEXP (op0, 0), op1) | |
4763 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
4764 | return gen_binary (AND, mode, |
4765 | gen_unary (NOT, mode, mode, XEXP (op0, 1)), | |
8079805d | 4766 | op1); |
230d793d RS |
4767 | |
4768 | #if STORE_FLAG_VALUE == 1 | |
4769 | /* (xor (comparison foo bar) (const_int 1)) can become the reversed | |
4770 | comparison. */ | |
8079805d RK |
4771 | if (op1 == const1_rtx |
4772 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' | |
4773 | && reversible_comparison_p (op0)) | |
4774 | return gen_rtx_combine (reverse_condition (GET_CODE (op0)), | |
4775 | mode, XEXP (op0, 0), XEXP (op0, 1)); | |
500c518b RK |
4776 | |
4777 | /* (lshiftrt foo C) where C is the number of bits in FOO minus 1 | |
4778 | is (lt foo (const_int 0)), so we can perform the above | |
4779 | simplification. */ | |
4780 | ||
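/* For example, in a 32-bit mode (lshiftrt X 31) is 1 exactly when X is
   negative, so (xor (lshiftrt X 31) (const_int 1)) becomes (ge X 0).
   (Illustrative example.)  */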
8079805d RK |
4781 | if (op1 == const1_rtx |
4782 | && GET_CODE (op0) == LSHIFTRT | |
4783 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
4784 | && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
4785 | return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx); | |
230d793d RS |
4786 | #endif |
4787 | ||
4788 | /* (xor (comparison foo bar) (const_int sign-bit)) | |
4789 | can become the reversed comparison when STORE_FLAG_VALUE is the sign bit. */ | |
5f4f0e22 CH |
4790 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4791 | && (STORE_FLAG_VALUE | |
4792 | == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) | |
8079805d RK |
4793 | && op1 == const_true_rtx |
4794 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' | |
4795 | && reversible_comparison_p (op0)) | |
4796 | return gen_rtx_combine (reverse_condition (GET_CODE (op0)), | |
4797 | mode, XEXP (op0, 0), XEXP (op0, 1)); | |
230d793d RS |
4798 | break; |
4799 | } | |
4800 | ||
4801 | return x; | |
4802 | } | |
4803 | \f | |
4804 | /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound | |
4805 | operations" because they can be replaced with two more basic operations. | |
4806 | ZERO_EXTEND is also considered "compound" because it can be replaced with | |
4807 | an AND operation, which is simpler, though only one operation. | |
4808 | ||
4809 | The function expand_compound_operation is called with an rtx expression | |
4810 | and will convert it to the appropriate shifts and AND operations, | |
4811 | simplifying at each stage. | |
4812 | ||
4813 | The function make_compound_operation is called to convert an expression | |
4814 | consisting of shifts and ANDs into the equivalent compound expression. | |
4815 | It is the inverse of this function, loosely speaking. */ | |
4816 | ||
4817 | static rtx | |
4818 | expand_compound_operation (x) | |
4819 | rtx x; | |
4820 | { | |
4821 | int pos = 0, len; | |
4822 | int unsignedp = 0; | |
4823 | int modewidth; | |
4824 | rtx tem; | |
4825 | ||
4826 | switch (GET_CODE (x)) | |
4827 | { | |
4828 | case ZERO_EXTEND: | |
4829 | unsignedp = 1; | |
4830 | case SIGN_EXTEND: | |
75473182 RS |
4831 | /* We can't necessarily use a const_int for a multiword mode; |
4832 | it depends on implicitly extending the value. | |
4833 | Since we don't know the right way to extend it, | |
4834 | we can't tell whether the implicit way is right. | |
4835 | ||
4836 | Even for a mode that is no wider than a const_int, | |
4837 | we can't win, because we need to sign extend one of its bits through | |
4838 | the rest of it, and we don't know which bit. */ | |
230d793d | 4839 | if (GET_CODE (XEXP (x, 0)) == CONST_INT) |
75473182 | 4840 | return x; |
230d793d | 4841 | |
8079805d RK |
4842 | /* Return if (subreg:MODE FROM 0) is not a safe replacement for |
4843 | (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM | |
4844 | because (SUBREG (MEM...)) is guaranteed to cause the MEM to be | |
4845 | reloaded. If not for that, MEM's would very rarely be safe. | |
4846 | ||
4847 | Reject MODEs bigger than a word, because we might not be able | |
4848 | to reference a two-register group starting with an arbitrary register | |
4849 | (and currently gen_lowpart might crash for a SUBREG). */ | |
4850 | ||
4851 | if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD) | |
230d793d RS |
4852 | return x; |
4853 | ||
4854 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); | |
4855 | /* If the inner object has VOIDmode (the only way this can happen | |
4856 | is if it is an ASM_OPERANDS), we can't do anything since we don't | |
4857 | know how much masking to do. */ | |
4858 | if (len == 0) | |
4859 | return x; | |
4860 | ||
4861 | break; | |
4862 | ||
4863 | case ZERO_EXTRACT: | |
4864 | unsignedp = 1; | |
4865 | case SIGN_EXTRACT: | |
4866 | /* If the operand is a CLOBBER, just return it. */ | |
4867 | if (GET_CODE (XEXP (x, 0)) == CLOBBER) | |
4868 | return XEXP (x, 0); | |
4869 | ||
4870 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
4871 | || GET_CODE (XEXP (x, 2)) != CONST_INT | |
4872 | || GET_MODE (XEXP (x, 0)) == VOIDmode) | |
4873 | return x; | |
4874 | ||
4875 | len = INTVAL (XEXP (x, 1)); | |
4876 | pos = INTVAL (XEXP (x, 2)); | |
4877 | ||
4878 | /* If this goes outside the object being extracted, replace the object | |
4879 | with a (use (mem ...)) construct that only combine understands | |
4880 | and is used only for this purpose. */ | |
4881 | if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
4882 | SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0))); | |
4883 | ||
f76b9db2 ILT |
4884 | if (BITS_BIG_ENDIAN) |
4885 | pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; | |
4886 | ||
230d793d RS |
4887 | break; |
4888 | ||
4889 | default: | |
4890 | return x; | |
4891 | } | |
4892 | ||
4893 | /* If we reach here, we want to return a pair of shifts. The inner | |
4894 | shift is a left shift of BITSIZE - POS - LEN bits. The outer | |
4895 | shift is a right shift of BITSIZE - LEN bits. It is arithmetic or | |
4896 | logical depending on the value of UNSIGNEDP. | |
4897 | ||
4898 | If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be | |
4899 | converted into an AND of a shift. | |
4900 | ||
4901 | We must check for the case where the left shift would have a negative | |
4902 | count. This can happen in a case like (x >> 31) & 255 on machines | |
4903 | that can't shift by a constant. On those machines, we would first | |
4904 | combine the shift with the AND to produce a variable-position | |
4905 | extraction. Then the constant of 31 would be substituted in to produce | |
4906 | such a position. */ | |
4907 | ||
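/* As an illustration, with X in SImode and BITS_BIG_ENDIAN clear,
   (sign_extract:SI X 8 4) expands to (ashiftrt (ashift X 20) 24):
   32 - 4 - 8 = 20 for the inner shift and 32 - 8 = 24 for the outer one.
   (Illustrative example.)  */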
4908 | modewidth = GET_MODE_BITSIZE (GET_MODE (x)); | |
4909 | if (modewidth >= pos - len) | |
5f4f0e22 | 4910 | tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT, |
230d793d | 4911 | GET_MODE (x), |
5f4f0e22 CH |
4912 | simplify_shift_const (NULL_RTX, ASHIFT, |
4913 | GET_MODE (x), | |
230d793d RS |
4914 | XEXP (x, 0), |
4915 | modewidth - pos - len), | |
4916 | modewidth - len); | |
4917 | ||
5f4f0e22 CH |
4918 | else if (unsignedp && len < HOST_BITS_PER_WIDE_INT) |
4919 | tem = simplify_and_const_int (NULL_RTX, GET_MODE (x), | |
4920 | simplify_shift_const (NULL_RTX, LSHIFTRT, | |
230d793d RS |
4921 | GET_MODE (x), |
4922 | XEXP (x, 0), pos), | |
5f4f0e22 | 4923 | ((HOST_WIDE_INT) 1 << len) - 1); |
230d793d RS |
4924 | else |
4925 | /* Any other cases we can't handle. */ | |
4926 | return x; | |
4927 | ||
4928 | ||
4929 | /* If we couldn't do this for some reason, return the original | |
4930 | expression. */ | |
4931 | if (GET_CODE (tem) == CLOBBER) | |
4932 | return x; | |
4933 | ||
4934 | return tem; | |
4935 | } | |
4936 | \f | |
4937 | /* X is a SET which contains an assignment of one object into | |
4938 | a part of another (such as a bit-field assignment, STRICT_LOW_PART, | |
4939 | or certain SUBREGS). If possible, convert it into a series of | |
4940 | logical operations. | |
4941 | ||
4942 | We half-heartedly support variable positions, but do not at all | |
4943 | support variable lengths. */ | |
4944 | ||
4945 | static rtx | |
4946 | expand_field_assignment (x) | |
4947 | rtx x; | |
4948 | { | |
4949 | rtx inner; | |
4950 | rtx pos; /* Always counts from low bit. */ | |
4951 | int len; | |
4952 | rtx mask; | |
4953 | enum machine_mode compute_mode; | |
4954 | ||
4955 | /* Loop until we find something we can't simplify. */ | |
4956 | while (1) | |
4957 | { | |
4958 | if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART | |
4959 | && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) | |
4960 | { | |
4961 | inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); | |
4962 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); | |
4d9cfc7b | 4963 | pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0))); |
230d793d RS |
4964 | } |
4965 | else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
4966 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT) | |
4967 | { | |
4968 | inner = XEXP (SET_DEST (x), 0); | |
4969 | len = INTVAL (XEXP (SET_DEST (x), 1)); | |
4970 | pos = XEXP (SET_DEST (x), 2); | |
4971 | ||
4972 | /* If the position is constant and spans the width of INNER, | |
4973 | surround INNER with a USE to indicate this. */ | |
4974 | if (GET_CODE (pos) == CONST_INT | |
4975 | && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) | |
4976 | inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner); | |
4977 | ||
f76b9db2 ILT |
4978 | if (BITS_BIG_ENDIAN) |
4979 | { | |
4980 | if (GET_CODE (pos) == CONST_INT) | |
4981 | pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len | |
4982 | - INTVAL (pos)); | |
4983 | else if (GET_CODE (pos) == MINUS | |
4984 | && GET_CODE (XEXP (pos, 1)) == CONST_INT | |
4985 | && (INTVAL (XEXP (pos, 1)) | |
4986 | == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) | |
4987 | /* If position is ADJUST - X, new position is X. */ | |
4988 | pos = XEXP (pos, 0); | |
4989 | else | |
4990 | pos = gen_binary (MINUS, GET_MODE (pos), | |
4991 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) | |
4992 | - len), | |
4993 | pos); | |
4994 | } | |
230d793d RS |
4995 | } |
4996 | ||
4997 | /* A SUBREG between two modes that occupy the same numbers of words | |
4998 | can be done by moving the SUBREG to the source. */ | |
4999 | else if (GET_CODE (SET_DEST (x)) == SUBREG | |
5000 | && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x))) | |
5001 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
5002 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x)))) | |
5003 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
5004 | { | |
5005 | x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)), | |
5006 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))), | |
5007 | SET_SRC (x))); | |
5008 | continue; | |
5009 | } | |
5010 | else | |
5011 | break; | |
5012 | ||
5013 | while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
5014 | inner = SUBREG_REG (inner); | |
5015 | ||
5016 | compute_mode = GET_MODE (inner); | |
5017 | ||
5018 | /* Compute a mask of LEN bits, if we can do this on the host machine. */ | |
5f4f0e22 CH |
5019 | if (len < HOST_BITS_PER_WIDE_INT) |
5020 | mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1); | |
230d793d RS |
5021 | else |
5022 | break; | |
5023 | ||
5024 | /* Now compute the equivalent expression. Make a copy of INNER | |
5025 | for the SET_DEST in case it is a MEM into which we will substitute; | |
5026 | we don't want shared RTL in that case. */ | |
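/* In C terms the assignment built below is
   INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS),
   where MASK covers the low LEN bits.  (Illustrative restatement.)  */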
5027 | x = gen_rtx (SET, VOIDmode, copy_rtx (inner), | |
5028 | gen_binary (IOR, compute_mode, | |
5029 | gen_binary (AND, compute_mode, | |
5030 | gen_unary (NOT, compute_mode, | |
0c1c8ea6 | 5031 | compute_mode, |
230d793d RS |
5032 | gen_binary (ASHIFT, |
5033 | compute_mode, | |
5034 | mask, pos)), | |
5035 | inner), | |
5036 | gen_binary (ASHIFT, compute_mode, | |
5037 | gen_binary (AND, compute_mode, | |
5038 | gen_lowpart_for_combine | |
5039 | (compute_mode, | |
5040 | SET_SRC (x)), | |
5041 | mask), | |
5042 | pos))); | |
5043 | } | |
5044 | ||
5045 | return x; | |
5046 | } | |
5047 | \f | |
8999a12e RK |
5048 | /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero, |
5049 | it is an RTX that represents a variable starting position; otherwise, | |
5050 | POS is the (constant) starting bit position (counted from the LSB). | |
230d793d RS |
5051 | |
5052 | INNER may be a USE. This will occur when we started with a bitfield | |
5053 | that went outside the boundary of the object in memory, which is | |
5054 | allowed on most machines. To isolate this case, we produce a USE | |
5055 | whose mode is wide enough and surround the MEM with it. The only | |
5056 | code that understands the USE is this routine. If it is not removed, | |
5057 | it will cause the resulting insn not to match. | |
5058 | ||
5059 | UNSIGNEDP is non-zero for an unsigned reference and zero for a | |
5060 | signed reference. | |
5061 | ||
5062 | IN_DEST is non-zero if this is a reference in the destination of a | |
5063 | SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero, | |
5064 | a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will | |
5065 | be used. | |
5066 | ||
5067 | IN_COMPARE is non-zero if we are in a COMPARE. This means that a | |
5068 | ZERO_EXTRACT should be built even for bits starting at bit 0. | |
5069 | ||
76184def DE |
5070 | MODE is the desired mode of the result (if IN_DEST == 0). |
5071 | ||
5072 | The result is an RTX for the extraction or NULL_RTX if the target | |
5073 | can't handle it. */ | |
230d793d RS |
5074 | |
5075 | static rtx | |
5076 | make_extraction (mode, inner, pos, pos_rtx, len, | |
5077 | unsignedp, in_dest, in_compare) | |
5078 | enum machine_mode mode; | |
5079 | rtx inner; | |
5080 | int pos; | |
5081 | rtx pos_rtx; | |
5082 | int len; | |
5083 | int unsignedp; | |
5084 | int in_dest, in_compare; | |
5085 | { | |
94b4b17a RS |
5086 | /* This mode describes the size of the storage area |
5087 | to fetch the overall value from. Within that, we | |
5088 | ignore the POS lowest bits, etc. */ | |
230d793d RS |
5089 | enum machine_mode is_mode = GET_MODE (inner); |
5090 | enum machine_mode inner_mode; | |
d7cd794f RK |
5091 | enum machine_mode wanted_inner_mode = byte_mode; |
5092 | enum machine_mode wanted_inner_reg_mode = word_mode; | |
230d793d RS |
5093 | enum machine_mode pos_mode = word_mode; |
5094 | enum machine_mode extraction_mode = word_mode; | |
5095 | enum machine_mode tmode = mode_for_size (len, MODE_INT, 1); | |
5096 | int spans_byte = 0; | |
5097 | rtx new = 0; | |
8999a12e | 5098 | rtx orig_pos_rtx = pos_rtx; |
6139ff20 | 5099 | int orig_pos; |
230d793d RS |
5100 | |
5101 | /* Get some information about INNER and get the innermost object. */ | |
5102 | if (GET_CODE (inner) == USE) | |
94b4b17a | 5103 | /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */ |
230d793d RS |
5104 | /* We don't need to adjust the position because we set up the USE |
5105 | to pretend that it was a full-word object. */ | |
5106 | spans_byte = 1, inner = XEXP (inner, 0); | |
5107 | else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
94b4b17a RS |
5108 | { |
5109 | /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...), | |
5110 | consider just the QI as the memory to extract from. | |
5111 | The subreg adds or removes high bits; its mode is | |
5112 | irrelevant to the meaning of this extraction, | |
5113 | since POS and LEN count from the lsb. */ | |
5114 | if (GET_CODE (SUBREG_REG (inner)) == MEM) | |
5115 | is_mode = GET_MODE (SUBREG_REG (inner)); | |
5116 | inner = SUBREG_REG (inner); | |
5117 | } | |
230d793d RS |
5118 | |
5119 | inner_mode = GET_MODE (inner); | |
5120 | ||
5121 | if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT) | |
8999a12e | 5122 | pos = INTVAL (pos_rtx), pos_rtx = 0; |
230d793d RS |
5123 | |
5124 | /* See if this can be done without an extraction. We never can if the | |
5125 | width of the field is not the same as that of some integer mode. For | |
5126 | registers, we can only avoid the extraction if the position is at the | |
5127 | low-order bit and this is either not in the destination or we have the | |
5128 | appropriate STRICT_LOW_PART operation available. | |
5129 | ||
5130 | For MEM, we can avoid an extract if the field starts on an appropriate | |
5131 | boundary and we can change the mode of the memory reference. However, | |
5132 | we cannot directly access the MEM if we have a USE and the underlying | |
5133 | MEM is not TMODE. This combination means that MEM was being used in a | |
5134 | context where bits outside its mode were being referenced; that is only | |
5135 | valid in bit-field insns. */ | |
5136 | ||
5137 | if (tmode != BLKmode | |
5138 | && ! (spans_byte && inner_mode != tmode) | |
4d9cfc7b RK |
5139 | && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0 |
5140 | && GET_CODE (inner) != MEM | |
230d793d | 5141 | && (! in_dest |
df62f951 RK |
5142 | || (GET_CODE (inner) == REG |
5143 | && (movstrict_optab->handlers[(int) tmode].insn_code | |
5144 | != CODE_FOR_nothing)))) | |
8999a12e | 5145 | || (GET_CODE (inner) == MEM && pos_rtx == 0 |
dfbe1b2f RK |
5146 | && (pos |
5147 | % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode) | |
5148 | : BITS_PER_UNIT)) == 0 | |
230d793d RS |
5149 | /* We can't do this if we are widening INNER_MODE (it |
5150 | may not be aligned, for one thing). */ | |
5151 | && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) | |
5152 | && (inner_mode == tmode | |
5153 | || (! mode_dependent_address_p (XEXP (inner, 0)) | |
5154 | && ! MEM_VOLATILE_P (inner)))))) | |
5155 | { | |
230d793d RS |
5156 | /* If INNER is a MEM, make a new MEM that encompasses just the desired |
5157 | field. If the original and current mode are the same, we need not | |
5158 | adjust the offset. Otherwise, we do so if bytes are big-endian. | |
5159 | ||
4d9cfc7b RK |
5160 | If INNER is not a MEM, get a piece consisting of just the field |
5161 | of interest (in this case POS % BITS_PER_WORD must be 0). */ | |
230d793d RS |
5162 | |
5163 | if (GET_CODE (inner) == MEM) | |
5164 | { | |
94b4b17a RS |
5165 | int offset; |
5166 | /* POS counts from lsb, but make OFFSET count in memory order. */ | |
5167 | if (BYTES_BIG_ENDIAN) | |
5168 | offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; | |
5169 | else | |
5170 | offset = pos / BITS_PER_UNIT; | |
230d793d RS |
5171 | |
5172 | new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset)); | |
5173 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner); | |
5174 | MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner); | |
5175 | MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner); | |
5176 | } | |
df62f951 | 5177 | else if (GET_CODE (inner) == REG) |
c0d3ac4d RK |
5178 | { |
5179 | /* We can't call gen_lowpart_for_combine here since we always want | |
5180 | a SUBREG and it would sometimes return a new hard register. */ | |
5181 | if (tmode != inner_mode) | |
5182 | new = gen_rtx (SUBREG, tmode, inner, | |
5183 | (WORDS_BIG_ENDIAN | |
5184 | && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD | |
4d9cfc7b RK |
5185 | ? (((GET_MODE_SIZE (inner_mode) |
5186 | - GET_MODE_SIZE (tmode)) | |
5187 | / UNITS_PER_WORD) | |
5188 | - pos / BITS_PER_WORD) | |
5189 | : pos / BITS_PER_WORD)); | |
c0d3ac4d RK |
5190 | else |
5191 | new = inner; | |
5192 | } | |
230d793d | 5193 | else |
6139ff20 RK |
5194 | new = force_to_mode (inner, tmode, |
5195 | len >= HOST_BITS_PER_WIDE_INT | |
5196 | ? GET_MODE_MASK (tmode) | |
5197 | : ((HOST_WIDE_INT) 1 << len) - 1, | |
e3d616e3 | 5198 | NULL_RTX, 0); |
230d793d RS |
5199 | |
5200 | /* If this extraction is going into the destination of a SET, | |
5201 | make a STRICT_LOW_PART unless we made a MEM. */ | |
5202 | ||
5203 | if (in_dest) | |
5204 | return (GET_CODE (new) == MEM ? new | |
77fa0940 RK |
5205 | : (GET_CODE (new) != SUBREG |
5206 | ? gen_rtx (CLOBBER, tmode, const0_rtx) | |
5207 | : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new))); | |
230d793d RS |
5208 | |
5209 | /* Otherwise, sign- or zero-extend unless we already are in the | |
5210 | proper mode. */ | |
5211 | ||
5212 | return (mode == tmode ? new | |
5213 | : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, | |
5214 | mode, new)); | |
5215 | } | |
5216 | ||
cc471082 RS |
5217 | /* Unless this is a COMPARE or we have a funny memory reference, |
5218 | don't do anything with zero-extending field extracts starting at | |
5219 | the low-order bit since they are simple AND operations. */ | |
8999a12e RK |
5220 | if (pos_rtx == 0 && pos == 0 && ! in_dest |
5221 | && ! in_compare && ! spans_byte && unsignedp) | |
230d793d RS |
5222 | return 0; |
5223 | ||
e7373556 RK |
5224 | /* Unless we are allowed to span bytes, reject this if we would be |
5225 | spanning bytes or if the position is not a constant and the length | |
5226 | is not 1. In all other cases, we would only be going outside | |
5227 | our object in cases when an original shift would have been | |
5228 | undefined. */ | |
5229 | if (! spans_byte | |
5230 | && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) | |
5231 | || (pos_rtx != 0 && len != 1))) | |
5232 | return 0; | |
5233 | ||
d7cd794f | 5234 | /* Get the mode to use should INNER not be a MEM, the mode for the position, |
230d793d RS |
5235 | and the mode for the result. */ |
5236 | #ifdef HAVE_insv | |
5237 | if (in_dest) | |
5238 | { | |
d7cd794f | 5239 | wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0]; |
230d793d RS |
5240 | pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2]; |
5241 | extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3]; | |
5242 | } | |
5243 | #endif | |
5244 | ||
5245 | #ifdef HAVE_extzv | |
5246 | if (! in_dest && unsignedp) | |
5247 | { | |
d7cd794f | 5248 | wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1]; |
230d793d RS |
5249 | pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3]; |
5250 | extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0]; | |
5251 | } | |
5252 | #endif | |
5253 | ||
5254 | #ifdef HAVE_extv | |
5255 | if (! in_dest && ! unsignedp) | |
5256 | { | |
d7cd794f | 5257 | wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1]; |
230d793d RS |
5258 | pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3]; |
5259 | extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0]; | |
5260 | } | |
5261 | #endif | |
5262 | ||
5263 | /* Never narrow an object, since that might not be safe. */ | |
5264 | ||
5265 | if (mode != VOIDmode | |
5266 | && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode)) | |
5267 | extraction_mode = mode; | |
5268 | ||
5269 | if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode | |
5270 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) | |
5271 | pos_mode = GET_MODE (pos_rtx); | |
5272 | ||
d7cd794f RK |
5273 | /* If this is not from memory, the desired mode is wanted_inner_reg_mode; |
5274 | if we have to change the mode of memory and cannot, the desired mode is | |
5275 | EXTRACTION_MODE. */ | |
5276 | if (GET_CODE (inner) != MEM) | |
5277 | wanted_inner_mode = wanted_inner_reg_mode; | |
5278 | else if (inner_mode != wanted_inner_mode | |
5279 | && (mode_dependent_address_p (XEXP (inner, 0)) | |
5280 | || MEM_VOLATILE_P (inner))) | |
5281 | wanted_inner_mode = extraction_mode; | |
230d793d | 5282 | |
6139ff20 RK |
5283 | orig_pos = pos; |
5284 | ||
f76b9db2 ILT |
5285 | if (BITS_BIG_ENDIAN) |
5286 | { | |
cf54c2cd DE |
5287 | /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to |
5288 | BITS_BIG_ENDIAN style. If position is constant, compute new | |
5289 | position. Otherwise, build subtraction. | |
5290 | Note that POS is relative to the mode of the original argument. | |
5291 | If it's a MEM we need to recompute POS relative to that. | |
5292 | However, if we're extracting from (or inserting into) a register, | |
5293 | we want to recompute POS relative to wanted_inner_mode. */ | |
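/* For example, with a 32-bit register, LEN == 8 and a little-endian POS
   of 0 (the least significant byte), the converted position is
   32 - 8 - 0 == 24.  */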
5294 | int width = (GET_CODE (inner) == MEM | |
5295 | ? GET_MODE_BITSIZE (is_mode) | |
5296 | : GET_MODE_BITSIZE (wanted_inner_mode)); | |
5297 | ||
f76b9db2 | 5298 | if (pos_rtx == 0) |
cf54c2cd | 5299 | pos = width - len - pos; |
f76b9db2 ILT |
5300 | else |
5301 | pos_rtx | |
5302 | = gen_rtx_combine (MINUS, GET_MODE (pos_rtx), | |
cf54c2cd DE |
5303 | GEN_INT (width - len), pos_rtx); |
5304 | /* POS may be less than 0 now, but we check for that below. | |
5305 | Note that it can only be less than 0 if GET_CODE (inner) != MEM. */ | |
f76b9db2 | 5306 | } |
230d793d RS |
5307 | |
5308 | /* If INNER has a wider mode, make it smaller. If this is a constant | |
5309 | extract, try to adjust the byte to point to the byte containing | |
5310 | the value. */ | |
d7cd794f RK |
5311 | if (wanted_inner_mode != VOIDmode |
5312 | && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode) | |
230d793d | 5313 | && ((GET_CODE (inner) == MEM |
d7cd794f | 5314 | && (inner_mode == wanted_inner_mode |
230d793d RS |
5315 | || (! mode_dependent_address_p (XEXP (inner, 0)) |
5316 | && ! MEM_VOLATILE_P (inner)))))) | |
5317 | { | |
5318 | int offset = 0; | |
5319 | ||
5320 | /* The computations below will be correct if the machine is big | |
5321 | endian in both bits and bytes or little endian in bits and bytes. | |
5322 | If it is mixed, we must adjust. */ | |
5323 | ||
230d793d RS |
5324 | /* If bytes are big endian and we had a paradoxical SUBREG, we must |
5325 | adjust OFFSET to compensate. */ | |
f76b9db2 ILT |
5326 | if (BYTES_BIG_ENDIAN |
5327 | && ! spans_byte | |
230d793d RS |
5328 | && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode)) |
5329 | offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode); | |
230d793d RS |
5330 | |
5331 | /* If this is a constant position, we can move to the desired byte. */ | |
8999a12e | 5332 | if (pos_rtx == 0) |
230d793d RS |
5333 | { |
5334 | offset += pos / BITS_PER_UNIT; | |
d7cd794f | 5335 | pos %= GET_MODE_BITSIZE (wanted_inner_mode); |
230d793d RS |
5336 | } |
5337 | ||
f76b9db2 ILT |
5338 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN |
5339 | && ! spans_byte | |
d7cd794f | 5340 | && is_mode != wanted_inner_mode) |
c6b3f1f2 | 5341 | offset = (GET_MODE_SIZE (is_mode) |
d7cd794f | 5342 | - GET_MODE_SIZE (wanted_inner_mode) - offset); |
c6b3f1f2 | 5343 | |
d7cd794f | 5344 | if (offset != 0 || inner_mode != wanted_inner_mode) |
230d793d | 5345 | { |
d7cd794f | 5346 | rtx newmem = gen_rtx (MEM, wanted_inner_mode, |
230d793d RS |
5347 | plus_constant (XEXP (inner, 0), offset)); |
5348 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner); | |
5349 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner); | |
5350 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner); | |
5351 | inner = newmem; | |
5352 | } | |
5353 | } | |
5354 | ||
9e74dc41 RK |
5355 | /* If INNER is not memory, we can always get it into the proper mode. If we |
5356 | are changing its mode, POS must be a constant and smaller than the size | |
5357 | of the new mode. */ | |
230d793d | 5358 | else if (GET_CODE (inner) != MEM) |
9e74dc41 RK |
5359 | { |
5360 | if (GET_MODE (inner) != wanted_inner_mode | |
5361 | && (pos_rtx != 0 | |
5362 | || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) | |
5363 | return 0; | |
5364 | ||
5365 | inner = force_to_mode (inner, wanted_inner_mode, | |
5366 | pos_rtx | |
5367 | || len + orig_pos >= HOST_BITS_PER_WIDE_INT | |
5368 | ? GET_MODE_MASK (wanted_inner_mode) | |
5369 | : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos, | |
5370 | NULL_RTX, 0); | |
5371 | } | |
230d793d RS |
5372 | |
5373 | /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we | |
5374 | have to zero extend. Otherwise, we can just use a SUBREG. */ | |
8999a12e | 5375 | if (pos_rtx != 0 |
230d793d RS |
5376 | && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx))) |
5377 | pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx); | |
8999a12e | 5378 | else if (pos_rtx != 0 |
230d793d RS |
5379 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) |
5380 | pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx); | |
5381 | ||
8999a12e RK |
5382 | /* Make POS_RTX unless we already have it and it is correct. If we don't |
5383 | have a POS_RTX but we do have an ORIG_POS_RTX, the latter must | |
5384 | be a CONST_INT. */ | |
5385 | if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos) | |
5386 | pos_rtx = orig_pos_rtx; | |
5387 | ||
5388 | else if (pos_rtx == 0) | |
5f4f0e22 | 5389 | pos_rtx = GEN_INT (pos); |
230d793d RS |
5390 | |
5391 | /* Make the required operation. See if we can use existing rtx. */ | |
5392 | new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT, | |
5f4f0e22 | 5393 | extraction_mode, inner, GEN_INT (len), pos_rtx); |
230d793d RS |
5394 | if (! in_dest) |
5395 | new = gen_lowpart_for_combine (mode, new); | |
5396 | ||
5397 | return new; | |
5398 | } | |
5399 | \f | |
71923da7 RK |
5400 | /* See if X contains an ASHIFT of COUNT or more bits that can be commuted |
5401 | with any other operations in X. Return X without that shift if so. */ | |
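/* For example, a call on (plus (ashift Y (const_int 3)) (const_int 8))
   with COUNT == 3 can return (plus Y (const_int 1)), since shifting that
   result left by 3 recreates the original value (Y here is only an
   illustrative operand).  */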
5402 | ||
5403 | static rtx | |
5404 | extract_left_shift (x, count) | |
5405 | rtx x; | |
5406 | int count; | |
5407 | { | |
5408 | enum rtx_code code = GET_CODE (x); | |
5409 | enum machine_mode mode = GET_MODE (x); | |
5410 | rtx tem; | |
5411 | ||
5412 | switch (code) | |
5413 | { | |
5414 | case ASHIFT: | |
5415 | /* This is the shift itself. If it is wide enough, we will return | |
5416 | either the value being shifted if the shift count is equal to | |
5417 | COUNT or a shift for the difference. */ | |
5418 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5419 | && INTVAL (XEXP (x, 1)) >= count) | |
5420 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), | |
5421 | INTVAL (XEXP (x, 1)) - count); | |
5422 | break; | |
5423 | ||
5424 | case NEG: case NOT: | |
5425 | if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0) | |
0c1c8ea6 | 5426 | return gen_unary (code, mode, mode, tem); |
71923da7 RK |
5427 | |
5428 | break; | |
5429 | ||
5430 | case PLUS: case IOR: case XOR: case AND: | |
5431 | /* If we can safely shift this constant and we find the inner shift, | |
5432 | make a new operation. */ | |
5433 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5434 | && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0 | |
5435 | && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) | |
5436 | return gen_binary (code, mode, tem, | |
5437 | GEN_INT (INTVAL (XEXP (x, 1)) >> count)); | |
5438 | ||
5439 | break; | |
5440 | } | |
5441 | ||
5442 | return 0; | |
5443 | } | |
5444 | \f | |
230d793d RS |
5445 | /* Look at the expression rooted at X. Look for expressions |
5446 | equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND. | |
5447 | Form these expressions. | |
5448 | ||
5449 | Return the new rtx, usually just X. | |
5450 | ||
5451 | Also, for machines like the Vax that don't have logical shift insns, | |
5452 | try to convert logical to arithmetic shift operations in cases where | |
5453 | they are equivalent. This undoes the canonicalizations to logical | |
5454 | shifts done elsewhere. | |
5455 | ||
5456 | We try, as much as possible, to re-use rtl expressions to save memory. | |
5457 | ||
5458 | IN_CODE says what kind of expression we are processing. Normally, it is | |
42495ca0 RK |
5459 | SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two | |
5460 | being kludges), it is MEM. When processing the arguments of a comparison | |
230d793d RS |
5461 | or a COMPARE against zero, it is COMPARE. */ |
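/* For example, inside a MEM a scaling shift such as (ashift X (const_int 2))
   is rewritten as (mult X (const_int 4)), and in SImode
   (ashiftrt (ashift X (const_int 24)) (const_int 24)) is typically
   recognized as a sign extension of the low byte of X.  */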
5462 | ||
5463 | static rtx | |
5464 | make_compound_operation (x, in_code) | |
5465 | rtx x; | |
5466 | enum rtx_code in_code; | |
5467 | { | |
5468 | enum rtx_code code = GET_CODE (x); | |
5469 | enum machine_mode mode = GET_MODE (x); | |
5470 | int mode_width = GET_MODE_BITSIZE (mode); | |
71923da7 | 5471 | rtx rhs, lhs; |
230d793d | 5472 | enum rtx_code next_code; |
f24ad0e4 | 5473 | int i; |
230d793d | 5474 | rtx new = 0; |
280f58ba | 5475 | rtx tem; |
230d793d RS |
5476 | char *fmt; |
5477 | ||
5478 | /* Select the code to be used in recursive calls. Once we are inside an | |
5479 | address, we stay there. If we have a comparison, set to COMPARE, | |
5480 | but once inside, go back to our default of SET. */ | |
5481 | ||
42495ca0 | 5482 | next_code = (code == MEM || code == PLUS || code == MINUS ? MEM |
230d793d RS |
5483 | : ((code == COMPARE || GET_RTX_CLASS (code) == '<') |
5484 | && XEXP (x, 1) == const0_rtx) ? COMPARE | |
5485 | : in_code == COMPARE ? SET : in_code); | |
5486 | ||
5487 | /* Process depending on the code of this operation. If NEW is set | |
5488 | non-zero, it will be returned. */ | |
5489 | ||
5490 | switch (code) | |
5491 | { | |
5492 | case ASHIFT: | |
230d793d RS |
5493 | /* Convert shifts by constants into multiplications if inside |
5494 | an address. */ | |
5495 | if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 5496 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
230d793d | 5497 | && INTVAL (XEXP (x, 1)) >= 0) |
280f58ba RK |
5498 | { |
5499 | new = make_compound_operation (XEXP (x, 0), next_code); | |
5500 | new = gen_rtx_combine (MULT, mode, new, | |
5501 | GEN_INT ((HOST_WIDE_INT) 1 | |
5502 | << INTVAL (XEXP (x, 1)))); | |
5503 | } | |
230d793d RS |
5504 | break; |
5505 | ||
5506 | case AND: | |
5507 | /* If the second operand is not a constant, we can't do anything | |
5508 | with it. */ | |
5509 | if (GET_CODE (XEXP (x, 1)) != CONST_INT) | |
5510 | break; | |
5511 | ||
5512 | /* If the constant is a power of two minus one and the first operand | |
5513 | is a logical right shift, make an extraction. */ | |
5514 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
5515 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
5516 | { |
5517 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
5518 | new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1, | |
5519 | 0, in_code == COMPARE); | |
5520 | } | |
dfbe1b2f | 5521 | |
230d793d RS |
5522 | /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */ |
5523 | else if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5524 | && subreg_lowpart_p (XEXP (x, 0)) | |
5525 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT | |
5526 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
5527 | { |
5528 | new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0), | |
5529 | next_code); | |
2f99f437 | 5530 | new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0, |
280f58ba RK |
5531 | XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1, |
5532 | 0, in_code == COMPARE); | |
5533 | } | |
45620ed4 | 5534 | /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */ |
c2f9f64e JW |
5535 | else if ((GET_CODE (XEXP (x, 0)) == XOR |
5536 | || GET_CODE (XEXP (x, 0)) == IOR) | |
5537 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT | |
5538 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT | |
5539 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
5540 | { | |
5541 | /* Apply the distributive law, and then try to make extractions. */ | |
5542 | new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode, | |
5543 | gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0), | |
5544 | XEXP (x, 1)), | |
5545 | gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1), | |
5546 | XEXP (x, 1))); | |
5547 | new = make_compound_operation (new, in_code); | |
5548 | } | |
a7c99304 RK |
5549 | |
5550 | /* If we have (and (rotate X C) M) and C is larger than the number | |
5551 | of bits in M, this is an extraction. */ | |
5552 | ||
5553 | else if (GET_CODE (XEXP (x, 0)) == ROTATE | |
5554 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
5555 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 | |
5556 | && i <= INTVAL (XEXP (XEXP (x, 0), 1))) | |
280f58ba RK |
5557 | { |
5558 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
5559 | new = make_extraction (mode, new, | |
5560 | (GET_MODE_BITSIZE (mode) | |
5561 | - INTVAL (XEXP (XEXP (x, 0), 1))), | |
5562 | NULL_RTX, i, 1, 0, in_code == COMPARE); | |
5563 | } | |
a7c99304 RK |
5564 | |
5565 | /* On machines without logical shifts, if the operand of the AND is | |
230d793d RS |
5566 | a logical shift and our mask turns off all the propagated sign |
5567 | bits, we can replace the logical shift with an arithmetic shift. */ | |
d0ab8cd3 RK |
5568 | else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing |
5569 | && (lshr_optab->handlers[(int) mode].insn_code | |
5570 | == CODE_FOR_nothing) | |
230d793d RS |
5571 | && GET_CODE (XEXP (x, 0)) == LSHIFTRT |
5572 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
5573 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
5f4f0e22 CH |
5574 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
5575 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 5576 | { |
5f4f0e22 | 5577 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
5578 | |
5579 | mask >>= INTVAL (XEXP (XEXP (x, 0), 1)); | |
5580 | if ((INTVAL (XEXP (x, 1)) & ~mask) == 0) | |
5581 | SUBST (XEXP (x, 0), | |
280f58ba RK |
5582 | gen_rtx_combine (ASHIFTRT, mode, |
5583 | make_compound_operation (XEXP (XEXP (x, 0), 0), | |
5584 | next_code), | |
230d793d RS |
5585 | XEXP (XEXP (x, 0), 1))); |
5586 | } | |
5587 | ||
5588 | /* If the constant is one less than a power of two, this might be | |
5589 | representable by an extraction even if no shift is present. | |
5590 | If it doesn't end up being a ZERO_EXTEND, we will ignore it unless | |
5591 | we are in a COMPARE. */ | |
5592 | else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
5593 | new = make_extraction (mode, |
5594 | make_compound_operation (XEXP (x, 0), | |
5595 | next_code), | |
5596 | 0, NULL_RTX, i, 1, 0, in_code == COMPARE); | |
230d793d RS |
5597 | |
5598 | /* If we are in a comparison and this is an AND with a power of two, | |
5599 | convert this into the appropriate bit extract. */ | |
5600 | else if (in_code == COMPARE | |
5601 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
280f58ba RK |
5602 | new = make_extraction (mode, |
5603 | make_compound_operation (XEXP (x, 0), | |
5604 | next_code), | |
5605 | i, NULL_RTX, 1, 1, 0, 1); | |
230d793d RS |
5606 | |
5607 | break; | |
5608 | ||
5609 | case LSHIFTRT: | |
5610 | /* If the sign bit is known to be zero, replace this with an | |
5611 | arithmetic shift. */ | |
d0ab8cd3 RK |
5612 | if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing |
5613 | && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing | |
5f4f0e22 | 5614 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 5615 | && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d | 5616 | { |
280f58ba RK |
5617 | new = gen_rtx_combine (ASHIFTRT, mode, |
5618 | make_compound_operation (XEXP (x, 0), | |
5619 | next_code), | |
5620 | XEXP (x, 1)); | |
230d793d RS |
5621 | break; |
5622 | } | |
5623 | ||
5624 | /* ... fall through ... */ | |
5625 | ||
5626 | case ASHIFTRT: | |
71923da7 RK |
5627 | lhs = XEXP (x, 0); |
5628 | rhs = XEXP (x, 1); | |
5629 | ||
230d793d RS |
5630 | /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, |
5631 | this is a SIGN_EXTRACT. */ | |
71923da7 RK |
5632 | if (GET_CODE (rhs) == CONST_INT |
5633 | && GET_CODE (lhs) == ASHIFT | |
5634 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
5635 | && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))) | |
280f58ba | 5636 | { |
71923da7 | 5637 | new = make_compound_operation (XEXP (lhs, 0), next_code); |
280f58ba | 5638 | new = make_extraction (mode, new, |
71923da7 RK |
5639 | INTVAL (rhs) - INTVAL (XEXP (lhs, 1)), |
5640 | NULL_RTX, mode_width - INTVAL (rhs), | |
d0ab8cd3 RK |
5641 | code == LSHIFTRT, 0, in_code == COMPARE); |
5642 | } | |
5643 | ||
71923da7 RK |
5644 | /* See if we have operations between an ASHIFTRT and an ASHIFT. |
5645 | If so, try to merge the shifts into a SIGN_EXTEND. We could | |
5646 | also do this for some cases of SIGN_EXTRACT, but it doesn't | |
5647 | seem worth the effort; the case checked for occurs on Alpha. */ | |
5648 | ||
5649 | if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o' | |
5650 | && ! (GET_CODE (lhs) == SUBREG | |
5651 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o')) | |
5652 | && GET_CODE (rhs) == CONST_INT | |
5653 | && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT | |
5654 | && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0) | |
5655 | new = make_extraction (mode, make_compound_operation (new, next_code), | |
5656 | 0, NULL_RTX, mode_width - INTVAL (rhs), | |
5657 | code == LSHIFTRT, 0, in_code == COMPARE); | |
5658 | ||
230d793d | 5659 | break; |
280f58ba RK |
5660 | |
5661 | case SUBREG: | |
5662 | /* Call ourselves recursively on the inner expression. If we are | |
5663 | narrowing the object and it has a different RTL code from | |
5664 | what it originally did, do this SUBREG as a force_to_mode. */ | |
5665 | ||
0a5cbff6 | 5666 | tem = make_compound_operation (SUBREG_REG (x), in_code); |
280f58ba RK |
5667 | if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x)) |
5668 | && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem)) | |
5669 | && subreg_lowpart_p (x)) | |
0a5cbff6 RK |
5670 | { |
5671 | rtx newer = force_to_mode (tem, mode, | |
e3d616e3 | 5672 | GET_MODE_MASK (mode), NULL_RTX, 0); |
0a5cbff6 RK |
5673 | |
5674 | /* If we have something other than a SUBREG, we might have | |
5675 | done an expansion, so rerun ourselves. */ | |
5676 | if (GET_CODE (newer) != SUBREG) | |
5677 | newer = make_compound_operation (newer, in_code); | |
5678 | ||
5679 | return newer; | |
5680 | } | |
230d793d RS |
5681 | } |
5682 | ||
5683 | if (new) | |
5684 | { | |
df62f951 | 5685 | x = gen_lowpart_for_combine (mode, new); |
230d793d RS |
5686 | code = GET_CODE (x); |
5687 | } | |
5688 | ||
5689 | /* Now recursively process each operand of this operation. */ | |
5690 | fmt = GET_RTX_FORMAT (code); | |
5691 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
5692 | if (fmt[i] == 'e') | |
5693 | { | |
5694 | new = make_compound_operation (XEXP (x, i), next_code); | |
5695 | SUBST (XEXP (x, i), new); | |
5696 | } | |
5697 | ||
5698 | return x; | |
5699 | } | |
5700 | \f | |
5701 | /* Given M see if it is a value that would select a field of bits | |
5702 | within an item, but not the entire word. Return -1 if not. | |
5703 | Otherwise, return the starting position of the field, where 0 is the | |
5704 | low-order bit. | |
5705 | ||
5706 | *PLEN is set to the length of the field. */ | |
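/* For example, M == 0x1f8 (binary 111111000) selects a 6-bit field starting
   at bit 3, so we return 3 and set *PLEN to 6; M == 0xa is rejected because
   its set bits are not contiguous.  */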
5707 | ||
5708 | static int | |
5709 | get_pos_from_mask (m, plen) | |
5f4f0e22 | 5710 | unsigned HOST_WIDE_INT m; |
230d793d RS |
5711 | int *plen; |
5712 | { | |
5713 | /* Get the bit number of the first 1 bit from the right, -1 if none. */ | |
5714 | int pos = exact_log2 (m & - m); | |
5715 | ||
5716 | if (pos < 0) | |
5717 | return -1; | |
5718 | ||
5719 | /* Now shift off the low-order zero bits and see if we have a power of | |
5720 | two minus 1. */ | |
5721 | *plen = exact_log2 ((m >> pos) + 1); | |
5722 | ||
5723 | if (*plen <= 0) | |
5724 | return -1; | |
5725 | ||
5726 | return pos; | |
5727 | } | |
5728 | \f | |
6139ff20 RK |
5729 | /* See if X can be simplified knowing that we will only refer to it in |
5730 | MODE and will only refer to those bits that are nonzero in MASK. | |
5731 | If other bits are being computed or if masking operations are done | |
5732 | that select a superset of the bits in MASK, they can sometimes be | |
5733 | ignored. | |
5734 | ||
5735 | Return a possibly simplified expression, but always convert X to | |
5736 | MODE. If X is a CONST_INT, AND the CONST_INT with MASK. | |
dfbe1b2f RK |
5737 | |
5738 | Also, if REG is non-zero and X is a register equal in value to REG, | |
e3d616e3 RK |
5739 | replace X with REG. |
5740 | ||
5741 | If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK | |
5742 | are all off in X. This is used when X will be complemented, by either | |
180b8e4b | 5743 | NOT, NEG, or XOR. */ |
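/* For instance, forcing (and X (const_int 255)) to a MASK of 0xf allows the
   AND to be removed, since only the bits in MASK matter to the caller, and
   forcing a CONST_INT simply ANDs the constant with MASK (sign-extending the
   result when MODE is narrower than HOST_WIDE_INT).  */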
dfbe1b2f RK |
5744 | |
5745 | static rtx | |
e3d616e3 | 5746 | force_to_mode (x, mode, mask, reg, just_select) |
dfbe1b2f RK |
5747 | rtx x; |
5748 | enum machine_mode mode; | |
6139ff20 | 5749 | unsigned HOST_WIDE_INT mask; |
dfbe1b2f | 5750 | rtx reg; |
e3d616e3 | 5751 | int just_select; |
dfbe1b2f RK |
5752 | { |
5753 | enum rtx_code code = GET_CODE (x); | |
180b8e4b | 5754 | int next_select = just_select || code == XOR || code == NOT || code == NEG; |
ef026f91 RS |
5755 | enum machine_mode op_mode; |
5756 | unsigned HOST_WIDE_INT fuller_mask, nonzero; | |
6139ff20 RK |
5757 | rtx op0, op1, temp; |
5758 | ||
246e00f2 RK |
5759 | /* If this is a CALL, don't do anything. Some of the code below |
5760 | will do the wrong thing since the mode of a CALL is VOIDmode. */ | |
5761 | if (code == CALL) | |
5762 | return x; | |
5763 | ||
6139ff20 RK |
5764 | /* We want to perform the operation in its present mode unless we know |
5765 | that the operation is valid in MODE, in which case we do the operation | |
5766 | in MODE. */ | |
1c75dfa4 RK |
5767 | op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x)) |
5768 | && code_to_optab[(int) code] != 0 | |
ef026f91 RS |
5769 | && (code_to_optab[(int) code]->handlers[(int) mode].insn_code |
5770 | != CODE_FOR_nothing)) | |
5771 | ? mode : GET_MODE (x)); | |
e3d616e3 | 5772 | |
aa988991 RS |
5773 | /* It is not valid to do a right-shift in a narrower mode |
5774 | than the one it came in with. */ | |
5775 | if ((code == LSHIFTRT || code == ASHIFTRT) | |
5776 | && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) | |
5777 | op_mode = GET_MODE (x); | |
ef026f91 RS |
5778 | |
5779 | /* Truncate MASK to fit OP_MODE. */ | |
5780 | if (op_mode) | |
5781 | mask &= GET_MODE_MASK (op_mode); | |
6139ff20 RK |
5782 | |
5783 | /* When we have an arithmetic operation, or a shift whose count we | |
5784 | do not know, we need to assume that all bits up to the highest-order | |
5785 | bit in MASK will be needed. This is how we form such a mask. */ | |
ef026f91 RS |
5786 | if (op_mode) |
5787 | fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT | |
5788 | ? GET_MODE_MASK (op_mode) | |
5789 | : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1); | |
5790 | else | |
5791 | fuller_mask = ~ (HOST_WIDE_INT) 0; | |
5792 | ||
5793 | /* Determine what bits of X are guaranteed to be (non)zero. */ | |
5794 | nonzero = nonzero_bits (x, mode); | |
6139ff20 RK |
5795 | |
5796 | /* If none of the bits in X are needed, return a zero. */ | |
e3d616e3 | 5797 | if (! just_select && (nonzero & mask) == 0) |
6139ff20 | 5798 | return const0_rtx; |
dfbe1b2f | 5799 | |
6139ff20 RK |
5800 | /* If X is a CONST_INT, return a new one. Do this here since the |
5801 | test below will fail. */ | |
5802 | if (GET_CODE (x) == CONST_INT) | |
ceb7983c RK |
5803 | { |
5804 | HOST_WIDE_INT cval = INTVAL (x) & mask; | |
5805 | int width = GET_MODE_BITSIZE (mode); | |
5806 | ||
5807 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative | |
5808 | number, sign extend it. */ | |
5809 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
5810 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
5811 | cval |= (HOST_WIDE_INT) -1 << width; | |
5812 | ||
5813 | return GEN_INT (cval); | |
5814 | } | |
dfbe1b2f | 5815 | |
180b8e4b RK |
5816 | /* If X is narrower than MODE and we want all the bits in X's mode, just |
5817 | get X in the proper mode. */ | |
5818 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) | |
5819 | && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0) | |
dfbe1b2f RK |
5820 | return gen_lowpart_for_combine (mode, x); |
5821 | ||
71923da7 RK |
5822 | /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in |
5823 | MASK are already known to be zero in X, we need not do anything. */ | |
5824 | if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0) | |
6139ff20 RK |
5825 | return x; |
5826 | ||
dfbe1b2f RK |
5827 | switch (code) |
5828 | { | |
6139ff20 RK |
5829 | case CLOBBER: |
5830 | /* If X is a (clobber (const_int)), return it since we know we are | |
5831 | generating something that won't match. */ | |
5832 | return x; | |
5833 | ||
6139ff20 RK |
5834 | case USE: |
5835 | /* X is a (use (mem ..)) that was made from a bit-field extraction that | |
5836 | spanned the boundary of the MEM. If we are now masking so it is | |
5837 | within that boundary, we don't need the USE any more. */ | |
f76b9db2 ILT |
5838 | if (! BITS_BIG_ENDIAN |
5839 | && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) | |
e3d616e3 | 5840 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
f76b9db2 | 5841 | break; |
6139ff20 | 5842 | |
dfbe1b2f RK |
5843 | case SIGN_EXTEND: |
5844 | case ZERO_EXTEND: | |
5845 | case ZERO_EXTRACT: | |
5846 | case SIGN_EXTRACT: | |
5847 | x = expand_compound_operation (x); | |
5848 | if (GET_CODE (x) != code) | |
e3d616e3 | 5849 | return force_to_mode (x, mode, mask, reg, next_select); |
dfbe1b2f RK |
5850 | break; |
5851 | ||
5852 | case REG: | |
5853 | if (reg != 0 && (rtx_equal_p (get_last_value (reg), x) | |
5854 | || rtx_equal_p (reg, get_last_value (x)))) | |
5855 | x = reg; | |
5856 | break; | |
5857 | ||
dfbe1b2f | 5858 | case SUBREG: |
6139ff20 | 5859 | if (subreg_lowpart_p (x) |
180b8e4b RK |
5860 | /* We can ignore the effect of this SUBREG if it narrows the mode or |
5861 | if the constant masks to zero all the bits the mode doesn't | |
5862 | have. */ | |
6139ff20 RK |
5863 | && ((GET_MODE_SIZE (GET_MODE (x)) |
5864 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
6139ff20 RK |
5865 | || (0 == (mask |
5866 | & GET_MODE_MASK (GET_MODE (x)) | |
180b8e4b | 5867 | & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) |
e3d616e3 | 5868 | return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select); |
dfbe1b2f RK |
5869 | break; |
5870 | ||
5871 | case AND: | |
6139ff20 RK |
5872 | /* If this is an AND with a constant, convert it into an AND |
5873 | whose constant is the AND of that constant with MASK. If it | |
5874 | remains an AND of MASK, delete it since it is redundant. */ | |
dfbe1b2f | 5875 | |
2ca9ae17 | 5876 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) |
dfbe1b2f | 5877 | { |
6139ff20 RK |
5878 | x = simplify_and_const_int (x, op_mode, XEXP (x, 0), |
5879 | mask & INTVAL (XEXP (x, 1))); | |
dfbe1b2f RK |
5880 | |
5881 | /* If X is still an AND, see if it is an AND with a mask that | |
71923da7 RK |
5882 | is just some low-order bits. If so, and it is MASK, we don't |
5883 | need it. */ | |
dfbe1b2f RK |
5884 | |
5885 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
6139ff20 | 5886 | && INTVAL (XEXP (x, 1)) == mask) |
dfbe1b2f | 5887 | x = XEXP (x, 0); |
d0ab8cd3 | 5888 | |
71923da7 RK |
5889 | /* If it remains an AND, try making another AND with the bits |
5890 | in the mode mask that aren't in MASK turned on. If the | |
5891 | constant in the AND is wide enough, this might make a | |
5892 | cheaper constant. */ | |
5893 | ||
5894 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
2ca9ae17 JW |
5895 | && GET_MODE_MASK (GET_MODE (x)) != mask |
5896 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) | |
71923da7 RK |
5897 | { |
5898 | HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) | |
5899 | | (GET_MODE_MASK (GET_MODE (x)) & ~ mask)); | |
5900 | int width = GET_MODE_BITSIZE (GET_MODE (x)); | |
5901 | rtx y; | |
5902 | ||
5903 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative | |
5904 | number, sign extend it. */ | |
5905 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
5906 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
5907 | cval |= (HOST_WIDE_INT) -1 << width; | |
5908 | ||
5909 | y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval)); | |
5910 | if (rtx_cost (y, SET) < rtx_cost (x, SET)) | |
5911 | x = y; | |
5912 | } | |
5913 | ||
d0ab8cd3 | 5914 | break; |
dfbe1b2f RK |
5915 | } |
5916 | ||
6139ff20 | 5917 | goto binop; |
dfbe1b2f RK |
5918 | |
5919 | case PLUS: | |
6139ff20 RK |
5920 | /* In (and (plus FOO C1) M), if M is a mask that just turns off |
5921 | low-order bits (as in an alignment operation) and FOO is already | |
5922 | aligned to that boundary, mask C1 to that boundary as well. | |
5923 | This may eliminate that PLUS and, later, the AND. */ | |
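/* For example, if FOO is known to have its low three bits clear,
   (and (plus FOO (const_int 10)) (const_int -8)) can become
   (and (plus FOO (const_int 8)) (const_int -8)), after which the AND is
   redundant because FOO + 8 is already a multiple of 8.  */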
9fa6d012 TG |
5924 | |
5925 | { | |
5926 | int width = GET_MODE_BITSIZE (mode); | |
5927 | unsigned HOST_WIDE_INT smask = mask; | |
5928 | ||
5929 | /* If MODE is narrower than HOST_WIDE_INT and mask is a negative | |
5930 | number, sign extend it. */ | |
5931 | ||
5932 | if (width < HOST_BITS_PER_WIDE_INT | |
5933 | && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
5934 | smask |= (HOST_WIDE_INT) -1 << width; | |
5935 | ||
5936 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5937 | && exact_log2 (- smask) >= 0 | |
5938 | && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0 | |
5939 | && (INTVAL (XEXP (x, 1)) & ~ mask) != 0) | |
5940 | return force_to_mode (plus_constant (XEXP (x, 0), | |
5941 | INTVAL (XEXP (x, 1)) & mask), | |
5942 | mode, mask, reg, next_select); | |
5943 | } | |
6139ff20 RK |
5944 | |
5945 | /* ... fall through ... */ | |
5946 | ||
dfbe1b2f RK |
5947 | case MINUS: |
5948 | case MULT: | |
6139ff20 RK |
5949 | /* For PLUS, MINUS and MULT, we need any bits less significant than the |
5950 | most significant bit in MASK since carries from those bits will | |
5951 | affect the bits we are interested in. */ | |
5952 | mask = fuller_mask; | |
5953 | goto binop; | |
5954 | ||
dfbe1b2f RK |
5955 | case IOR: |
5956 | case XOR: | |
6139ff20 RK |
5957 | /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and |
5958 | LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) | |
5959 | operation which may be a bitfield extraction. Ensure that the | |
5960 | constant we form is not wider than the mode of X. */ | |
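/* For instance, with MASK == 0xf this can turn
   (ior (lshiftrt FOO (const_int 4)) (const_int 3)) into
   (lshiftrt (ior FOO (const_int 48)) (const_int 4)), moving the shift to
   the outside where it may combine into an extraction.  */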
5961 | ||
5962 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
5963 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
5964 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
5965 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | |
5966 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
5967 | && ((INTVAL (XEXP (XEXP (x, 0), 1)) | |
5968 | + floor_log2 (INTVAL (XEXP (x, 1)))) | |
5969 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
5970 | && (INTVAL (XEXP (x, 1)) | |
01c82bbb | 5971 | & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) |
6139ff20 RK |
5972 | { |
5973 | temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask) | |
5974 | << INTVAL (XEXP (XEXP (x, 0), 1))); | |
5975 | temp = gen_binary (GET_CODE (x), GET_MODE (x), | |
5976 | XEXP (XEXP (x, 0), 0), temp); | |
d4d2b13f RK |
5977 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, |
5978 | XEXP (XEXP (x, 0), 1)); | |
e3d616e3 | 5979 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
5980 | } |
5981 | ||
5982 | binop: | |
dfbe1b2f | 5983 | /* For most binary operations, just propagate into the operation and |
6139ff20 RK |
5984 | change the mode if we have an operation of that mode. */ |
5985 | ||
e3d616e3 RK |
5986 | op0 = gen_lowpart_for_combine (op_mode, |
5987 | force_to_mode (XEXP (x, 0), mode, mask, | |
5988 | reg, next_select)); | |
5989 | op1 = gen_lowpart_for_combine (op_mode, | |
5990 | force_to_mode (XEXP (x, 1), mode, mask, | |
5991 | reg, next_select)); | |
6139ff20 | 5992 | |
2dd484ed RK |
5993 | /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside |
5994 | MASK since OP1 might have been sign-extended but we never want | |
5995 | to turn on extra bits, since combine might have previously relied | |
5996 | on them being off. */ | |
5997 | if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR) | |
5998 | && (INTVAL (op1) & mask) != 0) | |
5999 | op1 = GEN_INT (INTVAL (op1) & mask); | |
6000 | ||
6139ff20 RK |
6001 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) |
6002 | x = gen_binary (code, op_mode, op0, op1); | |
d0ab8cd3 | 6003 | break; |
dfbe1b2f RK |
6004 | |
6005 | case ASHIFT: | |
dfbe1b2f | 6006 | /* For left shifts, do the same, but just for the first operand. |
f6785026 RK |
6007 | However, we cannot do anything with shifts where we cannot |
6008 | guarantee that the counts are smaller than the size of the mode | |
6009 | because such a count will have a different meaning in a | |
6139ff20 | 6010 | wider mode. */ |
f6785026 RK |
6011 | |
6012 | if (! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6139ff20 | 6013 | && INTVAL (XEXP (x, 1)) >= 0 |
f6785026 RK |
6014 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) |
6015 | && ! (GET_MODE (XEXP (x, 1)) != VOIDmode | |
6016 | && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) | |
adb7a1cb | 6017 | < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) |
f6785026 RK |
6018 | break; |
6019 | ||
6139ff20 RK |
6020 | /* If the shift count is a constant and we can do arithmetic in |
6021 | the mode of the shift, refine which bits we need. Otherwise, use the | |
6022 | conservative form of the mask. */ | |
6023 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6024 | && INTVAL (XEXP (x, 1)) >= 0 | |
6025 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) | |
6026 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | |
6027 | mask >>= INTVAL (XEXP (x, 1)); | |
6028 | else | |
6029 | mask = fuller_mask; | |
6030 | ||
6031 | op0 = gen_lowpart_for_combine (op_mode, | |
6032 | force_to_mode (XEXP (x, 0), op_mode, | |
e3d616e3 | 6033 | mask, reg, next_select)); |
6139ff20 RK |
6034 | |
6035 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) | |
6036 | x = gen_binary (code, op_mode, op0, XEXP (x, 1)); | |
d0ab8cd3 | 6037 | break; |
dfbe1b2f RK |
6038 | |
6039 | case LSHIFTRT: | |
1347292b JW |
6040 | /* Here we can only do something if the shift count is a constant, |
6041 | this shift constant is valid for the host, and we can do arithmetic | |
6042 | in OP_MODE. */ | |
dfbe1b2f RK |
6043 | |
6044 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1347292b | 6045 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
6139ff20 | 6046 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 6047 | { |
6139ff20 RK |
6048 | rtx inner = XEXP (x, 0); |
6049 | ||
6050 | /* Select the mask of the bits we need for the shift operand. */ | |
6051 | mask <<= INTVAL (XEXP (x, 1)); | |
d0ab8cd3 | 6052 | |
6139ff20 RK |
6053 | /* We can only change the mode of the shift if we can do arithmetic |
6054 | in the mode of the shift and MASK is no wider than the width of | |
6055 | OP_MODE. */ | |
6056 | if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT | |
6057 | || (mask & ~ GET_MODE_MASK (op_mode)) != 0) | |
d0ab8cd3 RK |
6058 | op_mode = GET_MODE (x); |
6059 | ||
e3d616e3 | 6060 | inner = force_to_mode (inner, op_mode, mask, reg, next_select); |
6139ff20 RK |
6061 | |
6062 | if (GET_MODE (x) != op_mode || inner != XEXP (x, 0)) | |
6063 | x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1)); | |
d0ab8cd3 | 6064 | } |
6139ff20 RK |
6065 | |
6066 | /* If we have (and (lshiftrt FOO C1) C2) where the combination of the | |
6067 | shift and AND produces only copies of the sign bit (C2 is one less | |
6068 | than a power of two), we can do this with just a shift. */ | |
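/* For example, with MASK == 7 and FOO known to have at least 24 sign-bit
   copies in SImode, (lshiftrt FOO (const_int 28)) selects three copies of
   the sign bit and can be rewritten as (lshiftrt FOO (const_int 29)).  */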
6069 | ||
6070 | if (GET_CODE (x) == LSHIFTRT | |
6071 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
6072 | && ((INTVAL (XEXP (x, 1)) | |
6073 | + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) | |
6074 | >= GET_MODE_BITSIZE (GET_MODE (x))) | |
6075 | && exact_log2 (mask + 1) >= 0 | |
6076 | && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
6077 | >= exact_log2 (mask + 1))) | |
6078 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
6079 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) | |
6080 | - exact_log2 (mask + 1))); | |
d0ab8cd3 RK |
6081 | break; |
6082 | ||
6083 | case ASHIFTRT: | |
6139ff20 RK |
6084 | /* If we are just looking for the sign bit, we don't need this shift at |
6085 | all, even if it has a variable count. */ | |
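/* The sign bit of (ashiftrt FOO N) is always the sign bit of FOO itself,
   so when MASK is just the sign bit (e.g. 0x80000000 in SImode) we can
   simply recurse on the shifted operand.  */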
9bf22b75 RK |
6086 | if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT |
6087 | && (mask == ((HOST_WIDE_INT) 1 | |
6088 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) | |
e3d616e3 | 6089 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 RK |
6090 | |
6091 | /* If this is a shift by a constant, get a mask that contains those bits | |
6092 | that are not copies of the sign bit. We then have two cases: If | |
6093 | MASK only includes those bits, this can be a logical shift, which may | |
6094 | allow simplifications. If MASK is a single-bit field not within | |
6095 | those bits, we are requesting a copy of the sign bit and hence can | |
6096 | shift the sign bit to the appropriate location. */ | |
6097 | ||
6098 | if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0 | |
6099 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
6100 | { | |
6101 | int i = -1; | |
6102 | ||
b69960ac RK |
6103 | /* If the considered data is wider than HOST_WIDE_INT, we can't | |
6104 | represent a mask for all its bits in a single scalar. | |
6105 | But we only care about the lower bits, so calculate these. */ | |
6106 | ||
6a11342f | 6107 | if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) |
b69960ac RK |
6108 | { |
6109 | nonzero = ~(HOST_WIDE_INT)0; | |
6110 | ||
6111 | /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
6112 | is the number of bits a full-width mask would have set. | |
6113 | We need only shift if these are fewer than nonzero can | |
6114 | hold. If not, we must keep all bits set in nonzero. */ | |
6115 | ||
6116 | if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
6117 | < HOST_BITS_PER_WIDE_INT) | |
6118 | nonzero >>= INTVAL (XEXP (x, 1)) | |
6119 | + HOST_BITS_PER_WIDE_INT | |
6120 | - GET_MODE_BITSIZE (GET_MODE (x)) ; | |
6121 | } | |
6122 | else | |
6123 | { | |
6124 | nonzero = GET_MODE_MASK (GET_MODE (x)); | |
6125 | nonzero >>= INTVAL (XEXP (x, 1)); | |
6126 | } | |
6139ff20 RK |
6127 | |
6128 | if ((mask & ~ nonzero) == 0 | |
6129 | || (i = exact_log2 (mask)) >= 0) | |
6130 | { | |
6131 | x = simplify_shift_const | |
6132 | (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
6133 | i < 0 ? INTVAL (XEXP (x, 1)) | |
6134 | : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); | |
6135 | ||
6136 | if (GET_CODE (x) != ASHIFTRT) | |
e3d616e3 | 6137 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
6138 | } |
6139 | } | |
6140 | ||
6141 | /* If MASK is 1, convert this to a LSHIFTRT. This can be done | |
6142 | even if the shift count isn't a constant. */ | |
6143 | if (mask == 1) | |
6144 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)); | |
6145 | ||
d0ab8cd3 | 6146 | /* If this is a sign-extension operation that just affects bits |
4c002f29 RK |
6147 | we don't care about, remove it. Be sure the call above returned |
6148 | something that is still a shift. */ | |
d0ab8cd3 | 6149 | |
4c002f29 RK |
6150 | if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) |
6151 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0ab8cd3 | 6152 | && INTVAL (XEXP (x, 1)) >= 0 |
6139ff20 RK |
6153 | && (INTVAL (XEXP (x, 1)) |
6154 | <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) | |
d0ab8cd3 RK |
6155 | && GET_CODE (XEXP (x, 0)) == ASHIFT |
6156 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6157 | && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1))) | |
e3d616e3 RK |
6158 | return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, |
6159 | reg, next_select); | |
6139ff20 | 6160 | |
dfbe1b2f RK |
6161 | break; |
6162 | ||
6139ff20 RK |
6163 | case ROTATE: |
6164 | case ROTATERT: | |
6165 | /* If the shift count is constant and we can do computations | |
6166 | in the mode of X, compute where the bits we care about are. | |
6167 | Otherwise, we can't do anything. Don't change the mode of | |
6168 | the shift or propagate MODE into the shift, though. */ | |
6169 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6170 | && INTVAL (XEXP (x, 1)) >= 0) | |
6171 | { | |
6172 | temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, | |
6173 | GET_MODE (x), GEN_INT (mask), | |
6174 | XEXP (x, 1)); | |
7d171a1e | 6175 | if (temp && GET_CODE(temp) == CONST_INT) |
6139ff20 RK |
6176 | SUBST (XEXP (x, 0), |
6177 | force_to_mode (XEXP (x, 0), GET_MODE (x), | |
e3d616e3 | 6178 | INTVAL (temp), reg, next_select)); |
6139ff20 RK |
6179 | } |
6180 | break; | |
6181 | ||
dfbe1b2f | 6182 | case NEG: |
180b8e4b RK |
6183 | /* If we just want the low-order bit, the NEG isn't needed since it |
6184 | won't change the low-order bit. */ | |
6185 | if (mask == 1) | |
6186 | return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select); | |
6187 | ||
6139ff20 RK |
6188 | /* We need any bits less significant than the most significant bit in |
6189 | MASK since carries from those bits will affect the bits we are | |
6190 | interested in. */ | |
6191 | mask = fuller_mask; | |
6192 | goto unop; | |
6193 | ||
dfbe1b2f | 6194 | case NOT: |
6139ff20 RK |
6195 | /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the |
6196 | same as the XOR case above. Ensure that the constant we form is not | |
6197 | wider than the mode of X. */ | |
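/* For example, with MASK == 0xf, (not (lshiftrt FOO (const_int 4))) can be
   rewritten as (lshiftrt (xor FOO (const_int 240)) (const_int 4)); the two
   forms agree on the low four bits after the shift.  */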
6198 | ||
6199 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
6200 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6201 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
6202 | && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) | |
6203 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
6204 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) | |
6205 | { | |
6206 | temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1))); | |
6207 | temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp); | |
6208 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1)); | |
6209 | ||
e3d616e3 | 6210 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
6211 | } |
6212 | ||
f82da7d2 JW |
6213 | /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must |
6214 | use the full mask inside the NOT. */ | |
6215 | mask = fuller_mask; | |
6216 | ||
6139ff20 | 6217 | unop: |
e3d616e3 RK |
6218 | op0 = gen_lowpart_for_combine (op_mode, |
6219 | force_to_mode (XEXP (x, 0), mode, mask, | |
6220 | reg, next_select)); | |
6139ff20 | 6221 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) |
0c1c8ea6 | 6222 | x = gen_unary (code, op_mode, op_mode, op0); |
6139ff20 RK |
6223 | break; |
6224 | ||
6225 | case NE: | |
6226 | /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included | |
6227 | in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not | |
6228 | in CONST. */ | |
6229 | if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 0) == const0_rtx | |
6230 | && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0) | |
e3d616e3 | 6231 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 | 6232 | |
d0ab8cd3 RK |
6233 | break; |
6234 | ||
6235 | case IF_THEN_ELSE: | |
6236 | /* We have no way of knowing if the IF_THEN_ELSE can itself be | |
6237 | written in a narrower mode. We play it safe and do not do so. */ | |
6238 | ||
6239 | SUBST (XEXP (x, 1), | |
6240 | gen_lowpart_for_combine (GET_MODE (x), | |
6241 | force_to_mode (XEXP (x, 1), mode, | |
e3d616e3 | 6242 | mask, reg, next_select))); |
d0ab8cd3 RK |
6243 | SUBST (XEXP (x, 2), |
6244 | gen_lowpart_for_combine (GET_MODE (x), | |
6245 | force_to_mode (XEXP (x, 2), mode, | |
e3d616e3 | 6246 | mask, reg,next_select))); |
d0ab8cd3 | 6247 | break; |
dfbe1b2f RK |
6248 | } |
6249 | ||
d0ab8cd3 | 6250 | /* Ensure we return a value of the proper mode. */ |
dfbe1b2f RK |
6251 | return gen_lowpart_for_combine (mode, x); |
6252 | } | |
6253 | \f | |
abe6e52f RK |
6254 | /* Return nonzero if X is an expression that has one of two values depending on |
6255 | whether some other value is zero or nonzero. In that case, we return the | |
6256 | value that is being tested, *PTRUE is set to the value if the rtx being | |
6257 | returned has a nonzero value, and *PFALSE is set to the other alternative. | |
6258 | ||
6259 | If we return zero, we set *PTRUE and *PFALSE to X. */ | |
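/* For example, if X is (if_then_else (ne A (const_int 0)) B C) we return A,
   set *PTRUE to B and *PFALSE to C (A, B and C being illustrative operands);
   when X has no such two-valued structure we return zero and set both
   *PTRUE and *PFALSE to X itself.  */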
6260 | ||
6261 | static rtx | |
6262 | if_then_else_cond (x, ptrue, pfalse) | |
6263 | rtx x; | |
6264 | rtx *ptrue, *pfalse; | |
6265 | { | |
6266 | enum machine_mode mode = GET_MODE (x); | |
6267 | enum rtx_code code = GET_CODE (x); | |
6268 | int size = GET_MODE_BITSIZE (mode); | |
6269 | rtx cond0, cond1, true0, true1, false0, false1; | |
6270 | unsigned HOST_WIDE_INT nz; | |
6271 | ||
6272 | /* If this is a unary operation whose operand has one of two values, apply | |
6273 | our opcode to compute those values. */ | |
6274 | if (GET_RTX_CLASS (code) == '1' | |
6275 | && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0) | |
6276 | { | |
0c1c8ea6 RK |
6277 | *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0); |
6278 | *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0); | |
abe6e52f RK |
6279 | return cond0; |
6280 | } | |
6281 | ||
3a19aabc | 6282 | /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would |
ddd5a7c1 | 6283 | make can't possibly match and would suppress other optimizations. */ |
3a19aabc RK |
6284 | else if (code == COMPARE) |
6285 | ; | |
6286 | ||
abe6e52f RK |
6287 | /* If this is a binary operation, see if either side has only one of two |
6288 | values. If either one does or if both do and they are conditional on | |
6289 | the same value, compute the new true and false values. */ | |
6290 | else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2' | |
6291 | || GET_RTX_CLASS (code) == '<') | |
6292 | { | |
6293 | cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0); | |
6294 | cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1); | |
6295 | ||
6296 | if ((cond0 != 0 || cond1 != 0) | |
6297 | && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1))) | |
6298 | { | |
6299 | *ptrue = gen_binary (code, mode, true0, true1); | |
6300 | *pfalse = gen_binary (code, mode, false0, false1); | |
6301 | return cond0 ? cond0 : cond1; | |
6302 | } | |
9210df58 RK |
6303 | |
6304 | #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1 | |
6305 | ||
6306 | /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the | |
6307 | operands is zero when the other is non-zero, and vice-versa. */ | |
6308 | ||
6309 | if ((code == PLUS || code == IOR || code == XOR || code == MINUS | |
6310 | || code == UMAX) | |
6311 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) | |
6312 | { | |
6313 | rtx op0 = XEXP (XEXP (x, 0), 1); | |
6314 | rtx op1 = XEXP (XEXP (x, 1), 1); | |
6315 | ||
6316 | cond0 = XEXP (XEXP (x, 0), 0); | |
6317 | cond1 = XEXP (XEXP (x, 1), 0); | |
6318 | ||
6319 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
6320 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
6321 | && reversible_comparison_p (cond1) | |
6322 | && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1)) | |
6323 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) | |
6324 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
6325 | || ((swap_condition (GET_CODE (cond0)) | |
6326 | == reverse_condition (GET_CODE (cond1))) | |
6327 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) | |
6328 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
6329 | && ! side_effects_p (x)) | |
6330 | { | |
6331 | *ptrue = gen_binary (MULT, mode, op0, const_true_rtx); | |
6332 | *pfalse = gen_binary (MULT, mode, | |
6333 | (code == MINUS | |
0c1c8ea6 | 6334 | ? gen_unary (NEG, mode, mode, op1) : op1), |
9210df58 RK |
6335 | const_true_rtx); |
6336 | return cond0; | |
6337 | } | |
6338 | } | |
6339 | ||
6340 | /* Similarly for MULT, AND and UMIN, except that for these the result | |
6341 | is always zero. */ | |
6342 | if ((code == MULT || code == AND || code == UMIN) | |
6343 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) | |
6344 | { | |
6345 | cond0 = XEXP (XEXP (x, 0), 0); | |
6346 | cond1 = XEXP (XEXP (x, 1), 0); | |
6347 | ||
6348 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
6349 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
6350 | && reversible_comparison_p (cond1) | |
6351 | && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1)) | |
6352 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) | |
6353 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
6354 | || ((swap_condition (GET_CODE (cond0)) | |
6355 | == reverse_condition (GET_CODE (cond1))) | |
6356 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) | |
6357 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
6358 | && ! side_effects_p (x)) | |
6359 | { | |
6360 | *ptrue = *pfalse = const0_rtx; | |
6361 | return cond0; | |
6362 | } | |
6363 | } | |
6364 | #endif | |
abe6e52f RK |
6365 | } |
6366 | ||
6367 | else if (code == IF_THEN_ELSE) | |
6368 | { | |
6369 | /* If we have IF_THEN_ELSE already, extract the condition and | |
6370 | canonicalize it if it is NE or EQ. */ | |
6371 | cond0 = XEXP (x, 0); | |
6372 | *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2); | |
6373 | if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx) | |
6374 | return XEXP (cond0, 0); | |
6375 | else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx) | |
6376 | { | |
6377 | *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1); | |
6378 | return XEXP (cond0, 0); | |
6379 | } | |
6380 | else | |
6381 | return cond0; | |
6382 | } | |
6383 | ||
6384 | /* If X is a normal SUBREG with both inner and outer modes integral, | |
6385 | we can narrow both the true and false values of the inner expression, | |
6386 | if there is a condition. */ | |
6387 | else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT | |
6388 | && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT | |
6389 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
6390 | && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x), | |
6391 | &true0, &false0))) | |
6392 | { | |
00244e6b RK |
6393 | *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0); |
6394 | *pfalse | |
6395 | = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0); | |
abe6e52f | 6396 | |
abe6e52f RK |
6397 | return cond0; |
6398 | } | |
6399 | ||
6400 | /* If X is a constant, this isn't special and will cause confusion | |
6401 | if we treat it as such. Likewise if it is equivalent to a constant. */ | |
6402 | else if (CONSTANT_P (x) | |
6403 | || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0))) | |
6404 | ; | |
6405 | ||
6406 | /* If X is known to be either 0 or -1, those are the true and | |
6407 | false values when testing X. */ | |
6408 | else if (num_sign_bit_copies (x, mode) == size) | |
6409 | { | |
6410 | *ptrue = constm1_rtx, *pfalse = const0_rtx; | |
6411 | return x; | |
6412 | } | |
6413 | ||
6414 | /* Likewise for 0 or a single bit. */ | |
6415 | else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0) | |
6416 | { | |
6417 | *ptrue = GEN_INT (nz), *pfalse = const0_rtx; | |
6418 | return x; | |
6419 | } | |
6420 | ||
6421 | /* Otherwise fail; show no condition with true and false values the same. */ | |
6422 | *ptrue = *pfalse = x; | |
6423 | return 0; | |
6424 | } | |
6425 | \f | |
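
As a sanity check on the PLUS-of-two-MULTs case handled above (which applies when STORE_FLAG_VALUE is 1, so comparisons yield 0 or 1), the stand-alone sketch below shows the arithmetic identity it relies on. This is plain ISO C for illustration only, not GCC code; the helper name mux_by_multiply is invented here.

#include <assert.h>

/* Illustration only: with 0/1 comparison results,
   (x == y) * a  +  (x != y) * b   selects a when the condition is true
   and b otherwise -- the same value an IF_THEN_ELSE would produce, which
   is what if_then_else_cond recovers from the PLUS of two MULTs.  */
static int
mux_by_multiply (int x, int y, int a, int b)
{
  return (x == y) * a + (x != y) * b;
}

int
main (void)
{
  assert (mux_by_multiply (3, 3, 10, 20) == 10);  /* condition true: a  */
  assert (mux_by_multiply (3, 4, 10, 20) == 20);  /* condition false: b */
  return 0;
}
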
1a26b032 RK |
6426 | /* Return the value of expression X given the fact that condition COND |
6427 | is known to be true when applied to REG as its first operand and VAL | |
6428 | as its second. X is known to not be shared and so can be modified in | |
6429 | place. | |
6430 | ||
6431 | We only handle the simplest cases, and specifically those cases that | |
6432 | arise with IF_THEN_ELSE expressions. */ | |
6433 | ||
6434 | static rtx | |
6435 | known_cond (x, cond, reg, val) | |
6436 | rtx x; | |
6437 | enum rtx_code cond; | |
6438 | rtx reg, val; | |
6439 | { | |
6440 | enum rtx_code code = GET_CODE (x); | |
f24ad0e4 | 6441 | rtx temp; |
1a26b032 RK |
6442 | char *fmt; |
6443 | int i, j; | |
6444 | ||
6445 | if (side_effects_p (x)) | |
6446 | return x; | |
6447 | ||
6448 | if (cond == EQ && rtx_equal_p (x, reg)) | |
6449 | return val; | |
6450 | ||
6451 | /* If X is (abs REG) and we know something about REG's relationship | |
6452 | with zero, we may be able to simplify this. */ | |
6453 | ||
6454 | if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx) | |
6455 | switch (cond) | |
6456 | { | |
6457 | case GE: case GT: case EQ: | |
6458 | return XEXP (x, 0); | |
6459 | case LT: case LE: | |
0c1c8ea6 RK |
6460 | return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)), |
6461 | XEXP (x, 0)); | |
1a26b032 RK |
6462 | } |
6463 | ||
6464 | /* The only other cases we handle are MIN, MAX, and comparisons if the | |
6465 | operands are the same as REG and VAL. */ | |
6466 | ||
6467 | else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c') | |
6468 | { | |
6469 | if (rtx_equal_p (XEXP (x, 0), val)) | |
6470 | cond = swap_condition (cond), temp = val, val = reg, reg = temp; | |
6471 | ||
6472 | if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val)) | |
6473 | { | |
6474 | if (GET_RTX_CLASS (code) == '<') | |
6475 | return (comparison_dominates_p (cond, code) ? const_true_rtx | |
6476 | : (comparison_dominates_p (cond, | |
6477 | reverse_condition (code)) | |
6478 | ? const0_rtx : x)); | |
6479 | ||
6480 | else if (code == SMAX || code == SMIN | |
6481 | || code == UMIN || code == UMAX) | |
6482 | { | |
6483 | int unsignedp = (code == UMIN || code == UMAX); | |
6484 | ||
6485 | if (code == SMAX || code == UMAX) | |
6486 | cond = reverse_condition (cond); | |
6487 | ||
6488 | switch (cond) | |
6489 | { | |
6490 | case GE: case GT: | |
6491 | return unsignedp ? x : XEXP (x, 1); | |
6492 | case LE: case LT: | |
6493 | return unsignedp ? x : XEXP (x, 0); | |
6494 | case GEU: case GTU: | |
6495 | return unsignedp ? XEXP (x, 1) : x; | |
6496 | case LEU: case LTU: | |
6497 | return unsignedp ? XEXP (x, 0) : x; | |
6498 | } | |
6499 | } | |
6500 | } | |
6501 | } | |
6502 | ||
6503 | fmt = GET_RTX_FORMAT (code); | |
6504 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
6505 | { | |
6506 | if (fmt[i] == 'e') | |
6507 | SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val)); | |
6508 | else if (fmt[i] == 'E') | |
6509 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
6510 | SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j), | |
6511 | cond, reg, val)); | |
6512 | } | |
6513 | ||
6514 | return x; | |
6515 | } | |
6516 | \f | |
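
The simplifications known_cond performs for ABS and for signed MIN/MAX reduce to ordinary arithmetic facts. A minimal sketch in plain C of the identities assumed above; the helper names smin and smax are made up for this note:

#include <assert.h>
#include <stdlib.h>

static int smin (int a, int b) { return a < b ? a : b; }
static int smax (int a, int b) { return a > b ? a : b; }

int
main (void)
{
  int reg = 7;                    /* suppose the condition tells us reg >= 0 */
  assert (abs (reg) == reg);      /* (abs REG) folds to REG                  */

  reg = -5;                       /* suppose the condition tells us reg < 0  */
  assert (abs (reg) == -reg);     /* (abs REG) folds to (neg REG)            */

  int a = 2, b = 9;               /* suppose the condition tells us a <= b   */
  assert (smin (a, b) == a);      /* SMIN picks its smaller (first) operand  */
  assert (smax (a, b) == b);      /* SMAX picks its larger (second) operand  */
  return 0;
}
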
e11fa86f RK |
6517 | /* See if X and Y are equal for the purposes of seeing if we can rewrite an |
6518 | assignment as a field assignment. */ | |
6519 | ||
6520 | static int | |
6521 | rtx_equal_for_field_assignment_p (x, y) | |
6522 | rtx x; | |
6523 | rtx y; | |
6524 | { | |
6525 | rtx last_x, last_y; | |
6526 | ||
6527 | if (x == y || rtx_equal_p (x, y)) | |
6528 | return 1; | |
6529 | ||
6530 | if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y)) | |
6531 | return 0; | |
6532 | ||
6533 | /* Check for a paradoxical SUBREG of a MEM compared with the MEM. | |
6534 | Note that all SUBREGs of MEM are paradoxical; otherwise they | |
6535 | would have been rewritten. */ | |
6536 | if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG | |
6537 | && GET_CODE (SUBREG_REG (y)) == MEM | |
6538 | && rtx_equal_p (SUBREG_REG (y), | |
6539 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x))) | |
6540 | return 1; | |
6541 | ||
6542 | if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG | |
6543 | && GET_CODE (SUBREG_REG (x)) == MEM | |
6544 | && rtx_equal_p (SUBREG_REG (x), | |
6545 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y))) | |
6546 | return 1; | |
6547 | ||
6548 | last_x = get_last_value (x); | |
6549 | last_y = get_last_value (y); | |
6550 | ||
6551 | return ((last_x != 0 && rtx_equal_for_field_assignment_p (last_x, y)) | |
6552 | || (last_y != 0 && rtx_equal_for_field_assignment_p (x, last_y)) | |
6553 | || (last_x != 0 && last_y != 0 | |
6554 | && rtx_equal_for_field_assignment_p (last_x, last_y))); | |
6555 | } | |
6556 | \f | |
230d793d RS |
6557 | /* See if X, a SET operation, can be rewritten as a bit-field assignment. |
6558 | Return that assignment if so. | |
6559 | ||
6560 | We only handle the most common cases. */ | |
6561 | ||
6562 | static rtx | |
6563 | make_field_assignment (x) | |
6564 | rtx x; | |
6565 | { | |
6566 | rtx dest = SET_DEST (x); | |
6567 | rtx src = SET_SRC (x); | |
dfbe1b2f | 6568 | rtx assign; |
e11fa86f | 6569 | rtx rhs, lhs; |
5f4f0e22 CH |
6570 | HOST_WIDE_INT c1; |
6571 | int pos, len; | |
dfbe1b2f RK |
6572 | rtx other; |
6573 | enum machine_mode mode; | |
230d793d RS |
6574 | |
6575 | /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is | |
6576 | a clear of a one-bit field. We will have changed it to | |
6577 | (and (rotate (const_int -2) POS) DEST), so check for that. Also check | |
6578 | for a SUBREG. */ | |
6579 | ||
6580 | if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE | |
6581 | && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT | |
6582 | && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 | |
e11fa86f | 6583 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 6584 | { |
8999a12e | 6585 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 6586 | 1, 1, 1, 0); |
76184def DE |
6587 | if (assign != 0) |
6588 | return gen_rtx (SET, VOIDmode, assign, const0_rtx); | |
6589 | return x; | |
230d793d RS |
6590 | } |
6591 | ||
6592 | else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG | |
6593 | && subreg_lowpart_p (XEXP (src, 0)) | |
6594 | && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) | |
6595 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) | |
6596 | && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE | |
6597 | && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 | |
e11fa86f | 6598 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 6599 | { |
8999a12e | 6600 | assign = make_extraction (VOIDmode, dest, 0, |
230d793d RS |
6601 | XEXP (SUBREG_REG (XEXP (src, 0)), 1), |
6602 | 1, 1, 1, 0); | |
76184def DE |
6603 | if (assign != 0) |
6604 | return gen_rtx (SET, VOIDmode, assign, const0_rtx); | |
6605 | return x; | |
230d793d RS |
6606 | } |
6607 | ||
9dd11dcb | 6608 | /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a |
230d793d RS |
6609 | one-bit field. */ |
6610 | else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT | |
6611 | && XEXP (XEXP (src, 0), 0) == const1_rtx | |
e11fa86f | 6612 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 6613 | { |
8999a12e | 6614 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 6615 | 1, 1, 1, 0); |
76184def DE |
6616 | if (assign != 0) |
6617 | return gen_rtx (SET, VOIDmode, assign, const1_rtx); | |
6618 | return x; | |
230d793d RS |
6619 | } |
6620 | ||
dfbe1b2f | 6621 | /* The other case we handle is assignments into a constant-position |
9dd11dcb | 6622 | field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents |
dfbe1b2f RK |
6623 | a mask that has all one bits except for a group of zero bits and |
6624 | OTHER is known to have zeros where C1 has ones, this is such an | |
6625 | assignment. Compute the position and length from C1. Shift OTHER | |
6626 | to the appropriate position, force it to the required mode, and | |
6627 | make the extraction. Check for the AND in both operands. */ | |
6628 | ||
9dd11dcb | 6629 | if (GET_CODE (src) != IOR && GET_CODE (src) != XOR) |
e11fa86f RK |
6630 | return x; |
6631 | ||
6632 | rhs = expand_compound_operation (XEXP (src, 0)); | |
6633 | lhs = expand_compound_operation (XEXP (src, 1)); | |
6634 | ||
6635 | if (GET_CODE (rhs) == AND | |
6636 | && GET_CODE (XEXP (rhs, 1)) == CONST_INT | |
6637 | && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) | |
6638 | c1 = INTVAL (XEXP (rhs, 1)), other = lhs; | |
6639 | else if (GET_CODE (lhs) == AND | |
6640 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
6641 | && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) | |
6642 | c1 = INTVAL (XEXP (lhs, 1)), other = rhs; | |
dfbe1b2f RK |
6643 | else |
6644 | return x; | |
230d793d | 6645 | |
e11fa86f | 6646 | pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len); |
dfbe1b2f | 6647 | if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) |
ac49a949 | 6648 | || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT |
951553af | 6649 | && (c1 & nonzero_bits (other, GET_MODE (other))) != 0)) |
dfbe1b2f | 6650 | return x; |
230d793d | 6651 | |
5f4f0e22 | 6652 | assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0); |
76184def DE |
6653 | if (assign == 0) |
6654 | return x; | |
230d793d | 6655 | |
dfbe1b2f RK |
6656 | /* The mode to use for the source is the mode of the assignment, or of |
6657 | what is inside a possible STRICT_LOW_PART. */ | |
6658 | mode = (GET_CODE (assign) == STRICT_LOW_PART | |
6659 | ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign)); | |
230d793d | 6660 | |
dfbe1b2f RK |
6661 | /* Shift OTHER right POS places and make it the source, restricting it |
6662 | to the proper length and mode. */ | |
230d793d | 6663 | |
5f4f0e22 CH |
6664 | src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT, |
6665 | GET_MODE (src), other, pos), | |
6139ff20 RK |
6666 | mode, |
6667 | GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT | |
6668 | ? GET_MODE_MASK (mode) | |
6669 | : ((HOST_WIDE_INT) 1 << len) - 1, | |
e3d616e3 | 6670 | dest, 0); |
230d793d | 6671 | |
dfbe1b2f | 6672 | return gen_rtx_combine (SET, VOIDmode, assign, src); |
230d793d RS |
6673 | } |
6674 | \f | |
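
For the constant-position-field case above, the position and length of the field are derived from the complement of C1. The sketch below is an illustrative stand-in for that computation (get_pos_from_mask is the real helper; pos_from_mask here is a hypothetical model of it), showing how a mask such as 0xffff00ff identifies an 8-bit field starting at bit 8:

#include <assert.h>
#include <stdint.h>

/* Illustrative stand-in for get_pos_from_mask: given a mask with a single
   contiguous group of one bits, return the group's starting bit position
   and store its width in *LEN.  make_field_assignment applies this to ~C1.  */
static int
pos_from_mask (uint32_t mask, int *len)
{
  int pos = 0;

  if (mask == 0)
    return -1;
  while ((mask & 1) == 0)
    mask >>= 1, pos++;
  *len = 0;
  while (mask & 1)
    mask >>= 1, (*len)++;
  return mask == 0 ? pos : -1;   /* fail if the one bits are not contiguous */
}

int
main (void)
{
  /* (set DEST (ior (and DEST 0xffff00ff) OTHER)): C1 = 0xffff00ff, so the
     field being stored is the 8 bits of DEST starting at bit 8.  */
  uint32_t c1 = 0xffff00ffu;
  int len;
  int pos = pos_from_mask (~c1, &len);

  assert (pos == 8 && len == 8);
  return 0;
}
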
6675 | /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c) | |
6676 | if so. */ | |
6677 | ||
6678 | static rtx | |
6679 | apply_distributive_law (x) | |
6680 | rtx x; | |
6681 | { | |
6682 | enum rtx_code code = GET_CODE (x); | |
6683 | rtx lhs, rhs, other; | |
6684 | rtx tem; | |
6685 | enum rtx_code inner_code; | |
6686 | ||
d8a8a4da RS |
6687 | /* Distributivity is not true for floating point. |
6688 | It can change the value. So don't do it. | |
6689 | -- rms and moshier@world.std.com. */ | |
3ad2180a | 6690 | if (FLOAT_MODE_P (GET_MODE (x))) |
d8a8a4da RS |
6691 | return x; |
6692 | ||
230d793d RS |
6693 | /* The outer operation can only be one of the following: */ |
6694 | if (code != IOR && code != AND && code != XOR | |
6695 | && code != PLUS && code != MINUS) | |
6696 | return x; | |
6697 | ||
6698 | lhs = XEXP (x, 0), rhs = XEXP (x, 1); | |
6699 | ||
dfbe1b2f | 6700 | /* If either operand is a primitive we can't do anything, so get out fast. */ |
230d793d | 6701 | if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o' |
dfbe1b2f | 6702 | || GET_RTX_CLASS (GET_CODE (rhs)) == 'o') |
230d793d RS |
6703 | return x; |
6704 | ||
6705 | lhs = expand_compound_operation (lhs); | |
6706 | rhs = expand_compound_operation (rhs); | |
6707 | inner_code = GET_CODE (lhs); | |
6708 | if (inner_code != GET_CODE (rhs)) | |
6709 | return x; | |
6710 | ||
6711 | /* See if the inner and outer operations distribute. */ | |
6712 | switch (inner_code) | |
6713 | { | |
6714 | case LSHIFTRT: | |
6715 | case ASHIFTRT: | |
6716 | case AND: | |
6717 | case IOR: | |
6718 | /* These all distribute except over PLUS. */ | |
6719 | if (code == PLUS || code == MINUS) | |
6720 | return x; | |
6721 | break; | |
6722 | ||
6723 | case MULT: | |
6724 | if (code != PLUS && code != MINUS) | |
6725 | return x; | |
6726 | break; | |
6727 | ||
6728 | case ASHIFT: | |
45620ed4 | 6729 | /* This is also a multiply, so it distributes over everything. */ |
230d793d RS |
6730 | break; |
6731 | ||
6732 | case SUBREG: | |
dfbe1b2f RK |
6733 | /* Non-paradoxical SUBREGs distribute over all operations, provided | |
6734 | the inner modes and word numbers are the same, this is an extraction | |
2b4bd1bc JW |
6735 | of a low-order part, we don't convert an fp operation to int or |
6736 | vice versa, and we would not be converting a single-word | |
dfbe1b2f | 6737 | operation into a multi-word operation. The latter test is not |
2b4bd1bc | 6738 | required, but it prevents generating unneeded multi-word operations. |
dfbe1b2f RK |
6739 | Some of the previous tests are redundant given the latter test, but |
6740 | are retained because they are required for correctness. | |
6741 | ||
6742 | We produce the result slightly differently in this case. */ | |
6743 | ||
6744 | if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs)) | |
6745 | || SUBREG_WORD (lhs) != SUBREG_WORD (rhs) | |
6746 | || ! subreg_lowpart_p (lhs) | |
2b4bd1bc JW |
6747 | || (GET_MODE_CLASS (GET_MODE (lhs)) |
6748 | != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs)))) | |
dfbe1b2f | 6749 | || (GET_MODE_SIZE (GET_MODE (lhs)) |
8af24e26 | 6750 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs)))) |
dfbe1b2f | 6751 | || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD) |
230d793d RS |
6752 | return x; |
6753 | ||
6754 | tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)), | |
6755 | SUBREG_REG (lhs), SUBREG_REG (rhs)); | |
6756 | return gen_lowpart_for_combine (GET_MODE (x), tem); | |
6757 | ||
6758 | default: | |
6759 | return x; | |
6760 | } | |
6761 | ||
6762 | /* Set LHS and RHS to the inner operands (A and B in the example | |
6763 | above) and set OTHER to the common operand (C in the example). | |
6764 | There is only one way to do this unless the inner operation is | |
6765 | commutative. */ | |
6766 | if (GET_RTX_CLASS (inner_code) == 'c' | |
6767 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0))) | |
6768 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1); | |
6769 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
6770 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1))) | |
6771 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0); | |
6772 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
6773 | && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0))) | |
6774 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1); | |
6775 | else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1))) | |
6776 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0); | |
6777 | else | |
6778 | return x; | |
6779 | ||
6780 | /* Form the new inner operation, seeing if it simplifies first. */ | |
6781 | tem = gen_binary (code, GET_MODE (x), lhs, rhs); | |
6782 | ||
6783 | /* There is one exception to the general way of distributing: | |
6784 | (a | b) ^ (a | c) -> (~a) & (b ^ c) */ | |
6785 | if (code == XOR && inner_code == IOR) | |
6786 | { | |
6787 | inner_code = AND; | |
0c1c8ea6 | 6788 | other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other); |
230d793d RS |
6789 | } |
6790 | ||
6791 | /* We may be able to continue distributing the result, so call | |
6792 | ourselves recursively on the inner operation before forming the | |
6793 | outer operation, which we return. */ | |
6794 | return gen_binary (inner_code, GET_MODE (x), | |
6795 | apply_distributive_law (tem), other); | |
6796 | } | |
6797 | \f | |
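
The rewrites performed by apply_distributive_law, including the XOR-over-IOR special case noted in the comment, can be checked on ordinary integers. A minimal sketch in plain C; the sample values are arbitrary and the check is illustrative only:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0x1234u, b = 0x00f0u, c = 0x0f0fu;

  /* MULT distributes over PLUS: (a*c) + (b*c) == (a+b)*c.  */
  assert (a * c + b * c == (a + b) * c);

  /* AND distributes over IOR: (a&c) | (b&c) == (a|b) & c.  */
  assert (((a & c) | (b & c)) == ((a | b) & c));

  /* The special case: (a|b) ^ (a|c) == (b^c) & ~a, which is why the code
     switches the inner operation to AND and complements the common operand.  */
  assert (((a | b) ^ (a | c)) == ((b ^ c) & ~a));
  return 0;
}
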
6798 | /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done | |
6799 | in MODE. | |
6800 | ||
6801 | Return an equivalent form, if different from X. Otherwise, return X. If | |
6802 | X is zero, we are to always construct the equivalent form. */ | |
6803 | ||
6804 | static rtx | |
6805 | simplify_and_const_int (x, mode, varop, constop) | |
6806 | rtx x; | |
6807 | enum machine_mode mode; | |
6808 | rtx varop; | |
5f4f0e22 | 6809 | unsigned HOST_WIDE_INT constop; |
230d793d | 6810 | { |
951553af | 6811 | unsigned HOST_WIDE_INT nonzero; |
9fa6d012 | 6812 | int width = GET_MODE_BITSIZE (mode); |
42301240 | 6813 | int i; |
230d793d | 6814 | |
6139ff20 RK |
6815 | /* Simplify VAROP knowing that we will be only looking at some of the |
6816 | bits in it. */ | |
e3d616e3 | 6817 | varop = force_to_mode (varop, mode, constop, NULL_RTX, 0); |
230d793d | 6818 | |
6139ff20 RK |
6819 | /* If VAROP is a CLOBBER, we will fail so return it; if it is a |
6820 | CONST_INT, we are done. */ | |
6821 | if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT) | |
6822 | return varop; | |
230d793d | 6823 | |
fc06d7aa RK |
6824 | /* See what bits may be nonzero in VAROP. Unlike the general case of |
6825 | a call to nonzero_bits, here we don't care about bits outside | |
6826 | MODE. */ | |
6827 | ||
6828 | nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode); | |
230d793d | 6829 | |
9fa6d012 TG |
6830 | /* If this would be an entire word for the target, but is not for |
6831 | the host, then sign-extend on the host so that the number will look | |
6832 | the same way on the host that it would on the target. | |
6833 | ||
6834 | For example, when building a 64 bit alpha hosted 32 bit sparc | |
6835 | targeted compiler, then we want the 32 bit unsigned value -1 to be | |
6836 | represented as a 64 bit value -1, and not as 0x00000000ffffffff. | |
6837 | The latter confuses the sparc backend. */ | |
6838 | ||
6839 | if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width | |
6840 | && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1)))) | |
6841 | nonzero |= ((HOST_WIDE_INT) (-1) << width); | |
6842 | ||
230d793d | 6843 | /* Turn off all bits in the constant that are known to already be zero. |
951553af | 6844 | Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS |
230d793d RS |
6845 | which is tested below. */ |
6846 | ||
951553af | 6847 | constop &= nonzero; |
230d793d RS |
6848 | |
6849 | /* If we don't have any bits left, return zero. */ | |
6850 | if (constop == 0) | |
6851 | return const0_rtx; | |
6852 | ||
42301240 RK |
6853 | /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is |
6854 | a power of two, we can replace this with an ASHIFT. */ | |
6855 | if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1 | |
6856 | && (i = exact_log2 (constop)) >= 0) | |
6857 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i); | |
6858 | ||
6139ff20 RK |
6859 | /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR |
6860 | or XOR, then try to apply the distributive law. This may eliminate | |
6861 | operations if either branch can be simplified because of the AND. | |
6862 | It may also make some cases more complex, but those cases probably | |
6863 | won't match a pattern either with or without this. */ | |
6864 | ||
6865 | if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR) | |
6866 | return | |
6867 | gen_lowpart_for_combine | |
6868 | (mode, | |
6869 | apply_distributive_law | |
6870 | (gen_binary (GET_CODE (varop), GET_MODE (varop), | |
6871 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
6872 | XEXP (varop, 0), constop), | |
6873 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
6874 | XEXP (varop, 1), constop)))); | |
6875 | ||
230d793d RS |
6876 | /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG |
6877 | if we already had one (just check for the simplest cases). */ | |
6878 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
6879 | && GET_MODE (XEXP (x, 0)) == mode | |
6880 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
6881 | varop = XEXP (x, 0); | |
6882 | else | |
6883 | varop = gen_lowpart_for_combine (mode, varop); | |
6884 | ||
6885 | /* If we can't make the SUBREG, try to return what we were given. */ | |
6886 | if (GET_CODE (varop) == CLOBBER) | |
6887 | return x ? x : varop; | |
6888 | ||
6889 | /* If we are only masking insignificant bits, return VAROP. */ | |
951553af | 6890 | if (constop == nonzero) |
230d793d RS |
6891 | x = varop; |
6892 | ||
6893 | /* Otherwise, return an AND. See how much, if any, of X we can use. */ | |
6894 | else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode) | |
6139ff20 | 6895 | x = gen_binary (AND, mode, varop, GEN_INT (constop)); |
230d793d RS |
6896 | |
6897 | else | |
6898 | { | |
6899 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
6900 | || INTVAL (XEXP (x, 1)) != constop) | |
5f4f0e22 | 6901 | SUBST (XEXP (x, 1), GEN_INT (constop)); |
230d793d RS |
6902 | |
6903 | SUBST (XEXP (x, 0), varop); | |
6904 | } | |
6905 | ||
6906 | return x; | |
6907 | } | |
6908 | \f | |
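
The NEG-to-ASHIFT special case above relies on the fact that negating a value known to be 0 or 1 yields either 0 or all ones, so masking with a single bit is the same as shifting the value into that bit position. A small exhaustive check in plain C, for illustration only:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x <= 1; x++)
    {
      /* -x is either 0 or all ones, so masking it with the single bit 2^k
	 picks out x shifted to position k: (-x) & (1 << k)  ==  x << k.  */
      for (int k = 0; k < 32; k++)
	assert (((-x) & (1u << k)) == (x << k));
    }
  return 0;
}
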
6909 | /* Given an expression, X, compute which bits in X can be non-zero. | |
6910 | We don't care about bits outside of those defined in MODE. | |
6911 | ||
6912 | For most X this is simply GET_MODE_MASK (MODE), but if X is | |
6913 | a shift, AND, or zero_extract, we can do better. */ | |
6914 | ||
5f4f0e22 | 6915 | static unsigned HOST_WIDE_INT |
951553af | 6916 | nonzero_bits (x, mode) |
230d793d RS |
6917 | rtx x; |
6918 | enum machine_mode mode; | |
6919 | { | |
951553af RK |
6920 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); |
6921 | unsigned HOST_WIDE_INT inner_nz; | |
230d793d RS |
6922 | enum rtx_code code; |
6923 | int mode_width = GET_MODE_BITSIZE (mode); | |
6924 | rtx tem; | |
6925 | ||
1c75dfa4 RK |
6926 | /* For floating-point values, assume all bits are needed. */ |
6927 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)) | |
6928 | return nonzero; | |
6929 | ||
230d793d RS |
6930 | /* If X is wider than MODE, use its mode instead. */ |
6931 | if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) | |
6932 | { | |
6933 | mode = GET_MODE (x); | |
951553af | 6934 | nonzero = GET_MODE_MASK (mode); |
230d793d RS |
6935 | mode_width = GET_MODE_BITSIZE (mode); |
6936 | } | |
6937 | ||
5f4f0e22 | 6938 | if (mode_width > HOST_BITS_PER_WIDE_INT) |
230d793d RS |
6939 | /* Our only callers in this case look for single bit values. So |
6940 | just return the mode mask. Those tests will then be false. */ | |
951553af | 6941 | return nonzero; |
230d793d | 6942 | |
8baf60bb | 6943 | #ifndef WORD_REGISTER_OPERATIONS |
c6965c0f | 6944 | /* If MODE is wider than X, but both are a single word for both the host |
0840fd91 RK |
6945 | and target machines, we can compute this from which bits of the |
6946 | object might be nonzero in its own mode, taking into account the fact | |
6947 | that on many CISC machines, accessing an object in a wider mode | |
6948 | causes the high-order bits to become undefined. So they are | |
6949 | not known to be zero. */ | |
6950 | ||
6951 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
6952 | && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD | |
6953 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
c6965c0f | 6954 | && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) |
0840fd91 RK |
6955 | { |
6956 | nonzero &= nonzero_bits (x, GET_MODE (x)); | |
6957 | nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)); | |
6958 | return nonzero; | |
6959 | } | |
6960 | #endif | |
6961 | ||
230d793d RS |
6962 | code = GET_CODE (x); |
6963 | switch (code) | |
6964 | { | |
6965 | case REG: | |
320dd7a7 RK |
6966 | #ifdef POINTERS_EXTEND_UNSIGNED |
6967 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
6968 | all the bits above ptr_mode are known to be zero. */ | |
6969 | if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
6970 | && REGNO_POINTER_FLAG (REGNO (x))) | |
6971 | nonzero &= GET_MODE_MASK (ptr_mode); | |
6972 | #endif | |
6973 | ||
b0d71df9 RK |
6974 | #ifdef STACK_BOUNDARY |
6975 | /* If this is the stack pointer, we may know something about its | |
6976 | alignment. If PUSH_ROUNDING is defined, it is possible for the | |
230d793d RS |
6977 | stack to be momentarily aligned only to that amount, so we pick |
6978 | the least alignment. */ | |
6979 | ||
ee49a9c7 JW |
6980 | /* We can't check for arg_pointer_rtx here, because it is not |
6981 | guaranteed to have as much alignment as the stack pointer. | |
6982 | In particular, in the Irix6 n64 ABI, the stack has 128 bit | |
6983 | alignment but the argument pointer has only 64 bit alignment. */ | |
6984 | ||
b0d71df9 | 6985 | if (x == stack_pointer_rtx || x == frame_pointer_rtx |
ee49a9c7 | 6986 | || x == hard_frame_pointer_rtx |
b0d71df9 RK |
6987 | || (REGNO (x) >= FIRST_VIRTUAL_REGISTER |
6988 | && REGNO (x) <= LAST_VIRTUAL_REGISTER)) | |
230d793d | 6989 | { |
b0d71df9 | 6990 | int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT; |
230d793d RS |
6991 | |
6992 | #ifdef PUSH_ROUNDING | |
91102d5a | 6993 | if (REGNO (x) == STACK_POINTER_REGNUM) |
b0d71df9 | 6994 | sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment); |
230d793d RS |
6995 | #endif |
6996 | ||
320dd7a7 RK |
6997 | /* We must return here, otherwise we may get a worse result from |
6998 | one of the choices below. There is nothing useful below as | |
6999 | far as the stack pointer is concerned. */ | |
b0d71df9 | 7000 | return nonzero &= ~ (sp_alignment - 1); |
230d793d | 7001 | } |
b0d71df9 | 7002 | #endif |
230d793d | 7003 | |
55310dad RK |
7004 | /* If X is a register whose nonzero bits value is current, use it. |
7005 | Otherwise, if X is a register whose value we can find, use that | |
7006 | value. Otherwise, use the previously-computed global nonzero bits | |
7007 | for this register. */ | |
7008 | ||
7009 | if (reg_last_set_value[REGNO (x)] != 0 | |
7010 | && reg_last_set_mode[REGNO (x)] == mode | |
7011 | && (reg_n_sets[REGNO (x)] == 1 | |
7012 | || reg_last_set_label[REGNO (x)] == label_tick) | |
7013 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) | |
7014 | return reg_last_set_nonzero_bits[REGNO (x)]; | |
230d793d RS |
7015 | |
7016 | tem = get_last_value (x); | |
9afa3d54 | 7017 | |
230d793d | 7018 | if (tem) |
9afa3d54 RK |
7019 | { |
7020 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
7021 | /* If X is narrower than MODE and TEM is a non-negative | |
7022 | constant that would appear negative in the mode of X, | |
7023 | sign-extend it for use in reg_nonzero_bits because some | |
7024 | machines (maybe most) will actually do the sign-extension | |
7025 | and this is the conservative approach. | |
7026 | ||
7027 | ??? For 2.5, try to tighten up the MD files in this regard | |
7028 | instead of this kludge. */ | |
7029 | ||
7030 | if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width | |
7031 | && GET_CODE (tem) == CONST_INT | |
7032 | && INTVAL (tem) > 0 | |
7033 | && 0 != (INTVAL (tem) | |
7034 | & ((HOST_WIDE_INT) 1 | |
9e69be8c | 7035 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
9afa3d54 RK |
7036 | tem = GEN_INT (INTVAL (tem) |
7037 | | ((HOST_WIDE_INT) (-1) | |
7038 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
7039 | #endif | |
7040 | return nonzero_bits (tem, mode); | |
7041 | } | |
951553af RK |
7042 | else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)]) |
7043 | return reg_nonzero_bits[REGNO (x)] & nonzero; | |
230d793d | 7044 | else |
951553af | 7045 | return nonzero; |
230d793d RS |
7046 | |
7047 | case CONST_INT: | |
9afa3d54 RK |
7048 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND |
7049 | /* If X is negative in MODE, sign-extend the value. */ | |
9e69be8c RK |
7050 | if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD |
7051 | && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) | |
7052 | return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); | |
9afa3d54 RK |
7053 | #endif |
7054 | ||
230d793d RS |
7055 | return INTVAL (x); |
7056 | ||
230d793d | 7057 | case MEM: |
8baf60bb | 7058 | #ifdef LOAD_EXTEND_OP |
230d793d RS |
7059 | /* In many, if not most, RISC machines, reading a byte from memory |
7060 | zeros the rest of the register. Noticing that fact saves a lot | |
7061 | of extra zero-extends. */ | |
8baf60bb RK |
7062 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) |
7063 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
230d793d | 7064 | #endif |
8baf60bb | 7065 | break; |
230d793d | 7066 | |
230d793d RS |
7067 | case EQ: case NE: |
7068 | case GT: case GTU: | |
7069 | case LT: case LTU: | |
7070 | case GE: case GEU: | |
7071 | case LE: case LEU: | |
3f508eca | 7072 | |
c6965c0f RK |
7073 | /* If this produces an integer result, we know which bits are set. |
7074 | Code here used to clear bits outside the mode of X, but that is | |
7075 | now done above. */ | |
230d793d | 7076 | |
c6965c0f RK |
7077 | if (GET_MODE_CLASS (mode) == MODE_INT |
7078 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
7079 | nonzero = STORE_FLAG_VALUE; | |
230d793d | 7080 | break; |
230d793d | 7081 | |
230d793d | 7082 | case NEG: |
d0ab8cd3 RK |
7083 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) |
7084 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 7085 | nonzero = 1; |
230d793d RS |
7086 | |
7087 | if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) | |
951553af | 7088 | nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x))); |
230d793d | 7089 | break; |
d0ab8cd3 RK |
7090 | |
7091 | case ABS: | |
7092 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
7093 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 7094 | nonzero = 1; |
d0ab8cd3 | 7095 | break; |
230d793d RS |
7096 | |
7097 | case TRUNCATE: | |
951553af | 7098 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode)); |
230d793d RS |
7099 | break; |
7100 | ||
7101 | case ZERO_EXTEND: | |
951553af | 7102 | nonzero &= nonzero_bits (XEXP (x, 0), mode); |
230d793d | 7103 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
951553af | 7104 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
230d793d RS |
7105 | break; |
7106 | ||
7107 | case SIGN_EXTEND: | |
7108 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
7109 | Otherwise, show all the bits in the outer mode but not the inner | |
7110 | may be non-zero. */ | |
951553af | 7111 | inner_nz = nonzero_bits (XEXP (x, 0), mode); |
230d793d RS |
7112 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
7113 | { | |
951553af RK |
7114 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
7115 | if (inner_nz & | |
5f4f0e22 CH |
7116 | (((HOST_WIDE_INT) 1 |
7117 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) | |
951553af | 7118 | inner_nz |= (GET_MODE_MASK (mode) |
230d793d RS |
7119 | & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); |
7120 | } | |
7121 | ||
951553af | 7122 | nonzero &= inner_nz; |
230d793d RS |
7123 | break; |
7124 | ||
7125 | case AND: | |
951553af RK |
7126 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) |
7127 | & nonzero_bits (XEXP (x, 1), mode)); | |
230d793d RS |
7128 | break; |
7129 | ||
d0ab8cd3 RK |
7130 | case XOR: case IOR: |
7131 | case UMIN: case UMAX: case SMIN: case SMAX: | |
951553af RK |
7132 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) |
7133 | | nonzero_bits (XEXP (x, 1), mode)); | |
230d793d RS |
7134 | break; |
7135 | ||
7136 | case PLUS: case MINUS: | |
7137 | case MULT: | |
7138 | case DIV: case UDIV: | |
7139 | case MOD: case UMOD: | |
7140 | /* We can apply the rules of arithmetic to compute the number of | |
7141 | high- and low-order zero bits of these operations. We start by | |
7142 | computing the width (position of the highest-order non-zero bit) | |
7143 | and the number of low-order zero bits for each value. */ | |
7144 | { | |
951553af RK |
7145 | unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode); |
7146 | unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode); | |
7147 | int width0 = floor_log2 (nz0) + 1; | |
7148 | int width1 = floor_log2 (nz1) + 1; | |
7149 | int low0 = floor_log2 (nz0 & -nz0); | |
7150 | int low1 = floor_log2 (nz1 & -nz1); | |
318b149c RK |
7151 | HOST_WIDE_INT op0_maybe_minusp |
7152 | = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
7153 | HOST_WIDE_INT op1_maybe_minusp | |
7154 | = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
230d793d RS |
7155 | int result_width = mode_width; |
7156 | int result_low = 0; | |
7157 | ||
7158 | switch (code) | |
7159 | { | |
7160 | case PLUS: | |
7161 | result_width = MAX (width0, width1) + 1; | |
7162 | result_low = MIN (low0, low1); | |
7163 | break; | |
7164 | case MINUS: | |
7165 | result_low = MIN (low0, low1); | |
7166 | break; | |
7167 | case MULT: | |
7168 | result_width = width0 + width1; | |
7169 | result_low = low0 + low1; | |
7170 | break; | |
7171 | case DIV: | |
7172 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
7173 | result_width = width0; | |
7174 | break; | |
7175 | case UDIV: | |
7176 | result_width = width0; | |
7177 | break; | |
7178 | case MOD: | |
7179 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
7180 | result_width = MIN (width0, width1); | |
7181 | result_low = MIN (low0, low1); | |
7182 | break; | |
7183 | case UMOD: | |
7184 | result_width = MIN (width0, width1); | |
7185 | result_low = MIN (low0, low1); | |
7186 | break; | |
7187 | } | |
7188 | ||
7189 | if (result_width < mode_width) | |
951553af | 7190 | nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; |
230d793d RS |
7191 | |
7192 | if (result_low > 0) | |
951553af | 7193 | nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1); |
230d793d RS |
7194 | } |
7195 | break; | |
7196 | ||
7197 | case ZERO_EXTRACT: | |
7198 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 7199 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
951553af | 7200 | nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
230d793d RS |
7201 | break; |
7202 | ||
7203 | case SUBREG: | |
c3c2cb37 RK |
7204 | /* If this is a SUBREG formed for a promoted variable that has |
7205 | been zero-extended, we know that at least the high-order bits | |
7206 | are zero, though others might be too. */ | |
7207 | ||
7208 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x)) | |
951553af RK |
7209 | nonzero = (GET_MODE_MASK (GET_MODE (x)) |
7210 | & nonzero_bits (SUBREG_REG (x), GET_MODE (x))); | |
c3c2cb37 | 7211 | |
230d793d RS |
7212 | /* If the inner mode is a single word for both the host and target |
7213 | machines, we can compute this from which bits of the inner | |
951553af | 7214 | object might be nonzero. */ |
230d793d | 7215 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD |
5f4f0e22 CH |
7216 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) |
7217 | <= HOST_BITS_PER_WIDE_INT)) | |
230d793d | 7218 | { |
951553af | 7219 | nonzero &= nonzero_bits (SUBREG_REG (x), mode); |
8baf60bb RK |
7220 | |
7221 | #ifndef WORD_REGISTER_OPERATIONS | |
230d793d RS |
7222 | /* On many CISC machines, accessing an object in a wider mode |
7223 | causes the high-order bits to become undefined. So they are | |
7224 | not known to be zero. */ | |
7225 | if (GET_MODE_SIZE (GET_MODE (x)) | |
7226 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
951553af RK |
7227 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) |
7228 | & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); | |
230d793d RS |
7229 | #endif |
7230 | } | |
7231 | break; | |
7232 | ||
7233 | case ASHIFTRT: | |
7234 | case LSHIFTRT: | |
7235 | case ASHIFT: | |
230d793d | 7236 | case ROTATE: |
951553af | 7237 | /* The nonzero bits are in two classes: any bits within MODE |
230d793d | 7238 | that aren't in GET_MODE (x) are always significant. The rest of the |
951553af | 7239 | nonzero bits are those that are significant in the operand of |
230d793d RS |
7240 | the shift when shifted the appropriate number of bits. This |
7241 | shows that high-order bits are cleared by the right shift and | |
7242 | low-order bits by left shifts. */ | |
7243 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7244 | && INTVAL (XEXP (x, 1)) >= 0 | |
5f4f0e22 | 7245 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
7246 | { |
7247 | enum machine_mode inner_mode = GET_MODE (x); | |
7248 | int width = GET_MODE_BITSIZE (inner_mode); | |
7249 | int count = INTVAL (XEXP (x, 1)); | |
5f4f0e22 | 7250 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); |
951553af RK |
7251 | unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode); |
7252 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; | |
5f4f0e22 | 7253 | unsigned HOST_WIDE_INT outer = 0; |
230d793d RS |
7254 | |
7255 | if (mode_width > width) | |
951553af | 7256 | outer = (op_nonzero & nonzero & ~ mode_mask); |
230d793d RS |
7257 | |
7258 | if (code == LSHIFTRT) | |
7259 | inner >>= count; | |
7260 | else if (code == ASHIFTRT) | |
7261 | { | |
7262 | inner >>= count; | |
7263 | ||
951553af | 7264 | /* If the sign bit may have been nonzero before the shift, we |
230d793d | 7265 | need to mark all the places it could have been copied to |
951553af | 7266 | by the shift as possibly nonzero. */ |
5f4f0e22 CH |
7267 | if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) |
7268 | inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); | |
230d793d | 7269 | } |
45620ed4 | 7270 | else if (code == ASHIFT) |
230d793d RS |
7271 | inner <<= count; |
7272 | else | |
7273 | inner = ((inner << (count % width) | |
7274 | | (inner >> (width - (count % width)))) & mode_mask); | |
7275 | ||
951553af | 7276 | nonzero &= (outer | inner); |
230d793d RS |
7277 | } |
7278 | break; | |
7279 | ||
7280 | case FFS: | |
7281 | /* This is at most the number of bits in the mode. */ | |
951553af | 7282 | nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1; |
230d793d | 7283 | break; |
d0ab8cd3 RK |
7284 | |
7285 | case IF_THEN_ELSE: | |
951553af RK |
7286 | nonzero &= (nonzero_bits (XEXP (x, 1), mode) |
7287 | | nonzero_bits (XEXP (x, 2), mode)); | |
d0ab8cd3 | 7288 | break; |
230d793d RS |
7289 | } |
7290 | ||
951553af | 7291 | return nonzero; |
230d793d RS |
7292 | } |
7293 | \f | |
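
The PLUS and MULT cases above track the result's width and its low-order zero bits with simple arithmetic rules. The sketch below restates those rules in plain C; width and low_zeros are made-up stand-ins for the floor_log2-style bookkeeping, and the sample operands are arbitrary:

#include <assert.h>
#include <stdint.h>

/* Position of the highest set bit plus one (0 for 0).  */
static int
width (uint32_t v)
{
  int w = 0;
  while (v)
    v >>= 1, w++;
  return w;
}

/* Number of trailing zero bits (32 for 0).  */
static int
low_zeros (uint32_t v)
{
  int n = 0;
  if (v == 0)
    return 32;
  while ((v & 1) == 0)
    v >>= 1, n++;
  return n;
}

int
main (void)
{
  uint32_t a = 0x0f30u, b = 0x0060u;

  /* PLUS: the sum fits in MAX (width0, width1) + 1 bits.  */
  assert (width (a + b) <= (width (a) > width (b) ? width (a) : width (b)) + 1);

  /* MULT: the product fits in width0 + width1 bits and has at least
     low0 + low1 trailing zero bits.  */
  assert (width (a * b) <= width (a) + width (b));
  assert (low_zeros (a * b) >= low_zeros (a) + low_zeros (b));
  return 0;
}
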
d0ab8cd3 | 7294 | /* Return the number of bits at the high-order end of X that are known to |
5109d49f RK |
7295 | be equal to the sign bit. X will be used in mode MODE; if MODE is |
7296 | VOIDmode, X will be used in its own mode. The returned value will always | |
7297 | be between 1 and the number of bits in MODE. */ | |
d0ab8cd3 RK |
7298 | |
7299 | static int | |
7300 | num_sign_bit_copies (x, mode) | |
7301 | rtx x; | |
7302 | enum machine_mode mode; | |
7303 | { | |
7304 | enum rtx_code code = GET_CODE (x); | |
7305 | int bitwidth; | |
7306 | int num0, num1, result; | |
951553af | 7307 | unsigned HOST_WIDE_INT nonzero; |
d0ab8cd3 RK |
7308 | rtx tem; |
7309 | ||
7310 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
1c75dfa4 RK |
7311 | VOIDmode, we don't know anything. Likewise if one of the modes is |
7312 | floating-point. */ | |
d0ab8cd3 RK |
7313 | |
7314 | if (mode == VOIDmode) | |
7315 | mode = GET_MODE (x); | |
7316 | ||
1c75dfa4 | 7317 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))) |
6752e8d2 | 7318 | return 1; |
d0ab8cd3 RK |
7319 | |
7320 | bitwidth = GET_MODE_BITSIZE (mode); | |
7321 | ||
312def2e RK |
7322 | /* For a smaller object, just ignore the high bits. */ |
7323 | if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) | |
7324 | return MAX (1, (num_sign_bit_copies (x, GET_MODE (x)) | |
7325 | - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth))); | |
7326 | ||
0c314d1a RK |
7327 | #ifndef WORD_REGISTER_OPERATIONS |
7328 | /* If this machine does not do all register operations on the entire | |
7329 | register and MODE is wider than the mode of X, we can say nothing | |
7330 | at all about the high-order bits. */ | |
7331 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) | |
7332 | return 1; | |
7333 | #endif | |
7334 | ||
d0ab8cd3 RK |
7335 | switch (code) |
7336 | { | |
7337 | case REG: | |
55310dad | 7338 | |
ff0dbdd1 RK |
7339 | #ifdef POINTERS_EXTEND_UNSIGNED |
7340 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
7341 | all the bits above ptr_mode are known to be sign bit copies. */ | |
7342 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode | |
7343 | && REGNO_POINTER_FLAG (REGNO (x))) | |
7344 | return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; | |
7345 | #endif | |
7346 | ||
55310dad RK |
7347 | if (reg_last_set_value[REGNO (x)] != 0 |
7348 | && reg_last_set_mode[REGNO (x)] == mode | |
7349 | && (reg_n_sets[REGNO (x)] == 1 | |
7350 | || reg_last_set_label[REGNO (x)] == label_tick) | |
7351 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) | |
7352 | return reg_last_set_sign_bit_copies[REGNO (x)]; | |
d0ab8cd3 RK |
7353 | |
7354 | tem = get_last_value (x); | |
7355 | if (tem != 0) | |
7356 | return num_sign_bit_copies (tem, mode); | |
55310dad RK |
7357 | |
7358 | if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0) | |
7359 | return reg_sign_bit_copies[REGNO (x)]; | |
d0ab8cd3 RK |
7360 | break; |
7361 | ||
457816e2 | 7362 | case MEM: |
8baf60bb | 7363 | #ifdef LOAD_EXTEND_OP |
457816e2 | 7364 | /* Some RISC machines sign-extend all loads of smaller than a word. */ |
8baf60bb RK |
7365 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) |
7366 | return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1); | |
457816e2 | 7367 | #endif |
8baf60bb | 7368 | break; |
457816e2 | 7369 | |
d0ab8cd3 RK |
7370 | case CONST_INT: |
7371 | /* If the constant is negative, take its 1's complement and remask. | |
7372 | Then see how many zero bits we have. */ | |
951553af | 7373 | nonzero = INTVAL (x) & GET_MODE_MASK (mode); |
ac49a949 | 7374 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af RK |
7375 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
7376 | nonzero = (~ nonzero) & GET_MODE_MASK (mode); | |
d0ab8cd3 | 7377 | |
951553af | 7378 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
7379 | |
7380 | case SUBREG: | |
c3c2cb37 RK |
7381 | /* If this is a SUBREG for a promoted object that is sign-extended |
7382 | and we are looking at it in a wider mode, we know that at least the | |
7383 | high-order bits are known to be sign bit copies. */ | |
7384 | ||
7385 | if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) | |
dc3e17ad RK |
7386 | return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1, |
7387 | num_sign_bit_copies (SUBREG_REG (x), mode)); | |
c3c2cb37 | 7388 | |
d0ab8cd3 RK |
7389 | /* For a smaller object, just ignore the high bits. */ |
7390 | if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) | |
7391 | { | |
7392 | num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode); | |
7393 | return MAX (1, (num0 | |
7394 | - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) | |
7395 | - bitwidth))); | |
7396 | } | |
457816e2 | 7397 | |
8baf60bb | 7398 | #ifdef WORD_REGISTER_OPERATIONS |
2aec5b7a | 7399 | #ifdef LOAD_EXTEND_OP |
8baf60bb RK |
7400 | /* For paradoxical SUBREGs on machines where all register operations |
7401 | affect the entire register, just look inside. Note that we are | |
7402 | passing MODE to the recursive call, so the number of sign bit copies | |
7403 | will remain relative to that mode, not the inner mode. */ | |
457816e2 | 7404 | |
2aec5b7a JW |
7405 | /* This works only if loads sign extend. Otherwise, if we get a |
7406 | reload for the inner part, it may be loaded from the stack, and | |
7407 | then we lose all sign bit copies that existed before the store | |
7408 | to the stack. */ | |
7409 | ||
7410 | if ((GET_MODE_SIZE (GET_MODE (x)) | |
7411 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
7412 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND) | |
457816e2 | 7413 | return num_sign_bit_copies (SUBREG_REG (x), mode); |
2aec5b7a | 7414 | #endif |
457816e2 | 7415 | #endif |
d0ab8cd3 RK |
7416 | break; |
7417 | ||
7418 | case SIGN_EXTRACT: | |
7419 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
7420 | return MAX (1, bitwidth - INTVAL (XEXP (x, 1))); | |
7421 | break; | |
7422 | ||
7423 | case SIGN_EXTEND: | |
7424 | return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
7425 | + num_sign_bit_copies (XEXP (x, 0), VOIDmode)); | |
7426 | ||
7427 | case TRUNCATE: | |
7428 | /* For a smaller object, just ignore the high bits. */ | |
7429 | num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode); | |
7430 | return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
7431 | - bitwidth))); | |
7432 | ||
7433 | case NOT: | |
7434 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
7435 | ||
7436 | case ROTATE: case ROTATERT: | |
7437 | /* If we are rotating left by a number of bits less than the number | |
7438 | of sign bit copies, we can just subtract that amount from the | |
7439 | number. */ | |
7440 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7441 | && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth) | |
7442 | { | |
7443 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7444 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
7445 | : bitwidth - INTVAL (XEXP (x, 1)))); | |
7446 | } | |
7447 | break; | |
7448 | ||
7449 | case NEG: | |
7450 | /* In general, this subtracts one sign bit copy. But if the value | |
7451 | is known to be positive, the number of sign bit copies is the | |
951553af RK |
7452 | same as that of the input. Finally, if the input has just one bit |
7453 | that might be nonzero, all the bits are copies of the sign bit. */ | |
7454 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
7455 | if (nonzero == 1) | |
d0ab8cd3 RK |
7456 | return bitwidth; |
7457 | ||
7458 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7459 | if (num0 > 1 | |
ac49a949 | 7460 | && bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 7461 | && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
d0ab8cd3 RK |
7462 | num0--; |
7463 | ||
7464 | return num0; | |
7465 | ||
7466 | case IOR: case AND: case XOR: | |
7467 | case SMIN: case SMAX: case UMIN: case UMAX: | |
7468 | /* Logical operations will preserve the number of sign-bit copies. | |
7469 | MIN and MAX operations always return one of the operands. */ | |
7470 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7471 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
7472 | return MIN (num0, num1); | |
7473 | ||
7474 | case PLUS: case MINUS: | |
7475 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
7476 | if we are subtracting 1 from a positive number, there will not | |
7477 | be such a carry. Furthermore, if the positive number is known to | |
7478 | be 0 or 1, we know the result is either -1 or 0. */ | |
7479 | ||
3e3ea975 | 7480 | if (code == PLUS && XEXP (x, 1) == constm1_rtx |
9295e6af | 7481 | && bitwidth <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 7482 | { |
951553af RK |
7483 | nonzero = nonzero_bits (XEXP (x, 0), mode); |
7484 | if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) | |
7485 | return (nonzero == 1 || nonzero == 0 ? bitwidth | |
7486 | : bitwidth - floor_log2 (nonzero) - 1); | |
d0ab8cd3 RK |
7487 | } |
7488 | ||
7489 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7490 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
7491 | return MAX (1, MIN (num0, num1) - 1); | |
7492 | ||
7493 | case MULT: | |
7494 | /* The number of bits of the product is the sum of the number of | |
7495 | bits of both terms. However, unless one of the terms is known | |
7496 | to be positive, we must allow for an additional bit since negating | |
7497 | a negative number can remove one sign bit copy. */ | |
7498 | ||
7499 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7500 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
7501 | ||
7502 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
7503 | if (result > 0 | |
9295e6af | 7504 | && bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 7505 | && ((nonzero_bits (XEXP (x, 0), mode) |
d0ab8cd3 | 7506 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
01c82bbb RK |
7507 | && ((nonzero_bits (XEXP (x, 1), mode) |
7508 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
d0ab8cd3 RK |
7509 | result--; |
7510 | ||
7511 | return MAX (1, result); | |
7512 | ||
7513 | case UDIV: | |
7514 | /* The result must be <= the first operand. */ | |
7515 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
7516 | ||
7517 | case UMOD: | |
7518 | /* The result must be <= the second operand. */ | |
7519 | return num_sign_bit_copies (XEXP (x, 1), mode); | |
7520 | ||
7521 | case DIV: | |
7522 | /* Similar to unsigned division, except that we have to worry about | |
7523 | the case where the divisor is negative, in which case we have | |
7524 | to add 1. */ | |
7525 | result = num_sign_bit_copies (XEXP (x, 0), mode); | |
7526 | if (result > 1 | |
ac49a949 | 7527 | && bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 7528 | && (nonzero_bits (XEXP (x, 1), mode) |
d0ab8cd3 RK |
7529 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
7530 | result --; | |
7531 | ||
7532 | return result; | |
7533 | ||
7534 | case MOD: | |
7535 | result = num_sign_bit_copies (XEXP (x, 1), mode); | |
7536 | if (result > 1 | |
ac49a949 | 7537 | && bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 7538 | && (nonzero_bits (XEXP (x, 1), mode) |
d0ab8cd3 RK |
7539 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
7540 | result --; | |
7541 | ||
7542 | return result; | |
7543 | ||
7544 | case ASHIFTRT: | |
7545 | /* Shifts by a constant add to the number of bits equal to the | |
7546 | sign bit. */ | |
7547 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7548 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7549 | && INTVAL (XEXP (x, 1)) > 0) | |
7550 | num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1))); | |
7551 | ||
7552 | return num0; | |
7553 | ||
7554 | case ASHIFT: | |
d0ab8cd3 RK |
7555 | /* Left shifts destroy copies. */ |
7556 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
7557 | || INTVAL (XEXP (x, 1)) < 0 | |
7558 | || INTVAL (XEXP (x, 1)) >= bitwidth) | |
7559 | return 1; | |
7560 | ||
7561 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
7562 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
7563 | ||
7564 | case IF_THEN_ELSE: | |
7565 | num0 = num_sign_bit_copies (XEXP (x, 1), mode); | |
7566 | num1 = num_sign_bit_copies (XEXP (x, 2), mode); | |
7567 | return MIN (num0, num1); | |
7568 | ||
7569 | #if STORE_FLAG_VALUE == -1 | |
7570 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
7571 | case GEU: case GTU: case LEU: case LTU: | |
7572 | return bitwidth; | |
7573 | #endif | |
7574 | } | |
7575 | ||
7576 | /* If we haven't been able to figure it out by one of the above rules, | |
7577 | see if some of the high-order bits are known to be zero. If so, | |
ac49a949 RS |
7578 | count those bits and return that amount. If we can't | |
7579 | safely compute the mask for this mode, just return 1. */ | |
7580 | ||
7581 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
6752e8d2 | 7582 | return 1; |
d0ab8cd3 | 7583 | |
951553af | 7584 | nonzero = nonzero_bits (x, mode); |
df6f4086 | 7585 | return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) |
951553af | 7586 | ? 1 : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
7587 | } |
7588 | \f | |
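
What num_sign_bit_copies computes, and how an arithmetic right shift by a constant adds to it, can be modeled directly on 32-bit values. The helper sign_bit_copies below is an illustrative model written for this note, not the GCC routine:

#include <assert.h>
#include <stdint.h>

/* Count how many high-order bits of a 32-bit value equal its sign bit,
   including the sign bit itself.  */
static int
sign_bit_copies (int32_t v)
{
  uint32_t u = (uint32_t) v;
  uint32_t sign = u >> 31;
  int n = 1;

  while (n < 32 && ((u >> (31 - n)) & 1) == sign)
    n++;
  return n;
}

int
main (void)
{
  assert (sign_bit_copies (-1) == 32);   /* every bit equals the sign bit */
  assert (sign_bit_copies (3) == 30);    /* 30 high-order zero bits       */

  /* An arithmetic right shift by a constant adds that many copies,
     as in the ASHIFTRT case above.  */
  int32_t x = 0x12345678;
  assert (sign_bit_copies (x) == 3);
  assert (sign_bit_copies (x >> 8) == 3 + 8);
  return 0;
}
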
1a26b032 RK |
7589 | /* Return the number of "extended" bits there are in X, when interpreted |
7590 | as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For | |
7591 | unsigned quantities, this is the number of high-order zero bits. | |
7592 | For signed quantities, this is the number of copies of the sign bit | |
7593 | minus 1. In both cases, this function returns the number of "spare" | |
7594 | bits. For example, if two quantities for which this function returns | |
7595 | at least 1 are added, the addition is known not to overflow. | |
7596 | ||
7597 | This function will always return 0 unless called during combine, which | |
7598 | implies that it must be called from a define_split. */ | |
7599 | ||
7600 | int | |
7601 | extended_count (x, mode, unsignedp) | |
7602 | rtx x; | |
7603 | enum machine_mode mode; | |
7604 | int unsignedp; | |
7605 | { | |
951553af | 7606 | if (nonzero_sign_valid == 0) |
1a26b032 RK |
7607 | return 0; |
7608 | ||
7609 | return (unsignedp | |
ac49a949 RS |
7610 | ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
7611 | && (GET_MODE_BITSIZE (mode) - 1 | |
951553af | 7612 | - floor_log2 (nonzero_bits (x, mode)))) |
1a26b032 RK |
7613 | : num_sign_bit_copies (x, mode) - 1); |
7614 | } | |
7615 | \f | |
230d793d RS |
7616 | /* This function is called from `simplify_shift_const' to merge two |
7617 | outer operations. Specifically, we have already found that we need | |
7618 | to perform operation *POP0 with constant *PCONST0 at the outermost | |
7619 | position. We would now like to also perform OP1 with constant CONST1 | |
7620 | (with *POP0 being done last). | |
7621 | ||
7622 | Return 1 if we can do the operation and update *POP0 and *PCONST0 with | |
7623 | the resulting operation. *PCOMP_P is set to 1 if we would need to | |
7624 | complement the innermost operand, otherwise it is unchanged. | |
7625 | ||
7626 | MODE is the mode in which the operation will be done. No bits outside | |
7627 | the width of this mode matter. It is assumed that the width of this mode | |
5f4f0e22 | 7628 | is smaller than or equal to HOST_BITS_PER_WIDE_INT. |
230d793d RS |
7629 | |
7630 | If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS, | |
7631 | IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper | |
7632 | result is simply *PCONST0. | |
7633 | ||
7634 | If the resulting operation cannot be expressed as one operation, we | |
7635 | return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */ | |
7636 | ||
7637 | static int | |
7638 | merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p) | |
7639 | enum rtx_code *pop0; | |
5f4f0e22 | 7640 | HOST_WIDE_INT *pconst0; |
230d793d | 7641 | enum rtx_code op1; |
5f4f0e22 | 7642 | HOST_WIDE_INT const1; |
230d793d RS |
7643 | enum machine_mode mode; |
7644 | int *pcomp_p; | |
7645 | { | |
7646 | enum rtx_code op0 = *pop0; | |
5f4f0e22 | 7647 | HOST_WIDE_INT const0 = *pconst0; |
9fa6d012 | 7648 | int width = GET_MODE_BITSIZE (mode); |
230d793d RS |
7649 | |
7650 | const0 &= GET_MODE_MASK (mode); | |
7651 | const1 &= GET_MODE_MASK (mode); | |
7652 | ||
7653 | /* If OP0 is an AND, clear unimportant bits in CONST1. */ | |
7654 | if (op0 == AND) | |
7655 | const1 &= const0; | |
7656 | ||
7657 | /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or | |
7658 | if OP0 is SET. */ | |
7659 | ||
7660 | if (op1 == NIL || op0 == SET) | |
7661 | return 1; | |
7662 | ||
7663 | else if (op0 == NIL) | |
7664 | op0 = op1, const0 = const1; | |
7665 | ||
7666 | else if (op0 == op1) | |
7667 | { | |
7668 | switch (op0) | |
7669 | { | |
7670 | case AND: | |
7671 | const0 &= const1; | |
7672 | break; | |
7673 | case IOR: | |
7674 | const0 |= const1; | |
7675 | break; | |
7676 | case XOR: | |
7677 | const0 ^= const1; | |
7678 | break; | |
7679 | case PLUS: | |
7680 | const0 += const1; | |
7681 | break; | |
7682 | case NEG: | |
7683 | op0 = NIL; | |
7684 | break; | |
7685 | } | |
7686 | } | |
7687 | ||
7688 | /* Otherwise, if either is a PLUS or NEG, we can't do anything. */ | |
7689 | else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG) | |
7690 | return 0; | |
7691 | ||
7692 | /* If the two constants aren't the same, we can't do anything. The | |
7693 | remaining six cases can all be done. */ | |
7694 | else if (const0 != const1) | |
7695 | return 0; | |
7696 | ||
7697 | else | |
7698 | switch (op0) | |
7699 | { | |
7700 | case IOR: | |
7701 | if (op1 == AND) | |
7702 | /* (a & b) | b == b */ | |
7703 | op0 = SET; | |
7704 | else /* op1 == XOR */ | |
7705 | /* (a ^ b) | b == a | b */ | |
7706 | ; | |
7707 | break; | |
7708 | ||
7709 | case XOR: | |
7710 | if (op1 == AND) | |
7711 | /* (a & b) ^ b == (~a) & b */ | |
7712 | op0 = AND, *pcomp_p = 1; | |
7713 | else /* op1 == IOR */ | |
7714 | /* (a | b) ^ b == a & ~b */ | |
7715 | op0 = AND, *pconst0 = ~ const0; | |
7716 | break; | |
7717 | ||
7718 | case AND: | |
7719 | if (op1 == IOR) | |
7720 | /* (a | b) & b == b */ | |
7721 | op0 = SET; | |
7722 | else /* op1 == XOR */ | |
7723 | /* (a ^ b) & b == (~a) & b */ | |
7724 | *pcomp_p = 1; | |
7725 | break; | |
7726 | } | |
7727 | ||
7728 | /* Check for NO-OP cases. */ | |
7729 | const0 &= GET_MODE_MASK (mode); | |
7730 | if (const0 == 0 | |
7731 | && (op0 == IOR || op0 == XOR || op0 == PLUS)) | |
7732 | op0 = NIL; | |
7733 | else if (const0 == 0 && op0 == AND) | |
7734 | op0 = SET; | |
7735 | else if (const0 == GET_MODE_MASK (mode) && op0 == AND) | |
7736 | op0 = NIL; | |
7737 | ||
9fa6d012 TG |
7738 | /* If this would be an entire word for the target, but is not for |
7739 | the host, then sign-extend on the host so that the number will look | |
7740 | the same way on the host that it would on the target. | |
7741 | ||
7742 | For example, when building a 64 bit alpha hosted 32 bit sparc | |
7743 | targeted compiler, then we want the 32 bit unsigned value -1 to be | |
7744 | represented as a 64 bit value -1, and not as 0x00000000ffffffff. | |
7745 | The latter confuses the sparc backend. */ | |
7746 | ||
7747 | if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width | |
7748 | && (const0 & ((HOST_WIDE_INT) 1 << (width - 1)))) | |
7749 | const0 |= ((HOST_WIDE_INT) (-1) << width); | |
7750 | ||
230d793d RS |
7751 | *pop0 = op0; |
7752 | *pconst0 = const0; | |
7753 | ||
7754 | return 1; | |
7755 | } | |
7756 | \f | |
7757 | /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. | |
7758 | The result of the shift is RESULT_MODE. X, if non-zero, is an expression | |
7759 | that we started with. | |
7760 | ||
7761 | The shift is normally computed in the widest mode we find in VAROP, as | |
7762 | long as it isn't a different number of words than RESULT_MODE. Exceptions | |
7763 | are ASHIFTRT and ROTATE, which are always done in their original mode. */ | |
7764 | ||
7765 | static rtx | |
7766 | simplify_shift_const (x, code, result_mode, varop, count) | |
7767 | rtx x; | |
7768 | enum rtx_code code; | |
7769 | enum machine_mode result_mode; | |
7770 | rtx varop; | |
7771 | int count; | |
7772 | { | |
7773 | enum rtx_code orig_code = code; | |
7774 | int orig_count = count; | |
7775 | enum machine_mode mode = result_mode; | |
7776 | enum machine_mode shift_mode, tmode; | |
7777 | int mode_words | |
7778 | = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; | |
7779 | /* We form (outer_op (code varop count) (outer_const)). */ | |
7780 | enum rtx_code outer_op = NIL; | |
c4e861e8 | 7781 | HOST_WIDE_INT outer_const = 0; |
230d793d RS |
7782 | rtx const_rtx; |
7783 | int complement_p = 0; | |
7784 | rtx new; | |
7785 | ||
7786 | /* If we were given an invalid count, don't do anything except exactly | |
7787 | what was requested. */ | |
7788 | ||
7789 | if (count < 0 || count > GET_MODE_BITSIZE (mode)) | |
7790 | { | |
7791 | if (x) | |
7792 | return x; | |
7793 | ||
5f4f0e22 | 7794 | return gen_rtx (code, mode, varop, GEN_INT (count)); |
230d793d RS |
7795 | } |
7796 | ||
7797 | /* Unless one of the branches of the `if' in this loop does a `continue', | |
7798 | we will `break' the loop after the `if'. */ | |
7799 | ||
7800 | while (count != 0) | |
7801 | { | |
7802 | /* If we have an operand of (clobber (const_int 0)), just return that | |
7803 | value. */ | |
7804 | if (GET_CODE (varop) == CLOBBER) | |
7805 | return varop; | |
7806 | ||
7807 | /* If we discovered we had to complement VAROP, leave. Making a NOT | |
7808 | here would cause an infinite loop. */ | |
7809 | if (complement_p) | |
7810 | break; | |
7811 | ||
abc95ed3 | 7812 | /* Convert ROTATERT to ROTATE. */ |
230d793d RS |
7813 | if (code == ROTATERT) |
7814 | code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count; | |
7815 | ||
230d793d | 7816 | /* We need to determine what mode we will do the shift in. If the |
f6789c77 RK |
7817 | shift is a right shift or a ROTATE, we must always do it in the mode |
7818 | it was originally done in. Otherwise, we can do it in MODE, the | |
7819 | widest mode encountered. */ | |
7820 | shift_mode | |
7821 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
7822 | ? result_mode : mode); | |
230d793d RS |
7823 | |
7824 | /* Handle cases where the count is greater than the size of the mode | |
7825 | minus 1. For ASHIFT, use the size minus one as the count (this can | |
7826 | occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, | |
7827 | take the count modulo the size. For other shifts, the result is | |
7828 | zero. | |
7829 | ||
7830 | Since these shifts are being produced by the compiler by combining | |
7831 | multiple operations, each of which is defined, we know what the | |
7832 | result is supposed to be. */ | |
7833 | ||
7834 | if (count > GET_MODE_BITSIZE (shift_mode) - 1) | |
7835 | { | |
7836 | if (code == ASHIFTRT) | |
7837 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
7838 | else if (code == ROTATE || code == ROTATERT) | |
7839 | count %= GET_MODE_BITSIZE (shift_mode); | |
7840 | else | |
7841 | { | |
7842 | /* We can't simply return zero because there may be an | |
7843 | outer op. */ | |
7844 | varop = const0_rtx; | |
7845 | count = 0; | |
7846 | break; | |
7847 | } | |
7848 | } | |
7849 | ||
7850 | /* Negative counts are invalid and should not have been made (a | |
7851 | programmer-specified negative count should have been handled | |
7852 | above). */ | |
7853 | else if (count < 0) | |
7854 | abort (); | |
7855 | ||
312def2e RK |
7856 | /* An arithmetic right shift of a quantity known to be -1 or 0 |
7857 | is a no-op. */ | |
7858 | if (code == ASHIFTRT | |
7859 | && (num_sign_bit_copies (varop, shift_mode) | |
7860 | == GET_MODE_BITSIZE (shift_mode))) | |
d0ab8cd3 | 7861 | { |
312def2e RK |
7862 | count = 0; |
7863 | break; | |
7864 | } | |
d0ab8cd3 | 7865 | |
312def2e RK |
7866 | /* If we are doing an arithmetic right shift and discarding all but |
7867 | the sign bit copies, this is equivalent to doing a shift by the | |
7868 | bitsize minus one. Convert it into that shift because it will often | |
7869 | allow other simplifications. */ | |
500c518b | 7870 | |
312def2e RK |
7871 | if (code == ASHIFTRT |
7872 | && (count + num_sign_bit_copies (varop, shift_mode) | |
7873 | >= GET_MODE_BITSIZE (shift_mode))) | |
7874 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
500c518b | 7875 | |
230d793d RS |
7876 | /* We simplify the tests below and elsewhere by converting |
7877 | ASHIFTRT to LSHIFTRT if we know the sign bit is clear. | |
7878 | `make_compound_operation' will convert it to an ASHIFTRT for | |
7879 | those machines (such as Vax) that don't have an LSHIFTRT. */ | |
5f4f0e22 | 7880 | if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 7881 | && code == ASHIFTRT |
951553af | 7882 | && ((nonzero_bits (varop, shift_mode) |
5f4f0e22 CH |
7883 | & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1))) |
7884 | == 0)) | |
230d793d RS |
7885 | code = LSHIFTRT; |
7886 | ||
7887 | switch (GET_CODE (varop)) | |
7888 | { | |
7889 | case SIGN_EXTEND: | |
7890 | case ZERO_EXTEND: | |
7891 | case SIGN_EXTRACT: | |
7892 | case ZERO_EXTRACT: | |
7893 | new = expand_compound_operation (varop); | |
7894 | if (new != varop) | |
7895 | { | |
7896 | varop = new; | |
7897 | continue; | |
7898 | } | |
7899 | break; | |
7900 | ||
7901 | case MEM: | |
7902 | /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH | |
7903 | minus the width of a smaller mode, we can do this with a | |
7904 | SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */ | |
7905 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
7906 | && ! mode_dependent_address_p (XEXP (varop, 0)) | |
7907 | && ! MEM_VOLATILE_P (varop) | |
7908 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
7909 | MODE_INT, 1)) != BLKmode) | |
7910 | { | |
f76b9db2 ILT |
7911 | if (BYTES_BIG_ENDIAN) |
7912 | new = gen_rtx (MEM, tmode, XEXP (varop, 0)); | |
7913 | else | |
e24b00c8 ILT |
7914 | new = gen_rtx (MEM, tmode, |
7915 | plus_constant (XEXP (varop, 0), | |
7916 | count / BITS_PER_UNIT)); | |
7917 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop); | |
7918 | MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop); | |
7919 | MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop); | |
230d793d RS |
7920 | varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND |
7921 | : ZERO_EXTEND, mode, new); | |
7922 | count = 0; | |
7923 | continue; | |
7924 | } | |
7925 | break; | |
7926 | ||
7927 | case USE: | |
7928 | /* Similar to the case above, except that we can only do this if | |
7929 | the resulting mode is the same as that of the underlying | |
7930 | MEM and adjust the address depending on the *bits* endianness | |
7931 | because of the way that bit-field extract insns are defined. */ | |
7932 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
7933 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
7934 | MODE_INT, 1)) != BLKmode | |
7935 | && tmode == GET_MODE (XEXP (varop, 0))) | |
7936 | { | |
f76b9db2 ILT |
7937 | if (BITS_BIG_ENDIAN) |
7938 | new = XEXP (varop, 0); | |
7939 | else | |
7940 | { | |
7941 | new = copy_rtx (XEXP (varop, 0)); | |
7942 | SUBST (XEXP (new, 0), | |
7943 | plus_constant (XEXP (new, 0), | |
7944 | count / BITS_PER_UNIT)); | |
7945 | } | |
230d793d RS |
7946 | |
7947 | varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND | |
7948 | : ZERO_EXTEND, mode, new); | |
7949 | count = 0; | |
7950 | continue; | |
7951 | } | |
7952 | break; | |
7953 | ||
7954 | case SUBREG: | |
7955 | /* If VAROP is a SUBREG, strip it as long as the inner operand has | |
7956 | the same number of words as what we've seen so far. Then store | |
7957 | the widest mode in MODE. */ | |
f9e67232 RS |
7958 | if (subreg_lowpart_p (varop) |
7959 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) | |
7960 | > GET_MODE_SIZE (GET_MODE (varop))) | |
230d793d RS |
7961 | && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) |
7962 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
7963 | == mode_words)) | |
7964 | { | |
7965 | varop = SUBREG_REG (varop); | |
7966 | if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode)) | |
7967 | mode = GET_MODE (varop); | |
7968 | continue; | |
7969 | } | |
7970 | break; | |
7971 | ||
7972 | case MULT: | |
7973 | /* Some machines use MULT instead of ASHIFT because MULT | |
7974 | is cheaper. But it is still better on those machines to | |
7975 | merge two shifts into one. */ | |
7976 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
7977 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
7978 | { | |
7979 | varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0), | |
5f4f0e22 | 7980 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); |
230d793d RS |
7981 | continue; |
7982 | } | |
7983 | break; | |
7984 | ||
7985 | case UDIV: | |
7986 | /* Similar, for when divides are cheaper. */ | |
7987 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
7988 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
7989 | { | |
7990 | varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0), | |
5f4f0e22 | 7991 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); |
230d793d RS |
7992 | continue; |
7993 | } | |
7994 | break; | |
7995 | ||
7996 | case ASHIFTRT: | |
7997 | /* If we are extracting just the sign bit of an arithmetic right | |
7998 | shift, that shift is not needed. */ | |
7999 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1) | |
8000 | { | |
8001 | varop = XEXP (varop, 0); | |
8002 | continue; | |
8003 | } | |
8004 | ||
8005 | /* ... fall through ... */ | |
8006 | ||
8007 | case LSHIFTRT: | |
8008 | case ASHIFT: | |
230d793d RS |
8009 | case ROTATE: |
8010 | /* Here we have two nested shifts. The result is usually the | |
8011 | AND of a new shift with a mask. We compute the result below. */ | |
8012 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8013 | && INTVAL (XEXP (varop, 1)) >= 0 | |
8014 | && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) | |
5f4f0e22 CH |
8015 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
8016 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d RS |
8017 | { |
8018 | enum rtx_code first_code = GET_CODE (varop); | |
8019 | int first_count = INTVAL (XEXP (varop, 1)); | |
5f4f0e22 | 8020 | unsigned HOST_WIDE_INT mask; |
230d793d | 8021 | rtx mask_rtx; |
230d793d | 8022 | |
230d793d RS |
8023 | /* We have one common special case. We can't do any merging if |
8024 | the inner code is an ASHIFTRT of a smaller mode. However, if | |
8025 | we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) | |
8026 | with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), | |
8027 | we can convert it to | |
8028 | (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1). | |
8029 | This simplifies certain SIGN_EXTEND operations. */ | |
8030 | if (code == ASHIFT && first_code == ASHIFTRT | |
8031 | && (GET_MODE_BITSIZE (result_mode) | |
8032 | - GET_MODE_BITSIZE (GET_MODE (varop))) == count) | |
8033 | { | |
8034 | /* C3 has the low-order C1 bits zero. */ | |
8035 | ||
5f4f0e22 CH |
8036 | mask = (GET_MODE_MASK (mode) |
8037 | & ~ (((HOST_WIDE_INT) 1 << first_count) - 1)); | |
230d793d | 8038 | |
5f4f0e22 | 8039 | varop = simplify_and_const_int (NULL_RTX, result_mode, |
230d793d | 8040 | XEXP (varop, 0), mask); |
5f4f0e22 | 8041 | varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode, |
230d793d RS |
8042 | varop, count); |
8043 | count = first_count; | |
8044 | code = ASHIFTRT; | |
8045 | continue; | |
8046 | } | |
8047 | ||
d0ab8cd3 RK |
8048 | /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more |
8049 | than C1 high-order bits equal to the sign bit, we can convert | |
8050 | this to either an ASHIFT or an ASHIFTRT depending on the | |
8051 | two counts. | |
230d793d RS |
8052 | |
8053 | We cannot do this if VAROP's mode is not SHIFT_MODE. */ | |
8054 | ||
8055 | if (code == ASHIFTRT && first_code == ASHIFT | |
8056 | && GET_MODE (varop) == shift_mode | |
d0ab8cd3 RK |
8057 | && (num_sign_bit_copies (XEXP (varop, 0), shift_mode) |
8058 | > first_count)) | |
230d793d | 8059 | { |
d0ab8cd3 RK |
8060 | count -= first_count; |
8061 | if (count < 0) | |
8062 | count = - count, code = ASHIFT; | |
8063 | varop = XEXP (varop, 0); | |
8064 | continue; | |
230d793d RS |
8065 | } |
8066 | ||
8067 | /* There are some cases we can't do. If CODE is ASHIFTRT, | |
8068 | we can only do this if FIRST_CODE is also ASHIFTRT. | |
8069 | ||
8070 | We can't do the case when CODE is ROTATE and FIRST_CODE is | |
8071 | ASHIFTRT. | |
8072 | ||
8073 | If the mode of this shift is not the mode of the outer shift, | |
bdaae9a0 | 8074 | we can't do this if either shift is a right shift or ROTATE. |
230d793d RS |
8075 | |
8076 | Finally, we can't do any of these if the mode is too wide | |
8077 | unless the codes are the same. | |
8078 | ||
8079 | Handle the case where the shift codes are the same | |
8080 | first. */ | |
8081 | ||
8082 | if (code == first_code) | |
8083 | { | |
8084 | if (GET_MODE (varop) != result_mode | |
bdaae9a0 RK |
8085 | && (code == ASHIFTRT || code == LSHIFTRT |
8086 | || code == ROTATE)) | |
230d793d RS |
8087 | break; |
8088 | ||
8089 | count += first_count; | |
8090 | varop = XEXP (varop, 0); | |
8091 | continue; | |
8092 | } | |
8093 | ||
8094 | if (code == ASHIFTRT | |
8095 | || (code == ROTATE && first_code == ASHIFTRT) | |
5f4f0e22 | 8096 | || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT |
230d793d | 8097 | || (GET_MODE (varop) != result_mode |
bdaae9a0 RK |
8098 | && (first_code == ASHIFTRT || first_code == LSHIFTRT |
8099 | || first_code == ROTATE | |
230d793d RS |
8100 | || code == ROTATE))) |
8101 | break; | |
8102 | ||
8103 | /* To compute the mask to apply after the shift, shift the | |
951553af | 8104 | nonzero bits of the inner shift the same way the |
230d793d RS |
8105 | outer shift will. */ |
8106 | ||
951553af | 8107 | mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop))); |
230d793d RS |
8108 | |
8109 | mask_rtx | |
8110 | = simplify_binary_operation (code, result_mode, mask_rtx, | |
5f4f0e22 | 8111 | GEN_INT (count)); |
230d793d RS |
8112 | |
8113 | /* Give up if we can't compute an outer operation to use. */ | |
8114 | if (mask_rtx == 0 | |
8115 | || GET_CODE (mask_rtx) != CONST_INT | |
8116 | || ! merge_outer_ops (&outer_op, &outer_const, AND, | |
8117 | INTVAL (mask_rtx), | |
8118 | result_mode, &complement_p)) | |
8119 | break; | |
8120 | ||
8121 | /* If the shifts are in the same direction, we add the | |
8122 | counts. Otherwise, we subtract them. */ | |
8123 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
8124 | == (first_code == ASHIFTRT || first_code == LSHIFTRT)) | |
8125 | count += first_count; | |
8126 | else | |
8127 | count -= first_count; | |
8128 | ||
8129 | /* If COUNT is positive, the new shift is usually CODE, | |
8130 | except for the two exceptions below, in which case it is | |
8131 | FIRST_CODE. If the count is negative, FIRST_CODE should | |
8132 | always be used. */ | |
8133 | if (count > 0 | |
8134 | && ((first_code == ROTATE && code == ASHIFT) | |
8135 | || (first_code == ASHIFTRT && code == LSHIFTRT))) | |
8136 | code = first_code; | |
8137 | else if (count < 0) | |
8138 | code = first_code, count = - count; | |
8139 | ||
8140 | varop = XEXP (varop, 0); | |
8141 | continue; | |
8142 | } | |
8143 | ||
8144 | /* If we have (A << B << C) for any shift, we can convert this to | |
8145 | (A << C << B). This wins if A is a constant. Only try this if | |
8146 | B is not a constant. */ | |
8147 | ||
8148 | else if (GET_CODE (varop) == code | |
8149 | && GET_CODE (XEXP (varop, 1)) != CONST_INT | |
8150 | && 0 != (new | |
8151 | = simplify_binary_operation (code, mode, | |
8152 | XEXP (varop, 0), | |
5f4f0e22 | 8153 | GEN_INT (count)))) |
230d793d RS |
8154 | { |
8155 | varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1)); | |
8156 | count = 0; | |
8157 | continue; | |
8158 | } | |
8159 | break; | |
8160 | ||
8161 | case NOT: | |
8162 | /* Make this fit the case below. */ | |
8163 | varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0), | |
5f4f0e22 | 8164 | GEN_INT (GET_MODE_MASK (mode))); |
230d793d RS |
8165 | continue; |
8166 | ||
8167 | case IOR: | |
8168 | case AND: | |
8169 | case XOR: | |
8170 | /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C) | |
8171 | with C the size of VAROP - 1 and the shift is logical if | |
8172 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
8173 | we have an (le X 0) operation. If we have an arithmetic shift | |
8174 | and STORE_FLAG_VALUE is 1 or we have a logical shift with | |
8175 | STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */ | |
8176 | ||
8177 | if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS | |
8178 | && XEXP (XEXP (varop, 0), 1) == constm1_rtx | |
8179 | && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | |
8180 | && (code == LSHIFTRT || code == ASHIFTRT) | |
8181 | && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1 | |
8182 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) | |
8183 | { | |
8184 | count = 0; | |
8185 | varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1), | |
8186 | const0_rtx); | |
8187 | ||
8188 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
8189 | varop = gen_rtx_combine (NEG, GET_MODE (varop), varop); | |
8190 | ||
8191 | continue; | |
8192 | } | |
8193 | ||
8194 | /* If we have (shift (logical)), move the logical to the outside | |
8195 | to allow it to possibly combine with another logical and the | |
8196 | shift to combine with another shift. This also canonicalizes to | |
8197 | what a ZERO_EXTRACT looks like. Also, some machines have | |
8198 | (and (shift)) insns. */ | |
8199 | ||
8200 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8201 | && (new = simplify_binary_operation (code, result_mode, | |
8202 | XEXP (varop, 1), | |
5f4f0e22 | 8203 | GEN_INT (count))) != 0 |
7d171a1e | 8204 | && GET_CODE (new) == CONST_INT
230d793d RS |
8205 | && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), |
8206 | INTVAL (new), result_mode, &complement_p)) | |
8207 | { | |
8208 | varop = XEXP (varop, 0); | |
8209 | continue; | |
8210 | } | |
8211 | ||
8212 | /* If we can't do that, try to simplify the shift in each arm of the | |
8213 | logical expression, make a new logical expression, and apply | |
8214 | the inverse distributive law. */ | |
8215 | { | |
00d4ca1c | 8216 | rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d | 8217 | XEXP (varop, 0), count); |
00d4ca1c | 8218 | rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d RS |
8219 | XEXP (varop, 1), count); |
8220 | ||
21a64bf1 | 8221 | varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs); |
230d793d RS |
8222 | varop = apply_distributive_law (varop); |
8223 | ||
8224 | count = 0; | |
8225 | } | |
8226 | break; | |
8227 | ||
8228 | case EQ: | |
45620ed4 | 8229 | /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d | 8230 | says that the sign bit can be tested, FOO has mode MODE, C is |
45620ed4 RK |
8231 | GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit |
8232 | that may be nonzero. */ | |
8233 | if (code == LSHIFTRT | |
230d793d RS |
8234 | && XEXP (varop, 1) == const0_rtx |
8235 | && GET_MODE (XEXP (varop, 0)) == result_mode | |
8236 | && count == GET_MODE_BITSIZE (result_mode) - 1 | |
5f4f0e22 | 8237 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 8238 | && ((STORE_FLAG_VALUE |
5f4f0e22 | 8239 | & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1)))) |
951553af | 8240 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
8241 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
8242 | (HOST_WIDE_INT) 1, result_mode, | |
8243 | &complement_p)) | |
230d793d RS |
8244 | { |
8245 | varop = XEXP (varop, 0); | |
8246 | count = 0; | |
8247 | continue; | |
8248 | } | |
8249 | break; | |
8250 | ||
8251 | case NEG: | |
d0ab8cd3 RK |
8252 | /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less |
8253 | than the number of bits in the mode is equivalent to A. */ | |
8254 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1 | |
951553af | 8255 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1) |
230d793d | 8256 | { |
d0ab8cd3 | 8257 | varop = XEXP (varop, 0); |
230d793d RS |
8258 | count = 0; |
8259 | continue; | |
8260 | } | |
8261 | ||
8262 | /* NEG commutes with ASHIFT since it is multiplication. Move the | |
8263 | NEG outside to allow shifts to combine. */ | |
8264 | if (code == ASHIFT | |
5f4f0e22 CH |
8265 | && merge_outer_ops (&outer_op, &outer_const, NEG, |
8266 | (HOST_WIDE_INT) 0, result_mode, | |
8267 | &complement_p)) | |
230d793d RS |
8268 | { |
8269 | varop = XEXP (varop, 0); | |
8270 | continue; | |
8271 | } | |
8272 | break; | |
8273 | ||
8274 | case PLUS: | |
d0ab8cd3 RK |
8275 | /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C |
8276 | is one less than the number of bits in the mode is | |
8277 | equivalent to (xor A 1). */ | |
230d793d RS |
8278 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1 |
8279 | && XEXP (varop, 1) == constm1_rtx | |
951553af | 8280 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
8281 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
8282 | (HOST_WIDE_INT) 1, result_mode, | |
8283 | &complement_p)) | |
230d793d RS |
8284 | { |
8285 | count = 0; | |
8286 | varop = XEXP (varop, 0); | |
8287 | continue; | |
8288 | } | |
8289 | ||
3f508eca | 8290 | /* If we have (xshiftrt (plus FOO BAR) C), and the only bits |
951553af | 8291 | that might be nonzero in BAR are those being shifted out and those |
3f508eca RK |
8292 | bits are known zero in FOO, we can replace the PLUS with FOO. |
8293 | Similarly in the other operand order. This code occurs when | |
8294 | we are computing the size of a variable-size array. */ | |
8295 | ||
8296 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 8297 | && count < HOST_BITS_PER_WIDE_INT |
951553af RK |
8298 | && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0 |
8299 | && (nonzero_bits (XEXP (varop, 1), result_mode) | |
8300 | & nonzero_bits (XEXP (varop, 0), result_mode)) == 0) | |
3f508eca RK |
8301 | { |
8302 | varop = XEXP (varop, 0); | |
8303 | continue; | |
8304 | } | |
8305 | else if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 8306 | && count < HOST_BITS_PER_WIDE_INT |
ac49a949 | 8307 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
951553af | 8308 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
3f508eca | 8309 | >> count) |
951553af RK |
8310 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
8311 | & nonzero_bits (XEXP (varop, 1), | |
3f508eca RK |
8312 | result_mode))) |
8313 | { | |
8314 | varop = XEXP (varop, 1); | |
8315 | continue; | |
8316 | } | |
8317 | ||
230d793d RS |
8318 | /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ |
8319 | if (code == ASHIFT | |
8320 | && GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8321 | && (new = simplify_binary_operation (ASHIFT, result_mode, | |
8322 | XEXP (varop, 1), | |
5f4f0e22 | 8323 | GEN_INT (count))) != 0 |
7d171a1e | 8324 | && GET_CODE (new) == CONST_INT
230d793d RS |
8325 | && merge_outer_ops (&outer_op, &outer_const, PLUS, |
8326 | INTVAL (new), result_mode, &complement_p)) | |
8327 | { | |
8328 | varop = XEXP (varop, 0); | |
8329 | continue; | |
8330 | } | |
8331 | break; | |
8332 | ||
8333 | case MINUS: | |
8334 | /* If we have (xshiftrt (minus (ashiftrt X C) X) C) | |
8335 | with C the size of VAROP - 1 and the shift is logical if | |
8336 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
8337 | we have a (gt X 0) operation. If the shift is arithmetic with | |
8338 | STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1, | |
8339 | we have a (neg (gt X 0)) operation. */ | |
8340 | ||
8341 | if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT | |
8342 | && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1 | |
8343 | && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | |
8344 | && (code == LSHIFTRT || code == ASHIFTRT) | |
8345 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
8346 | && INTVAL (XEXP (XEXP (varop, 0), 1)) == count | |
8347 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) | |
8348 | { | |
8349 | count = 0; | |
8350 | varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1), | |
8351 | const0_rtx); | |
8352 | ||
8353 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
8354 | varop = gen_rtx_combine (NEG, GET_MODE (varop), varop); | |
8355 | ||
8356 | continue; | |
8357 | } | |
8358 | break; | |
8359 | } | |
8360 | ||
8361 | break; | |
8362 | } | |
8363 | ||
8364 | /* We need to determine what mode to do the shift in. If the shift is | |
f6789c77 RK |
8365 | a right shift or ROTATE, we must always do it in the mode it was |
8366 | originally done in. Otherwise, we can do it in MODE, the widest mode | |
8367 | encountered. The code we care about is that of the shift that will | |
8368 | actually be done, not the shift that was originally requested. */ | |
8369 | shift_mode | |
8370 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
8371 | ? result_mode : mode); | |
230d793d RS |
8372 | |
8373 | /* We have now finished analyzing the shift. The result should be | |
8374 | a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If | |
8375 | OUTER_OP is non-NIL, it is an operation that needs to be applied | |
8376 | to the result of the shift. OUTER_CONST is the relevant constant, | |
8377 | but we must turn off all bits turned off in the shift. | |
8378 | ||
8379 | If we were passed a value for X, see if we can use any pieces of | |
8380 | it. If not, make new rtx. */ | |
8381 | ||
8382 | if (x && GET_RTX_CLASS (GET_CODE (x)) == '2' | |
8383 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
8384 | && INTVAL (XEXP (x, 1)) == count) | |
8385 | const_rtx = XEXP (x, 1); | |
8386 | else | |
5f4f0e22 | 8387 | const_rtx = GEN_INT (count); |
230d793d RS |
8388 | |
8389 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
8390 | && GET_MODE (XEXP (x, 0)) == shift_mode | |
8391 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
8392 | varop = XEXP (x, 0); | |
8393 | else if (GET_MODE (varop) != shift_mode) | |
8394 | varop = gen_lowpart_for_combine (shift_mode, varop); | |
8395 | ||
8396 | /* If we can't make the SUBREG, try to return what we were given. */ | |
8397 | if (GET_CODE (varop) == CLOBBER) | |
8398 | return x ? x : varop; | |
8399 | ||
8400 | new = simplify_binary_operation (code, shift_mode, varop, const_rtx); | |
8401 | if (new != 0) | |
8402 | x = new; | |
8403 | else | |
8404 | { | |
8405 | if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode) | |
8406 | x = gen_rtx_combine (code, shift_mode, varop, const_rtx); | |
8407 | ||
8408 | SUBST (XEXP (x, 0), varop); | |
8409 | SUBST (XEXP (x, 1), const_rtx); | |
8410 | } | |
8411 | ||
224eeff2 RK |
8412 | /* If we have an outer operation and we just made a shift, it is |
8413 | possible that we could have simplified the shift were it not | |
8414 | for the outer operation. So try to do the simplification | |
8415 | recursively. */ | |
8416 | ||
8417 | if (outer_op != NIL && GET_CODE (x) == code | |
8418 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
8419 | x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0), | |
8420 | INTVAL (XEXP (x, 1))); | |
8421 | ||
230d793d RS |
8422 | /* If we were doing a LSHIFTRT in a wider mode than it was originally, |
8423 | turn off all the bits that the shift would have turned off. */ | |
8424 | if (orig_code == LSHIFTRT && result_mode != shift_mode) | |
5f4f0e22 | 8425 | x = simplify_and_const_int (NULL_RTX, shift_mode, x, |
230d793d RS |
8426 | GET_MODE_MASK (result_mode) >> orig_count); |
8427 | ||
8428 | /* Do the remainder of the processing in RESULT_MODE. */ | |
8429 | x = gen_lowpart_for_combine (result_mode, x); | |
8430 | ||
8431 | /* If COMPLEMENT_P is set, we have to complement X before doing the outer | |
8432 | operation. */ | |
8433 | if (complement_p) | |
0c1c8ea6 | 8434 | x = gen_unary (NOT, result_mode, result_mode, x); |
230d793d RS |
8435 | |
8436 | if (outer_op != NIL) | |
8437 | { | |
5f4f0e22 | 8438 | if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) |
9fa6d012 TG |
8439 | { |
8440 | int width = GET_MODE_BITSIZE (result_mode); | |
8441 | ||
8442 | outer_const &= GET_MODE_MASK (result_mode); | |
8443 | ||
8444 | /* If this would be an entire word for the target, but is not for | |
8445 | the host, then sign-extend on the host so that the number will | |
8446 | look the same way on the host that it would on the target. | |
8447 | ||
8448 | For example, when building a 64 bit alpha hosted 32 bit sparc | |
8449 | targeted compiler, then we want the 32 bit unsigned value -1 to be | |
8450 | represented as a 64 bit value -1, and not as 0x00000000ffffffff. | |
8451 | The latter confuses the sparc backend. */ | |
8452 | ||
8453 | if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width | |
8454 | && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1)))) | |
8455 | outer_const |= ((HOST_WIDE_INT) (-1) << width); | |
8456 | } | |
230d793d RS |
8457 | |
8458 | if (outer_op == AND) | |
5f4f0e22 | 8459 | x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const); |
230d793d RS |
8460 | else if (outer_op == SET) |
8461 | /* This means that we have determined that the result is | |
8462 | equivalent to a constant. This should be rare. */ | |
5f4f0e22 | 8463 | x = GEN_INT (outer_const); |
230d793d | 8464 | else if (GET_RTX_CLASS (outer_op) == '1') |
0c1c8ea6 | 8465 | x = gen_unary (outer_op, result_mode, result_mode, x); |
230d793d | 8466 | else |
5f4f0e22 | 8467 | x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const)); |
230d793d RS |
8468 | } |
8469 | ||
8470 | return x; | |
8471 | } | |
8472 | \f | |
8473 | /* Like recog, but we receive the address of a pointer to a new pattern. | |
8474 | We try to match the rtx that the pointer points to. | |
8475 | If that fails, we may try to modify or replace the pattern, | |
8476 | storing the replacement into the same pointer object. | |
8477 | ||
8478 | Modifications include deletion or addition of CLOBBERs. | |
8479 | ||
8480 | PNOTES is a pointer to a location where any REG_UNUSED notes added for | |
8481 | the CLOBBERs are placed. | |
8482 | ||
a29ca9db RK |
8483 | PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns |
8484 | we had to add. | |
8485 | ||
230d793d RS |
8486 | The value is the final insn code from the pattern ultimately matched, |
8487 | or -1. */ | |
8488 | ||
8489 | static int | |
a29ca9db | 8490 | recog_for_combine (pnewpat, insn, pnotes, padded_scratches) |
230d793d RS |
8491 | rtx *pnewpat; |
8492 | rtx insn; | |
8493 | rtx *pnotes; | |
a29ca9db | 8494 | int *padded_scratches; |
230d793d RS |
8495 | { |
8496 | register rtx pat = *pnewpat; | |
8497 | int insn_code_number; | |
8498 | int num_clobbers_to_add = 0; | |
8499 | int i; | |
8500 | rtx notes = 0; | |
8501 | ||
a29ca9db RK |
8502 | *padded_scratches = 0; |
8503 | ||
974f4146 RK |
8504 | /* If PAT is a PARALLEL, check to see if it contains the CLOBBER |
8505 | we use to indicate that something didn't match. If we find such a | |
8506 | thing, force rejection. */ | |
d96023cf | 8507 | if (GET_CODE (pat) == PARALLEL) |
974f4146 | 8508 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) |
d96023cf RK |
8509 | if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER |
8510 | && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) | |
974f4146 RK |
8511 | return -1; |
8512 | ||
230d793d RS |
8513 | /* Is the result of combination a valid instruction? */ |
8514 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); | |
8515 | ||
8516 | /* If it isn't, there is the possibility that we previously had an insn | |
8517 | that clobbered some register as a side effect, but the combined | |
8518 | insn doesn't need to do that. So try once more without the clobbers | |
8519 | unless this represents an ASM insn. */ | |
8520 | ||
8521 | if (insn_code_number < 0 && ! check_asm_operands (pat) | |
8522 | && GET_CODE (pat) == PARALLEL) | |
8523 | { | |
8524 | int pos; | |
8525 | ||
8526 | for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++) | |
8527 | if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER) | |
8528 | { | |
8529 | if (i != pos) | |
8530 | SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i)); | |
8531 | pos++; | |
8532 | } | |
8533 | ||
8534 | SUBST_INT (XVECLEN (pat, 0), pos); | |
8535 | ||
8536 | if (pos == 1) | |
8537 | pat = XVECEXP (pat, 0, 0); | |
8538 | ||
8539 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); | |
8540 | } | |
8541 | ||
8542 | /* If we had any clobbers to add, make a new pattern that contains | |
8543 | them. Then check to make sure that all of them are dead. */ | |
8544 | if (num_clobbers_to_add) | |
8545 | { | |
8546 | rtx newpat = gen_rtx (PARALLEL, VOIDmode, | |
8547 | gen_rtvec (GET_CODE (pat) == PARALLEL | |
8548 | ? XVECLEN (pat, 0) + num_clobbers_to_add | |
8549 | : num_clobbers_to_add + 1)); | |
8550 | ||
8551 | if (GET_CODE (pat) == PARALLEL) | |
8552 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
8553 | XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i); | |
8554 | else | |
8555 | XVECEXP (newpat, 0, 0) = pat; | |
8556 | ||
8557 | add_clobbers (newpat, insn_code_number); | |
8558 | ||
8559 | for (i = XVECLEN (newpat, 0) - num_clobbers_to_add; | |
8560 | i < XVECLEN (newpat, 0); i++) | |
8561 | { | |
8562 | if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG | |
8563 | && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) | |
8564 | return -1; | |
a29ca9db RK |
8565 | else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH) |
8566 | (*padded_scratches)++; | |
230d793d RS |
8567 | notes = gen_rtx (EXPR_LIST, REG_UNUSED, |
8568 | XEXP (XVECEXP (newpat, 0, i), 0), notes); | |
8569 | } | |
8570 | pat = newpat; | |
8571 | } | |
8572 | ||
8573 | *pnewpat = pat; | |
8574 | *pnotes = notes; | |
8575 | ||
8576 | return insn_code_number; | |
8577 | } | |
8578 | \f | |
8579 | /* Like gen_lowpart but for use by combine. In combine it is not possible | |
8580 | to create any new pseudoregs. However, it is safe to create | |
8581 | invalid memory addresses, because combine will try to recognize | |
8582 | them and all they will do is make the combine attempt fail. | |
8583 | ||
8584 | If for some reason this cannot do its job, an rtx | |
8585 | (clobber (const_int 0)) is returned. | |
8586 | An insn containing that will not be recognized. */ | |
8587 | ||
8588 | #undef gen_lowpart | |
8589 | ||
8590 | static rtx | |
8591 | gen_lowpart_for_combine (mode, x) | |
8592 | enum machine_mode mode; | |
8593 | register rtx x; | |
8594 | { | |
8595 | rtx result; | |
8596 | ||
8597 | if (GET_MODE (x) == mode) | |
8598 | return x; | |
8599 | ||
eae957a8 RK |
8600 | /* We can only support MODE being wider than a word if X is a |
8601 | constant integer or has a mode the same size. */ | |
8602 | ||
8603 | if (GET_MODE_SIZE (mode) > UNITS_PER_WORD | |
8604 | && ! ((GET_MODE (x) == VOIDmode | |
8605 | && (GET_CODE (x) == CONST_INT | |
8606 | || GET_CODE (x) == CONST_DOUBLE)) | |
8607 | || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode))) | |
230d793d RS |
8608 | return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx); |
8609 | ||
8610 | /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart | |
8611 | won't know what to do. So we will strip off the SUBREG here and | |
8612 | process normally. */ | |
8613 | if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
8614 | { | |
8615 | x = SUBREG_REG (x); | |
8616 | if (GET_MODE (x) == mode) | |
8617 | return x; | |
8618 | } | |
8619 | ||
8620 | result = gen_lowpart_common (mode, x); | |
64bf47a2 RK |
8621 | if (result != 0 |
8622 | && GET_CODE (result) == SUBREG | |
8623 | && GET_CODE (SUBREG_REG (result)) == REG | |
8624 | && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER | |
8625 | && (GET_MODE_SIZE (GET_MODE (result)) | |
8626 | != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result))))) | |
8627 | reg_changes_size[REGNO (SUBREG_REG (result))] = 1; | |
8628 | ||
230d793d RS |
8629 | if (result) |
8630 | return result; | |
8631 | ||
8632 | if (GET_CODE (x) == MEM) | |
8633 | { | |
8634 | register int offset = 0; | |
8635 | rtx new; | |
8636 | ||
8637 | /* Refuse to work on a volatile memory ref or one with a mode-dependent | |
8638 | address. */ | |
8639 | if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0))) | |
8640 | return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx); | |
8641 | ||
8642 | /* If we want to refer to something bigger than the original memref, | |
8643 | generate a perverse subreg instead. That will force a reload | |
8644 | of the original memref X. */ | |
8645 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)) | |
8646 | return gen_rtx (SUBREG, mode, x, 0); | |
8647 | ||
f76b9db2 ILT |
8648 | if (WORDS_BIG_ENDIAN) |
8649 | offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD) | |
8650 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)); | |
8651 | if (BYTES_BIG_ENDIAN) | |
8652 | { | |
8653 | /* Adjust the address so that the address-after-the-data is | |
8654 | unchanged. */ | |
8655 | offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)) | |
8656 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x)))); | |
8657 | } | |
230d793d RS |
8658 | new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset)); |
8659 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x); | |
8660 | MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x); | |
8661 | MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x); | |
8662 | return new; | |
8663 | } | |
8664 | ||
8665 | /* If X is a comparison operator, rewrite it in a new mode. This | |
8666 | probably won't match, but may allow further simplifications. */ | |
8667 | else if (GET_RTX_CLASS (GET_CODE (x)) == '<') | |
8668 | return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1)); | |
8669 | ||
8670 | /* If we couldn't simplify X any other way, just enclose it in a | |
8671 | SUBREG. Normally, this SUBREG won't match, but some patterns may | |
a7c99304 | 8672 | include an explicit SUBREG or we may simplify it further in combine. */ |
230d793d | 8673 | else |
dfbe1b2f RK |
8674 | { |
8675 | int word = 0; | |
8676 | ||
8677 | if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) | |
8678 | word = ((GET_MODE_SIZE (GET_MODE (x)) | |
8679 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)) | |
8680 | / UNITS_PER_WORD); | |
8681 | return gen_rtx (SUBREG, mode, x, word); | |
8682 | } | |
230d793d RS |
8683 | } |
8684 | \f | |
8685 | /* Make an rtx expression. This is a subset of gen_rtx and only supports | |
8686 | expressions of 1, 2, or 3 operands, each of which is an rtx expression. | |
8687 | ||
8688 | If the identical expression was previously in the insn (in the undobuf), | |
8689 | it will be returned. Only if it is not found will a new expression | |
8690 | be made. */ | |
8691 | ||
8692 | /*VARARGS2*/ | |
8693 | static rtx | |
4f90e4a0 | 8694 | gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...)) |
230d793d | 8695 | { |
4f90e4a0 | 8696 | #ifndef __STDC__ |
230d793d RS |
8697 | enum rtx_code code; |
8698 | enum machine_mode mode; | |
4f90e4a0 RK |
8699 | #endif |
8700 | va_list p; | |
230d793d RS |
8701 | int n_args; |
8702 | rtx args[3]; | |
8703 | int i, j; | |
8704 | char *fmt; | |
8705 | rtx rt; | |
8706 | ||
4f90e4a0 RK |
8707 | VA_START (p, mode); |
8708 | ||
8709 | #ifndef __STDC__ | |
230d793d RS |
8710 | code = va_arg (p, enum rtx_code); |
8711 | mode = va_arg (p, enum machine_mode); | |
4f90e4a0 RK |
8712 | #endif |
8713 | ||
230d793d RS |
8714 | n_args = GET_RTX_LENGTH (code); |
8715 | fmt = GET_RTX_FORMAT (code); | |
8716 | ||
8717 | if (n_args == 0 || n_args > 3) | |
8718 | abort (); | |
8719 | ||
8720 | /* Get each arg and verify that it is supposed to be an expression. */ | |
8721 | for (j = 0; j < n_args; j++) | |
8722 | { | |
8723 | if (*fmt++ != 'e') | |
8724 | abort (); | |
8725 | ||
8726 | args[j] = va_arg (p, rtx); | |
8727 | } | |
8728 | ||
8729 | /* See if this is in undobuf. Be sure we don't use objects that came | |
8730 | from another insn; this could produce circular rtl structures. */ | |
8731 | ||
8732 | for (i = previous_num_undos; i < undobuf.num_undo; i++) | |
8733 | if (!undobuf.undo[i].is_int | |
f5393ab9 RS |
8734 | && GET_CODE (undobuf.undo[i].old_contents.r) == code |
8735 | && GET_MODE (undobuf.undo[i].old_contents.r) == mode) | |
230d793d RS |
8736 | { |
8737 | for (j = 0; j < n_args; j++) | |
f5393ab9 | 8738 | if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j]) |
230d793d RS |
8739 | break; |
8740 | ||
8741 | if (j == n_args) | |
f5393ab9 | 8742 | return undobuf.undo[i].old_contents.r; |
230d793d RS |
8743 | } |
8744 | ||
8745 | /* Otherwise make a new rtx. We know we have 1, 2, or 3 args. | |
8746 | Use rtx_alloc instead of gen_rtx because it's faster on RISC. */ | |
8747 | rt = rtx_alloc (code); | |
8748 | PUT_MODE (rt, mode); | |
8749 | XEXP (rt, 0) = args[0]; | |
8750 | if (n_args > 1) | |
8751 | { | |
8752 | XEXP (rt, 1) = args[1]; | |
8753 | if (n_args > 2) | |
8754 | XEXP (rt, 2) = args[2]; | |
8755 | } | |
8756 | return rt; | |
8757 | } | |
8758 | ||
8759 | /* These routines make binary and unary operations by first seeing if they | |
8760 | fold; if not, a new expression is allocated. */ | |
8761 | ||
8762 | static rtx | |
8763 | gen_binary (code, mode, op0, op1) | |
8764 | enum rtx_code code; | |
8765 | enum machine_mode mode; | |
8766 | rtx op0, op1; | |
8767 | { | |
8768 | rtx result; | |
1a26b032 RK |
8769 | rtx tem; |
8770 | ||
8771 | if (GET_RTX_CLASS (code) == 'c' | |
8772 | && (GET_CODE (op0) == CONST_INT | |
8773 | || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT))) | |
8774 | tem = op0, op0 = op1, op1 = tem; | |
230d793d RS |
8775 | |
8776 | if (GET_RTX_CLASS (code) == '<') | |
8777 | { | |
8778 | enum machine_mode op_mode = GET_MODE (op0); | |
9210df58 RK |
8779 | |
8780 | /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get | |
8781 | just (REL_OP X Y). */ | |
8782 | if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) | |
8783 | { | |
8784 | op1 = XEXP (op0, 1); | |
8785 | op0 = XEXP (op0, 0); | |
8786 | op_mode = GET_MODE (op0); | |
8787 | } | |
8788 | ||
230d793d RS |
8789 | if (op_mode == VOIDmode) |
8790 | op_mode = GET_MODE (op1); | |
8791 | result = simplify_relational_operation (code, op_mode, op0, op1); | |
8792 | } | |
8793 | else | |
8794 | result = simplify_binary_operation (code, mode, op0, op1); | |
8795 | ||
8796 | if (result) | |
8797 | return result; | |
8798 | ||
8799 | /* Put complex operands first and constants second. */ | |
8800 | if (GET_RTX_CLASS (code) == 'c' | |
8801 | && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT) | |
8802 | || (GET_RTX_CLASS (GET_CODE (op0)) == 'o' | |
8803 | && GET_RTX_CLASS (GET_CODE (op1)) != 'o') | |
8804 | || (GET_CODE (op0) == SUBREG | |
8805 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o' | |
8806 | && GET_RTX_CLASS (GET_CODE (op1)) != 'o'))) | |
8807 | return gen_rtx_combine (code, mode, op1, op0); | |
8808 | ||
8809 | return gen_rtx_combine (code, mode, op0, op1); | |
8810 | } | |
8811 | ||
8812 | static rtx | |
0c1c8ea6 | 8813 | gen_unary (code, mode, op0_mode, op0) |
230d793d | 8814 | enum rtx_code code; |
0c1c8ea6 | 8815 | enum machine_mode mode, op0_mode; |
230d793d RS |
8816 | rtx op0; |
8817 | { | |
0c1c8ea6 | 8818 | rtx result = simplify_unary_operation (code, mode, op0, op0_mode); |
230d793d RS |
8819 | |
8820 | if (result) | |
8821 | return result; | |
8822 | ||
8823 | return gen_rtx_combine (code, mode, op0); | |
8824 | } | |
8825 | \f | |
8826 | /* Simplify a comparison between *POP0 and *POP1 where CODE is the | |
8827 | comparison code that will be tested. | |
8828 | ||
8829 | The result is a possibly different comparison code to use. *POP0 and | |
8830 | *POP1 may be updated. | |
8831 | ||
8832 | It is possible that we might detect that a comparison is either always | |
8833 | true or always false. However, we do not perform general constant | |
5089e22e | 8834 | folding in combine, so this knowledge isn't useful. Such tautologies |
230d793d RS |
8835 | should have been detected earlier. Hence we ignore all such cases. */ |
8836 | ||
8837 | static enum rtx_code | |
8838 | simplify_comparison (code, pop0, pop1) | |
8839 | enum rtx_code code; | |
8840 | rtx *pop0; | |
8841 | rtx *pop1; | |
8842 | { | |
8843 | rtx op0 = *pop0; | |
8844 | rtx op1 = *pop1; | |
8845 | rtx tem, tem1; | |
8846 | int i; | |
8847 | enum machine_mode mode, tmode; | |
8848 | ||
8849 | /* Try a few ways of applying the same transformation to both operands. */ | |
8850 | while (1) | |
8851 | { | |
3a19aabc RK |
8852 | #ifndef WORD_REGISTER_OPERATIONS |
8853 | /* The test below this one won't handle SIGN_EXTENDs on these machines, | |
8854 | so check specially. */ | |
8855 | if (code != GTU && code != GEU && code != LTU && code != LEU | |
8856 | && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT | |
8857 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
8858 | && GET_CODE (XEXP (op1, 0)) == ASHIFT | |
8859 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG | |
8860 | && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG | |
8861 | && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) | |
ad25ba17 | 8862 | == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) |
3a19aabc RK |
8863 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
8864 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
8865 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
8866 | && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT | |
8867 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1)) | |
8868 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1)) | |
8869 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1)) | |
8870 | && (INTVAL (XEXP (op0, 1)) | |
8871 | == (GET_MODE_BITSIZE (GET_MODE (op0)) | |
8872 | - (GET_MODE_BITSIZE | |
8873 | (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) | |
8874 | { | |
8875 | op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); | |
8876 | op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0)); | |
8877 | } | |
8878 | #endif | |
8879 | ||
230d793d RS |
8880 | /* If both operands are the same constant shift, see if we can ignore the |
8881 | shift. We can if the shift is a rotate or if the bits shifted out of | |
951553af | 8882 | this shift are known to be zero for both inputs and if the type of |
230d793d | 8883 | comparison is compatible with the shift. */ |
67232b23 RK |
8884 | if (GET_CODE (op0) == GET_CODE (op1) |
8885 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
8886 | && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ)) | |
45620ed4 | 8887 | || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) |
67232b23 RK |
8888 | && (code != GT && code != LT && code != GE && code != LE)) |
8889 | || (GET_CODE (op0) == ASHIFTRT | |
8890 | && (code != GTU && code != LTU | |
8891 | && code != GEU && code != LEU))) | |
8892 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
8893 | && INTVAL (XEXP (op0, 1)) >= 0 | |
8894 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | |
8895 | && XEXP (op0, 1) == XEXP (op1, 1)) | |
230d793d RS |
8896 | { |
8897 | enum machine_mode mode = GET_MODE (op0); | |
5f4f0e22 | 8898 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
8899 | int shift_count = INTVAL (XEXP (op0, 1)); |
8900 | ||
8901 | if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT) | |
8902 | mask &= (mask >> shift_count) << shift_count; | |
45620ed4 | 8903 | else if (GET_CODE (op0) == ASHIFT) |
230d793d RS |
8904 | mask = (mask & (mask << shift_count)) >> shift_count; |
8905 | ||
951553af RK |
8906 | if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0 |
8907 | && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0) | |
230d793d RS |
8908 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0); |
8909 | else | |
8910 | break; | |
8911 | } | |
8912 | ||
8913 | /* If both operands are AND's of a paradoxical SUBREG by constant, the | |
8914 | SUBREGs are of the same mode, and, in both cases, the AND would | |
8915 | be redundant if the comparison was done in the narrower mode, | |
8916 | do the comparison in the narrower mode (e.g., we are AND'ing with 1 | |
951553af RK |
8917 | and the operand's possibly nonzero bits are 0xffffff01; in that case |
8918 | if we only care about QImode, we don't need the AND). This case | |
8919 | occurs if the output mode of an scc insn is not SImode and | |
7e4dc511 RK |
8920 | STORE_FLAG_VALUE == 1 (e.g., the 386). |
8921 | ||
8922 | Similarly, check for a case where the AND's are ZERO_EXTEND | |
8923 | operations from some narrower mode even though a SUBREG is not | |
8924 | present. */ | |
230d793d RS |
8925 | |
8926 | else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND | |
8927 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
7e4dc511 | 8928 | && GET_CODE (XEXP (op1, 1)) == CONST_INT) |
230d793d | 8929 | { |
7e4dc511 RK |
8930 | rtx inner_op0 = XEXP (op0, 0); |
8931 | rtx inner_op1 = XEXP (op1, 0); | |
8932 | HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); | |
8933 | HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); | |
8934 | int changed = 0; | |
8935 | ||
8936 | if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG | |
8937 | && (GET_MODE_SIZE (GET_MODE (inner_op0)) | |
8938 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0)))) | |
8939 | && (GET_MODE (SUBREG_REG (inner_op0)) | |
8940 | == GET_MODE (SUBREG_REG (inner_op1))) | |
8941 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
8942 | <= HOST_BITS_PER_WIDE_INT) | |
01c82bbb RK |
8943 | && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), |
8944 | GET_MODE (SUBREG_REG (op0))))) | |
8945 | && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1), | |
8946 | GET_MODE (SUBREG_REG (inner_op1)))))) | |
7e4dc511 RK |
8947 | { |
8948 | op0 = SUBREG_REG (inner_op0); | |
8949 | op1 = SUBREG_REG (inner_op1); | |
8950 | ||
8951 | /* The resulting comparison is always unsigned since we masked | |
8952 | off the original sign bit. */ | |
8953 | code = unsigned_condition (code); | |
8954 | ||
8955 | changed = 1; | |
8956 | } | |
230d793d | 8957 | |
7e4dc511 RK |
8958 | else if (c0 == c1) |
8959 | for (tmode = GET_CLASS_NARROWEST_MODE | |
8960 | (GET_MODE_CLASS (GET_MODE (op0))); | |
8961 | tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode)) | |
8962 | if (c0 == GET_MODE_MASK (tmode)) | |
8963 | { | |
8964 | op0 = gen_lowpart_for_combine (tmode, inner_op0); | |
8965 | op1 = gen_lowpart_for_combine (tmode, inner_op1); | |
66415c8b | 8966 | code = unsigned_condition (code); |
7e4dc511 RK |
8967 | changed = 1; |
8968 | break; | |
8969 | } | |
8970 | ||
8971 | if (! changed) | |
8972 | break; | |
230d793d | 8973 | } |
3a19aabc | 8974 | |
ad25ba17 RK |
8975 | /* If both operands are NOT, we can strip off the outer operation |
8976 | and adjust the comparison code for swapped operands; similarly for | |
8977 | NEG, except that this must be an equality comparison. */ | |
8978 | else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT) | |
8979 | || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG | |
8980 | && (code == EQ || code == NE))) | |
8981 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code); | |
3a19aabc | 8982 | |
230d793d RS |
8983 | else |
8984 | break; | |
8985 | } | |
8986 | ||
8987 | /* If the first operand is a constant, swap the operands and adjust the | |
8988 | comparison code appropriately. */ | |
8989 | if (CONSTANT_P (op0)) | |
8990 | { | |
8991 | tem = op0, op0 = op1, op1 = tem; | |
8992 | code = swap_condition (code); | |
8993 | } | |
8994 | ||
8995 | /* We now enter a loop during which we will try to simplify the comparison. | |
8996 | For the most part, we only are concerned with comparisons with zero, | |
8997 | but some things may really be comparisons with zero but not start | |
8998 | out looking that way. */ | |
8999 | ||
9000 | while (GET_CODE (op1) == CONST_INT) | |
9001 | { | |
9002 | enum machine_mode mode = GET_MODE (op0); | |
9003 | int mode_width = GET_MODE_BITSIZE (mode); | |
5f4f0e22 | 9004 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
9005 | int equality_comparison_p; |
9006 | int sign_bit_comparison_p; | |
9007 | int unsigned_comparison_p; | |
5f4f0e22 | 9008 | HOST_WIDE_INT const_op; |
230d793d RS |
9009 | |
9010 | /* We only want to handle integral modes. This catches VOIDmode, | |
9011 | CCmode, and the floating-point modes. An exception is that we | |
9012 | can handle VOIDmode if OP0 is a COMPARE or a comparison | |
9013 | operation. */ | |
9014 | ||
9015 | if (GET_MODE_CLASS (mode) != MODE_INT | |
9016 | && ! (mode == VOIDmode | |
9017 | && (GET_CODE (op0) == COMPARE | |
9018 | || GET_RTX_CLASS (GET_CODE (op0)) == '<'))) | |
9019 | break; | |
9020 | ||
9021 | /* Get the constant we are comparing against and turn off all bits | |
9022 | not on in our mode. */ | |
9023 | const_op = INTVAL (op1); | |
5f4f0e22 | 9024 | if (mode_width <= HOST_BITS_PER_WIDE_INT) |
4803a34a | 9025 | const_op &= mask; |
230d793d RS |
9026 | |
9027 | /* If we are comparing against a constant power of two and the value | |
951553af | 9028 | being compared can only have that single bit nonzero (e.g., it was |
230d793d RS |
9029 | `and'ed with that bit), we can replace this with a comparison |
9030 | with zero. */ | |
9031 | if (const_op | |
9032 | && (code == EQ || code == NE || code == GE || code == GEU | |
9033 | || code == LT || code == LTU) | |
5f4f0e22 | 9034 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 9035 | && exact_log2 (const_op) >= 0 |
951553af | 9036 | && nonzero_bits (op0, mode) == const_op) |
230d793d RS |
9037 | { |
9038 | code = (code == EQ || code == GE || code == GEU ? NE : EQ); | |
9039 | op1 = const0_rtx, const_op = 0; | |
9040 | } | |
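/* For example, if OP0 is (and:SI X (const_int 4)), its only possibly
   nonzero bit is bit 2, so (eq OP0 (const_int 4)) holds exactly when
   OP0 is nonzero and becomes (ne OP0 (const_int 0)).  */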
9041 | ||
d0ab8cd3 RK |
9042 | /* Similarly, if we are comparing a value known to be either -1 or |
9043 | 0 with -1, change it to the opposite comparison against zero. */ | |
9044 | ||
9045 | if (const_op == -1 | |
9046 | && (code == EQ || code == NE || code == GT || code == LE | |
9047 | || code == GEU || code == LTU) | |
9048 | && num_sign_bit_copies (op0, mode) == mode_width) | |
9049 | { | |
9050 | code = (code == EQ || code == LE || code == GEU ? NE : EQ); | |
9051 | op1 = const0_rtx, const_op = 0; | |
9052 | } | |
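/* For example, if OP0 is an ASHIFTRT of X by the mode width minus 1,
   it is always 0 or -1, so (eq OP0 (const_int -1)) becomes
   (ne OP0 (const_int 0)).  */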
9053 | ||
230d793d | 9054 | /* Do some canonicalizations based on the comparison code. We prefer |
4803a34a RK |
9055 | comparisons against zero and then prefer equality comparisons. |
9056 | If we can reduce the size of a constant, we will do that too. */ | |
230d793d RS |
9057 | |
9058 | switch (code) | |
9059 | { | |
9060 | case LT: | |
4803a34a RK |
9061 | /* < C is equivalent to <= (C - 1) */ |
9062 | if (const_op > 0) | |
230d793d | 9063 | { |
4803a34a | 9064 | const_op -= 1; |
5f4f0e22 | 9065 | op1 = GEN_INT (const_op); |
230d793d RS |
9066 | code = LE; |
9067 | /* ... fall through to LE case below. */ | |
9068 | } | |
9069 | else | |
9070 | break; | |
9071 | ||
9072 | case LE: | |
4803a34a RK |
9073 | /* <= C is equivalent to < (C + 1); we do this for C < 0 */ |
9074 | if (const_op < 0) | |
9075 | { | |
9076 | const_op += 1; | |
5f4f0e22 | 9077 | op1 = GEN_INT (const_op); |
4803a34a RK |
9078 | code = LT; |
9079 | } | |
230d793d RS |
9080 | |
9081 | /* If we are doing a <= 0 comparison on a value known to have | |
9082 | a zero sign bit, we can replace this with == 0. */ | |
9083 | else if (const_op == 0 | |
5f4f0e22 | 9084 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9085 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 9086 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
9087 | code = EQ; |
9088 | break; | |
9089 | ||
9090 | case GE: | |
4803a34a RK |
9091 | /* >= C is equivalent to > (C - 1). */ |
9092 | if (const_op > 0) | |
230d793d | 9093 | { |
4803a34a | 9094 | const_op -= 1; |
5f4f0e22 | 9095 | op1 = GEN_INT (const_op); |
230d793d RS |
9096 | code = GT; |
9097 | /* ... fall through to GT below. */ | |
9098 | } | |
9099 | else | |
9100 | break; | |
9101 | ||
9102 | case GT: | |
4803a34a RK |
9103 | /* > C is equivalent to >= (C + 1); we do this for C < 0 */ | |
9104 | if (const_op < 0) | |
9105 | { | |
9106 | const_op += 1; | |
5f4f0e22 | 9107 | op1 = GEN_INT (const_op); |
4803a34a RK |
9108 | code = GE; |
9109 | } | |
230d793d RS |
9110 | |
9111 | /* If we are doing a > 0 comparison on a value known to have | |
9112 | a zero sign bit, we can replace this with != 0. */ | |
9113 | else if (const_op == 0 | |
5f4f0e22 | 9114 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9115 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 9116 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
9117 | code = NE; |
9118 | break; | |
9119 | ||
230d793d | 9120 | case LTU: |
4803a34a RK |
9121 | /* < C is equivalent to <= (C - 1). */ |
9122 | if (const_op > 0) | |
9123 | { | |
9124 | const_op -= 1; | |
5f4f0e22 | 9125 | op1 = GEN_INT (const_op); |
4803a34a RK |
9126 | code = LEU; |
9127 | /* ... fall through ... */ | |
9128 | } | |
d0ab8cd3 RK |
9129 | |
9130 | /* (unsigned) < 0x80000000 is equivalent to >= 0. */ | |
9131 | else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)) | |
9132 | { | |
9133 | const_op = 0, op1 = const0_rtx; | |
9134 | code = GE; | |
9135 | break; | |
9136 | } | |
4803a34a RK |
9137 | else |
9138 | break; | |
230d793d RS |
9139 | |
9140 | case LEU: | |
9141 | /* unsigned <= 0 is equivalent to == 0 */ | |
9142 | if (const_op == 0) | |
9143 | code = EQ; | |
d0ab8cd3 RK |
9144 | |
9145 | /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */ | |
9146 | else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1) | |
9147 | { | |
9148 | const_op = 0, op1 = const0_rtx; | |
9149 | code = GE; | |
9150 | } | |
230d793d RS |
9151 | break; |
9152 | ||
4803a34a RK |
9153 | case GEU: |
9154 | /* >= C is equivalent to > (C - 1). */ | |
9155 | if (const_op > 1) | |
9156 | { | |
9157 | const_op -= 1; | |
5f4f0e22 | 9158 | op1 = GEN_INT (const_op); |
4803a34a RK |
9159 | code = GTU; |
9160 | /* ... fall through ... */ | |
9161 | } | |
d0ab8cd3 RK |
9162 | |
9163 | /* (unsigned) >= 0x80000000 is equivalent to < 0. */ | |
9164 | else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)) | |
9165 | { | |
9166 | const_op = 0, op1 = const0_rtx; | |
9167 | code = LT; | |
8b2e69e1 | 9168 | break; |
d0ab8cd3 | 9169 | } |
4803a34a RK |
9170 | else |
9171 | break; | |
9172 | ||
230d793d RS |
9173 | case GTU: |
9174 | /* unsigned > 0 is equivalent to != 0 */ | |
9175 | if (const_op == 0) | |
9176 | code = NE; | |
d0ab8cd3 RK |
9177 | |
9178 | /* (unsigned) > 0x7fffffff is equivalent to < 0. */ | |
9179 | else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1) | |
9180 | { | |
9181 | const_op = 0, op1 = const0_rtx; | |
9182 | code = LT; | |
9183 | } | |
230d793d RS |
9184 | break; |
9185 | } | |
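/* For instance, in SImode (lt X (const_int 5)) becomes
   (le X (const_int 4)), (geu X (const_int 4)) becomes
   (gtu X (const_int 3)), (ltu X (const_int 0x80000000)) becomes
   (ge X (const_int 0)), and (leu X (const_int 0)) becomes
   (eq X (const_int 0)).  */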
9186 | ||
9187 | /* Compute some predicates to simplify code below. */ | |
9188 | ||
9189 | equality_comparison_p = (code == EQ || code == NE); | |
9190 | sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0); | |
9191 | unsigned_comparison_p = (code == LTU || code == LEU || code == GTU | |
9192 | || code == LEU); | |
9193 | ||
6139ff20 RK |
9194 | /* If this is a sign bit comparison and we can do arithmetic in |
9195 | MODE, say that we will only be needing the sign bit of OP0. */ | |
9196 | if (sign_bit_comparison_p | |
9197 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
9198 | op0 = force_to_mode (op0, mode, | |
9199 | ((HOST_WIDE_INT) 1 | |
9200 | << (GET_MODE_BITSIZE (mode) - 1)), | |
e3d616e3 | 9201 | NULL_RTX, 0); |
6139ff20 | 9202 | |
230d793d RS |
9203 | /* Now try cases based on the opcode of OP0. If none of the cases |
9204 | does a "continue", we exit this loop immediately after the | |
9205 | switch. */ | |
9206 | ||
9207 | switch (GET_CODE (op0)) | |
9208 | { | |
9209 | case ZERO_EXTRACT: | |
9210 | /* If we are extracting a single bit from a variable position in | |
9211 | a constant that has only a single bit set and are comparing it | |
9212 | with zero, we can convert this into an equality comparison | |
d7cd794f | 9213 | between the position and the location of the single bit. */ |
230d793d | 9214 | |
230d793d RS |
9215 | if (GET_CODE (XEXP (op0, 0)) == CONST_INT |
9216 | && XEXP (op0, 1) == const1_rtx | |
9217 | && equality_comparison_p && const_op == 0 | |
d7cd794f | 9218 | && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) |
230d793d | 9219 | { |
f76b9db2 | 9220 | if (BITS_BIG_ENDIAN) |
d7cd794f | 9221 | #ifdef HAVE_extzv |
f76b9db2 ILT |
9222 | i = (GET_MODE_BITSIZE |
9223 | (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i); | |
d7cd794f RK |
9224 | #else |
9225 | i = BITS_PER_WORD - 1 - i; | |
230d793d RS |
9226 | #endif |
9227 | ||
9228 | op0 = XEXP (op0, 2); | |
5f4f0e22 | 9229 | op1 = GEN_INT (i); |
230d793d RS |
9230 | const_op = i; |
9231 | ||
9232 | /* Result is nonzero iff shift count is equal to I. */ | |
9233 | code = reverse_condition (code); | |
9234 | continue; | |
9235 | } | |
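/* For example, with little-endian bit numbering,
   (eq (zero_extract (const_int 4) (const_int 1) POS) (const_int 0))
   selects bit POS of the constant 4, which is 1 only when POS is 2,
   so the test becomes (ne POS (const_int 2)).  */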
230d793d RS |
9236 | |
9237 | /* ... fall through ... */ | |
9238 | ||
9239 | case SIGN_EXTRACT: | |
9240 | tem = expand_compound_operation (op0); | |
9241 | if (tem != op0) | |
9242 | { | |
9243 | op0 = tem; | |
9244 | continue; | |
9245 | } | |
9246 | break; | |
9247 | ||
9248 | case NOT: | |
9249 | /* If testing for equality, we can take the NOT of the constant. */ | |
9250 | if (equality_comparison_p | |
9251 | && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0) | |
9252 | { | |
9253 | op0 = XEXP (op0, 0); | |
9254 | op1 = tem; | |
9255 | continue; | |
9256 | } | |
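/* For example, (eq (not X) (const_int 5)) becomes
   (eq X (const_int -6)), since -6 is the bitwise complement of 5.  */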
9257 | ||
9258 | /* If just looking at the sign bit, reverse the sense of the | |
9259 | comparison. */ | |
9260 | if (sign_bit_comparison_p) | |
9261 | { | |
9262 | op0 = XEXP (op0, 0); | |
9263 | code = (code == GE ? LT : GE); | |
9264 | continue; | |
9265 | } | |
9266 | break; | |
9267 | ||
9268 | case NEG: | |
9269 | /* If testing for equality, we can take the NEG of the constant. */ | |
9270 | if (equality_comparison_p | |
9271 | && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0) | |
9272 | { | |
9273 | op0 = XEXP (op0, 0); | |
9274 | op1 = tem; | |
9275 | continue; | |
9276 | } | |
9277 | ||
9278 | /* The remaining cases only apply to comparisons with zero. */ | |
9279 | if (const_op != 0) | |
9280 | break; | |
9281 | ||
9282 | /* When X is ABS or is known positive, | |
9283 | (neg X) is < 0 if and only if X != 0. */ | |
9284 | ||
9285 | if (sign_bit_comparison_p | |
9286 | && (GET_CODE (XEXP (op0, 0)) == ABS | |
5f4f0e22 | 9287 | || (mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9288 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 9289 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0))) |
230d793d RS |
9290 | { |
9291 | op0 = XEXP (op0, 0); | |
9292 | code = (code == LT ? NE : EQ); | |
9293 | continue; | |
9294 | } | |
9295 | ||
3bed8141 RK |
9296 | /* If we have NEG of something whose two high-order bits are the |
9297 | same, we know that "(-a) < 0" is equivalent to "a > 0". */ | |
9298 | if (num_sign_bit_copies (op0, mode) >= 2) | |
230d793d RS |
9299 | { |
9300 | op0 = XEXP (op0, 0); | |
9301 | code = swap_condition (code); | |
9302 | continue; | |
9303 | } | |
9304 | break; | |
9305 | ||
9306 | case ROTATE: | |
9307 | /* If we are testing equality and our count is a constant, we | |
9308 | can perform the inverse operation on our RHS. */ | |
9309 | if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9310 | && (tem = simplify_binary_operation (ROTATERT, mode, | |
9311 | op1, XEXP (op0, 1))) != 0) | |
9312 | { | |
9313 | op0 = XEXP (op0, 0); | |
9314 | op1 = tem; | |
9315 | continue; | |
9316 | } | |
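/* For example, (eq (rotate X (const_int 3)) C) becomes (eq X C')
   where C' is the constant C rotated right by 3 bits.  */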
9317 | ||
9318 | /* If we are doing a < 0 or >= 0 comparison, it means we are testing | |
9319 | a particular bit. Convert it to an AND of a constant of that | |
9320 | bit. This will be converted into a ZERO_EXTRACT. */ | |
9321 | if (const_op == 0 && sign_bit_comparison_p | |
9322 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 9323 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 9324 | { |
5f4f0e22 CH |
9325 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
9326 | ((HOST_WIDE_INT) 1 | |
9327 | << (mode_width - 1 | |
9328 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
9329 | code = (code == LT ? NE : EQ); |
9330 | continue; | |
9331 | } | |
9332 | ||
9333 | /* ... fall through ... */ | |
9334 | ||
9335 | case ABS: | |
9336 | /* ABS is ignorable inside an equality comparison with zero. */ | |
9337 | if (const_op == 0 && equality_comparison_p) | |
9338 | { | |
9339 | op0 = XEXP (op0, 0); | |
9340 | continue; | |
9341 | } | |
9342 | break; | |
9343 | ||
9344 | ||
9345 | case SIGN_EXTEND: | |
9346 | /* Can simplify (compare (zero/sign_extend FOO) CONST) | |
9347 | to (compare FOO CONST) if CONST fits in FOO's mode and we | |
9348 | are either testing inequality or have an unsigned comparison | |
9349 | with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */ | |
9350 | if (! unsigned_comparison_p | |
9351 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
9352 | <= HOST_BITS_PER_WIDE_INT) |
9353 | && ((unsigned HOST_WIDE_INT) const_op | |
9354 | < (((HOST_WIDE_INT) 1 | |
9355 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1))))) | |
230d793d RS |
9356 | { |
9357 | op0 = XEXP (op0, 0); | |
9358 | continue; | |
9359 | } | |
9360 | break; | |
9361 | ||
9362 | case SUBREG: | |
a687e897 | 9363 | /* Check for the case where we are comparing A - C1 with C2, |
abc95ed3 | 9364 | both constants are smaller than 1/2 the maximum positive |
a687e897 RK |
9365 | value in MODE, and the comparison is equality or unsigned. |
9366 | In that case, if A is either zero-extended to MODE or has | |
9367 | sufficient sign bits so that the high-order bit in MODE | |
9368 | is a copy of the sign in the inner mode, we can prove that it is | |
9369 | safe to do the operation in the wider mode. This simplifies | |
9370 | many range checks. */ | |
9371 | ||
9372 | if (mode_width <= HOST_BITS_PER_WIDE_INT | |
9373 | && subreg_lowpart_p (op0) | |
9374 | && GET_CODE (SUBREG_REG (op0)) == PLUS | |
9375 | && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT | |
9376 | && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0 | |
9377 | && (- INTVAL (XEXP (SUBREG_REG (op0), 1)) | |
9378 | < GET_MODE_MASK (mode) / 2) | |
adb7a1cb | 9379 | && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2 |
951553af RK |
9380 | && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0), |
9381 | GET_MODE (SUBREG_REG (op0))) | |
a687e897 RK |
9382 | & ~ GET_MODE_MASK (mode)) |
9383 | || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0), | |
9384 | GET_MODE (SUBREG_REG (op0))) | |
9385 | > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
9386 | - GET_MODE_BITSIZE (mode))))) | |
9387 | { | |
9388 | op0 = SUBREG_REG (op0); | |
9389 | continue; | |
9390 | } | |
9391 | ||
fe0cf571 RK |
9392 | /* If the inner mode is narrower and we are extracting the low part, |
9393 | we can treat the SUBREG as if it were a ZERO_EXTEND. */ | |
9394 | if (subreg_lowpart_p (op0) | |
89f1c7f2 RS |
9395 | && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) |
9396 | /* Fall through */ ; | |
9397 | else | |
230d793d RS |
9398 | break; |
9399 | ||
9400 | /* ... fall through ... */ | |
9401 | ||
9402 | case ZERO_EXTEND: | |
9403 | if ((unsigned_comparison_p || equality_comparison_p) | |
9404 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
9405 | <= HOST_BITS_PER_WIDE_INT) |
9406 | && ((unsigned HOST_WIDE_INT) const_op | |
230d793d RS |
9407 | < GET_MODE_MASK (GET_MODE (XEXP (op0, 0))))) |
9408 | { | |
9409 | op0 = XEXP (op0, 0); | |
9410 | continue; | |
9411 | } | |
9412 | break; | |
9413 | ||
9414 | case PLUS: | |
20fdd649 | 9415 | /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do |
5089e22e | 9416 | this for equality comparisons due to pathological cases involving |
230d793d | 9417 | overflows. */ |
20fdd649 RK |
9418 | if (equality_comparison_p |
9419 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
9420 | op1, XEXP (op0, 1)))) | |
230d793d RS |
9421 | { |
9422 | op0 = XEXP (op0, 0); | |
9423 | op1 = tem; | |
9424 | continue; | |
9425 | } | |
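/* For example, (eq (plus X (const_int 3)) (const_int 10)) becomes
   (eq X (const_int 7)).  */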
9426 | ||
9427 | /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */ | |
9428 | if (const_op == 0 && XEXP (op0, 1) == constm1_rtx | |
9429 | && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p) | |
9430 | { | |
9431 | op0 = XEXP (XEXP (op0, 0), 0); | |
9432 | code = (code == LT ? EQ : NE); | |
9433 | continue; | |
9434 | } | |
9435 | break; | |
9436 | ||
9437 | case MINUS: | |
20fdd649 RK |
9438 | /* (eq (minus A B) C) -> (eq A (plus B C)) or |
9439 | (eq B (minus A C)), whichever simplifies. We can only do | |
9440 | this for equality comparisons due to pathological cases involving | |
9441 | overflows. */ | |
9442 | if (equality_comparison_p | |
9443 | && 0 != (tem = simplify_binary_operation (PLUS, mode, | |
9444 | XEXP (op0, 1), op1))) | |
9445 | { | |
9446 | op0 = XEXP (op0, 0); | |
9447 | op1 = tem; | |
9448 | continue; | |
9449 | } | |
9450 | ||
9451 | if (equality_comparison_p | |
9452 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
9453 | XEXP (op0, 0), op1))) | |
9454 | { | |
9455 | op0 = XEXP (op0, 1); | |
9456 | op1 = tem; | |
9457 | continue; | |
9458 | } | |
9459 | ||
230d793d RS |
9460 | /* The sign bit of (minus (ashiftrt X C) X), where C is the number |
9461 | of bits in X minus 1, is one iff X > 0. */ | |
9462 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT | |
9463 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
9464 | && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1 | |
9465 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
9466 | { | |
9467 | op0 = XEXP (op0, 1); | |
9468 | code = (code == GE ? LE : GT); | |
9469 | continue; | |
9470 | } | |
9471 | break; | |
9472 | ||
9473 | case XOR: | |
9474 | /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification | |
9475 | if C is zero or B is a constant. */ | |
9476 | if (equality_comparison_p | |
9477 | && 0 != (tem = simplify_binary_operation (XOR, mode, | |
9478 | XEXP (op0, 1), op1))) | |
9479 | { | |
9480 | op0 = XEXP (op0, 0); | |
9481 | op1 = tem; | |
9482 | continue; | |
9483 | } | |
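/* For example, (eq (xor X (const_int 5)) (const_int 3)) becomes
   (eq X (const_int 6)).  */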
9484 | break; | |
9485 | ||
9486 | case EQ: case NE: | |
9487 | case LT: case LTU: case LE: case LEU: | |
9488 | case GT: case GTU: case GE: case GEU: | |
9489 | /* We can't do anything if OP0 is a condition code value, rather | |
9490 | than an actual data value. */ | |
9491 | if (const_op != 0 | |
9492 | #ifdef HAVE_cc0 | |
9493 | || XEXP (op0, 0) == cc0_rtx | |
9494 | #endif | |
9495 | || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC) | |
9496 | break; | |
9497 | ||
9498 | /* Get the two operands being compared. */ | |
9499 | if (GET_CODE (XEXP (op0, 0)) == COMPARE) | |
9500 | tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1); | |
9501 | else | |
9502 | tem = XEXP (op0, 0), tem1 = XEXP (op0, 1); | |
9503 | ||
9504 | /* Check for the cases where we simply want the result of the | |
9505 | earlier test or the opposite of that result. */ | |
9506 | if (code == NE | |
9507 | || (code == EQ && reversible_comparison_p (op0)) | |
5f4f0e22 | 9508 | || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT |
3f508eca | 9509 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT |
230d793d | 9510 | && (STORE_FLAG_VALUE |
5f4f0e22 CH |
9511 | & (((HOST_WIDE_INT) 1 |
9512 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
230d793d RS |
9513 | && (code == LT |
9514 | || (code == GE && reversible_comparison_p (op0))))) | |
9515 | { | |
9516 | code = (code == LT || code == NE | |
9517 | ? GET_CODE (op0) : reverse_condition (GET_CODE (op0))); | |
9518 | op0 = tem, op1 = tem1; | |
9519 | continue; | |
9520 | } | |
9521 | break; | |
9522 | ||
9523 | case IOR: | |
9524 | /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero | |
9525 | iff X <= 0. */ | |
9526 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS | |
9527 | && XEXP (XEXP (op0, 0), 1) == constm1_rtx | |
9528 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
9529 | { | |
9530 | op0 = XEXP (op0, 1); | |
9531 | code = (code == GE ? GT : LE); | |
9532 | continue; | |
9533 | } | |
9534 | break; | |
9535 | ||
9536 | case AND: | |
9537 | /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This | |
9538 | will be converted to a ZERO_EXTRACT later. */ | |
9539 | if (const_op == 0 && equality_comparison_p | |
45620ed4 | 9540 | && GET_CODE (XEXP (op0, 0)) == ASHIFT |
230d793d RS |
9541 | && XEXP (XEXP (op0, 0), 0) == const1_rtx) |
9542 | { | |
9543 | op0 = simplify_and_const_int | |
9544 | (op0, mode, gen_rtx_combine (LSHIFTRT, mode, | |
9545 | XEXP (op0, 1), | |
9546 | XEXP (XEXP (op0, 0), 1)), | |
5f4f0e22 | 9547 | (HOST_WIDE_INT) 1); |
230d793d RS |
9548 | continue; |
9549 | } | |
9550 | ||
9551 | /* If we are comparing (and (lshiftrt X C1) C2) for equality with | |
9552 | zero and X is a comparison and C1 and C2 describe only bits set | |
9553 | in STORE_FLAG_VALUE, we can compare with X. */ | |
9554 | if (const_op == 0 && equality_comparison_p | |
5f4f0e22 | 9555 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
9556 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
9557 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | |
9558 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
9559 | && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 | |
5f4f0e22 | 9560 | && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
9561 | { |
9562 | mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | |
9563 | << INTVAL (XEXP (XEXP (op0, 0), 1))); | |
9564 | if ((~ STORE_FLAG_VALUE & mask) == 0 | |
9565 | && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<' | |
9566 | || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0 | |
9567 | && GET_RTX_CLASS (GET_CODE (tem)) == '<'))) | |
9568 | { | |
9569 | op0 = XEXP (XEXP (op0, 0), 0); | |
9570 | continue; | |
9571 | } | |
9572 | } | |
9573 | ||
9574 | /* If we are doing an equality comparison of an AND of a bit equal | |
9575 | to the sign bit, replace this with a LT or GE comparison of | |
9576 | the underlying value. */ | |
9577 | if (equality_comparison_p | |
9578 | && const_op == 0 | |
9579 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 9580 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 9581 | && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
5f4f0e22 | 9582 | == (HOST_WIDE_INT) 1 << (mode_width - 1))) |
230d793d RS |
9583 | { |
9584 | op0 = XEXP (op0, 0); | |
9585 | code = (code == EQ ? GE : LT); | |
9586 | continue; | |
9587 | } | |
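/* For example, in SImode (eq (and X (const_int 0x80000000)) (const_int 0))
   tests the sign bit of X and becomes (ge X (const_int 0)); the NE form
   becomes (lt X (const_int 0)).  */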
9588 | ||
9589 | /* If this AND operation is really a ZERO_EXTEND from a narrower | |
9590 | mode, the constant fits within that mode, and this is either an | |
9591 | equality or unsigned comparison, try to do this comparison in | |
9592 | the narrower mode. */ | |
9593 | if ((equality_comparison_p || unsigned_comparison_p) | |
9594 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9595 | && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) | |
9596 | & GET_MODE_MASK (mode)) | |
9597 | + 1)) >= 0 | |
9598 | && const_op >> i == 0 | |
9599 | && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode) | |
9600 | { | |
9601 | op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0)); | |
9602 | continue; | |
9603 | } | |
9604 | break; | |
9605 | ||
9606 | case ASHIFT: | |
45620ed4 | 9607 | /* If we have (compare (ashift FOO N) (const_int C)) and |
230d793d | 9608 | the high order N bits of FOO (N+1 if an inequality comparison) |
951553af | 9609 | are known to be zero, we can do this by comparing FOO with C |
230d793d RS |
9610 | shifted right N bits so long as the low-order N bits of C are |
9611 | zero. */ | |
9612 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9613 | && INTVAL (XEXP (op0, 1)) >= 0 | |
9614 | && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) | |
5f4f0e22 CH |
9615 | < HOST_BITS_PER_WIDE_INT) |
9616 | && ((const_op | |
34785d05 | 9617 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) |
5f4f0e22 | 9618 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9619 | && (nonzero_bits (XEXP (op0, 0), mode) |
230d793d RS |
9620 | & ~ (mask >> (INTVAL (XEXP (op0, 1)) |
9621 | + ! equality_comparison_p))) == 0) | |
9622 | { | |
9623 | const_op >>= INTVAL (XEXP (op0, 1)); | |
5f4f0e22 | 9624 | op1 = GEN_INT (const_op); |
230d793d RS |
9625 | op0 = XEXP (op0, 0); |
9626 | continue; | |
9627 | } | |
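/* For example, (eq (ashift X (const_int 2)) (const_int 20)) becomes
   (eq X (const_int 5)) when the two high-order bits of X are known to
   be zero; the two low-order bits of 20 are zero, so no information
   is lost.  */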
9628 | ||
dfbe1b2f | 9629 | /* If we are doing a sign bit comparison, it means we are testing |
230d793d | 9630 | a particular bit. Convert it to the appropriate AND. */ |
dfbe1b2f | 9631 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT |
5f4f0e22 | 9632 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 9633 | { |
5f4f0e22 CH |
9634 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
9635 | ((HOST_WIDE_INT) 1 | |
9636 | << (mode_width - 1 | |
9637 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
9638 | code = (code == LT ? NE : EQ); |
9639 | continue; | |
9640 | } | |
dfbe1b2f RK |
9641 | |
9642 | /* If this is an equality comparison with zero and we are shifting | |
9643 | the low bit to the sign bit, we can convert this to an AND of the | |
9644 | low-order bit. */ | |
9645 | if (const_op == 0 && equality_comparison_p | |
9646 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9647 | && INTVAL (XEXP (op0, 1)) == mode_width - 1) | |
9648 | { | |
5f4f0e22 CH |
9649 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
9650 | (HOST_WIDE_INT) 1); | |
dfbe1b2f RK |
9651 | continue; |
9652 | } | |
230d793d RS |
9653 | break; |
9654 | ||
9655 | case ASHIFTRT: | |
d0ab8cd3 RK |
9656 | /* If this is an equality comparison with zero, we can do this |
9657 | as a logical shift, which might be much simpler. */ | |
9658 | if (equality_comparison_p && const_op == 0 | |
9659 | && GET_CODE (XEXP (op0, 1)) == CONST_INT) | |
9660 | { | |
9661 | op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, | |
9662 | XEXP (op0, 0), | |
9663 | INTVAL (XEXP (op0, 1))); | |
9664 | continue; | |
9665 | } | |
9666 | ||
230d793d RS |
9667 | /* If OP0 is a sign extension and CODE is not an unsigned comparison, |
9668 | do the comparison in a narrower mode. */ | |
9669 | if (! unsigned_comparison_p | |
9670 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9671 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
9672 | && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | |
9673 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
22331794 | 9674 | MODE_INT, 1)) != BLKmode |
5f4f0e22 CH |
9675 | && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode) |
9676 | || ((unsigned HOST_WIDE_INT) - const_op | |
9677 | <= GET_MODE_MASK (tmode)))) | |
230d793d RS |
9678 | { |
9679 | op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0)); | |
9680 | continue; | |
9681 | } | |
9682 | ||
9683 | /* ... fall through ... */ | |
9684 | case LSHIFTRT: | |
9685 | /* If we have (compare (xshiftrt FOO N) (const_int C)) and | |
951553af | 9686 | the low order N bits of FOO are known to be zero, we can do this |
230d793d RS |
9687 | by comparing FOO with C shifted left N bits so long as no |
9688 | overflow occurs. */ | |
9689 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9690 | && INTVAL (XEXP (op0, 1)) >= 0 | |
5f4f0e22 CH |
9691 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
9692 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
951553af | 9693 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 9694 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 |
230d793d RS |
9695 | && (const_op == 0 |
9696 | || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1)) | |
9697 | < mode_width))) | |
9698 | { | |
9699 | const_op <<= INTVAL (XEXP (op0, 1)); | |
5f4f0e22 | 9700 | op1 = GEN_INT (const_op); |
230d793d RS |
9701 | op0 = XEXP (op0, 0); |
9702 | continue; | |
9703 | } | |
9704 | ||
9705 | /* If we are using this shift to extract just the sign bit, we | |
9706 | can replace this with an LT or GE comparison. */ | |
9707 | if (const_op == 0 | |
9708 | && (equality_comparison_p || sign_bit_comparison_p) | |
9709 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9710 | && INTVAL (XEXP (op0, 1)) == mode_width - 1) | |
9711 | { | |
9712 | op0 = XEXP (op0, 0); | |
9713 | code = (code == NE || code == GT ? LT : GE); | |
9714 | continue; | |
9715 | } | |
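/* For example, in SImode (ne (lshiftrt X (const_int 31)) (const_int 0))
   becomes (lt X (const_int 0)), and the EQ form becomes
   (ge X (const_int 0)).  */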
9716 | break; | |
9717 | } | |
9718 | ||
9719 | break; | |
9720 | } | |
9721 | ||
9722 | /* Now make any compound operations involved in this comparison. Then, | |
9723 | check for an outermost SUBREG on OP0 that isn't doing anything or is | |
9724 | paradoxical. The latter case can only occur when it is known that the | |
9725 | "extra" bits will be zero. Therefore, it is safe to remove the SUBREG. | |
9726 | We can never remove a SUBREG for a non-equality comparison because the | |
9727 | sign bit is in a different place in the underlying object. */ | |
9728 | ||
9729 | op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET); | |
9730 | op1 = make_compound_operation (op1, SET); | |
9731 | ||
9732 | if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
9733 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
9734 | && (code == NE || code == EQ) | |
9735 | && ((GET_MODE_SIZE (GET_MODE (op0)) | |
9736 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))) | |
9737 | { | |
9738 | op0 = SUBREG_REG (op0); | |
9739 | op1 = gen_lowpart_for_combine (GET_MODE (op0), op1); | |
9740 | } | |
9741 | ||
9742 | else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
9743 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
9744 | && (code == NE || code == EQ) | |
ac49a949 RS |
9745 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) |
9746 | <= HOST_BITS_PER_WIDE_INT) | |
951553af | 9747 | && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0))) |
230d793d RS |
9748 | & ~ GET_MODE_MASK (GET_MODE (op0))) == 0 |
9749 | && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), | |
9750 | op1), | |
951553af | 9751 | (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0))) |
230d793d RS |
9752 | & ~ GET_MODE_MASK (GET_MODE (op0))) == 0)) |
9753 | op0 = SUBREG_REG (op0), op1 = tem; | |
9754 | ||
9755 | /* We now do the opposite procedure: Some machines don't have compare | |
9756 | insns in all modes. If OP0's mode is an integer mode smaller than a | |
9757 | word and we can't do a compare in that mode, see if there is a larger | |
a687e897 RK |
9758 | mode for which we can do the compare. There are a number of cases in |
9759 | which we can use the wider mode. */ | |
230d793d RS |
9760 | |
9761 | mode = GET_MODE (op0); | |
9762 | if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT | |
9763 | && GET_MODE_SIZE (mode) < UNITS_PER_WORD | |
9764 | && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) | |
9765 | for (tmode = GET_MODE_WIDER_MODE (mode); | |
5f4f0e22 CH |
9766 | (tmode != VOIDmode |
9767 | && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT); | |
230d793d | 9768 | tmode = GET_MODE_WIDER_MODE (tmode)) |
a687e897 | 9769 | if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing) |
230d793d | 9770 | { |
951553af | 9771 | /* If the only nonzero bits in OP0 and OP1 are those in the |
a687e897 RK |
9772 | narrower mode and this is an equality or unsigned comparison, |
9773 | we can use the wider mode. Similarly for sign-extended | |
7e4dc511 | 9774 | values, in which case it is true for all comparisons. */ |
a687e897 RK |
9775 | if (((code == EQ || code == NE |
9776 | || code == GEU || code == GTU || code == LEU || code == LTU) | |
951553af RK |
9777 | && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0 |
9778 | && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0) | |
7e4dc511 RK |
9779 | || ((num_sign_bit_copies (op0, tmode) |
9780 | > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode)) | |
a687e897 | 9781 | && (num_sign_bit_copies (op1, tmode) |
58744483 | 9782 | > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode)))) |
a687e897 RK |
9783 | { |
9784 | op0 = gen_lowpart_for_combine (tmode, op0); | |
9785 | op1 = gen_lowpart_for_combine (tmode, op1); | |
9786 | break; | |
9787 | } | |
230d793d | 9788 | |
a687e897 RK |
9789 | /* If this is a test for negative, we can make an explicit |
9790 | test of the sign bit. */ | |
9791 | ||
9792 | if (op1 == const0_rtx && (code == LT || code == GE) | |
9793 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 9794 | { |
a687e897 RK |
9795 | op0 = gen_binary (AND, tmode, |
9796 | gen_lowpart_for_combine (tmode, op0), | |
5f4f0e22 CH |
9797 | GEN_INT ((HOST_WIDE_INT) 1 |
9798 | << (GET_MODE_BITSIZE (mode) - 1))); | |
230d793d | 9799 | code = (code == LT) ? NE : EQ; |
a687e897 | 9800 | break; |
230d793d | 9801 | } |
230d793d RS |
9802 | } |
9803 | ||
b7a775b2 RK |
9804 | #ifdef CANONICALIZE_COMPARISON |
9805 | /* If this machine only supports a subset of valid comparisons, see if we | |
9806 | can convert an unsupported one into a supported one. */ | |
9807 | CANONICALIZE_COMPARISON (code, op0, op1); | |
9808 | #endif | |
9809 | ||
230d793d RS |
9810 | *pop0 = op0; |
9811 | *pop1 = op1; | |
9812 | ||
9813 | return code; | |
9814 | } | |
9815 | \f | |
9816 | /* Return 1 if we know that X, a comparison operation, is not operating | |
9817 | on a floating-point value or is EQ or NE, meaning that we can safely | |
9818 | reverse it. */ | |
9819 | ||
9820 | static int | |
9821 | reversible_comparison_p (x) | |
9822 | rtx x; | |
9823 | { | |
9824 | if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT | |
7e2a0d8e | 9825 | || flag_fast_math |
230d793d RS |
9826 | || GET_CODE (x) == NE || GET_CODE (x) == EQ) |
9827 | return 1; | |
9828 | ||
9829 | switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))) | |
9830 | { | |
9831 | case MODE_INT: | |
3ad2180a RK |
9832 | case MODE_PARTIAL_INT: |
9833 | case MODE_COMPLEX_INT: | |
230d793d RS |
9834 | return 1; |
9835 | ||
9836 | case MODE_CC: | |
9210df58 RK |
9837 | /* If the mode of the condition codes tells us that this is safe, |
9838 | we need look no further. */ | |
9839 | if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0)))) | |
9840 | return 1; | |
9841 | ||
9842 | /* Otherwise try and find where the condition codes were last set and | |
9843 | use that. */ | |
230d793d RS |
9844 | x = get_last_value (XEXP (x, 0)); |
9845 | return (x && GET_CODE (x) == COMPARE | |
3ad2180a | 9846 | && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))); |
230d793d RS |
9847 | } |
9848 | ||
9849 | return 0; | |
9850 | } | |
9851 | \f | |
9852 | /* Utility function for the following routine. Called when X is part of a value | |
9853 | being stored into reg_last_set_value. Sets reg_last_set_table_tick | |
9854 | for each register mentioned. Similar to mention_regs in cse.c */ | |
9855 | ||
9856 | static void | |
9857 | update_table_tick (x) | |
9858 | rtx x; | |
9859 | { | |
9860 | register enum rtx_code code = GET_CODE (x); | |
9861 | register char *fmt = GET_RTX_FORMAT (code); | |
9862 | register int i; | |
9863 | ||
9864 | if (code == REG) | |
9865 | { | |
9866 | int regno = REGNO (x); | |
9867 | int endregno = regno + (regno < FIRST_PSEUDO_REGISTER | |
9868 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
9869 | ||
9870 | for (i = regno; i < endregno; i++) | |
9871 | reg_last_set_table_tick[i] = label_tick; | |
9872 | ||
9873 | return; | |
9874 | } | |
9875 | ||
9876 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
9877 | /* Note that we can't have an "E" in values stored; see | |
9878 | get_last_value_validate. */ | |
9879 | if (fmt[i] == 'e') | |
9880 | update_table_tick (XEXP (x, i)); | |
9881 | } | |
9882 | ||
9883 | /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we | |
9884 | are saying that the register is clobbered and we no longer know its | |
7988fd36 RK |
9885 | value. If INSN is zero, don't update reg_last_set; this is only permitted |
9886 | with VALUE also zero and is used to invalidate the register. */ | |
230d793d RS |
9887 | |
9888 | static void | |
9889 | record_value_for_reg (reg, insn, value) | |
9890 | rtx reg; | |
9891 | rtx insn; | |
9892 | rtx value; | |
9893 | { | |
9894 | int regno = REGNO (reg); | |
9895 | int endregno = regno + (regno < FIRST_PSEUDO_REGISTER | |
9896 | ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1); | |
9897 | int i; | |
9898 | ||
9899 | /* If VALUE contains REG and we have a previous value for REG, substitute | |
9900 | the previous value. */ | |
9901 | if (value && insn && reg_overlap_mentioned_p (reg, value)) | |
9902 | { | |
9903 | rtx tem; | |
9904 | ||
9905 | /* Set things up so get_last_value is allowed to see anything set up to | |
9906 | our insn. */ | |
9907 | subst_low_cuid = INSN_CUID (insn); | |
9908 | tem = get_last_value (reg); | |
9909 | ||
9910 | if (tem) | |
9911 | value = replace_rtx (copy_rtx (value), reg, tem); | |
9912 | } | |
9913 | ||
9914 | /* For each register modified, show we don't know its value, that | |
ef026f91 RS |
9915 | we don't know about its bitwise content, that its value has been |
9916 | updated, and that we don't know the location of the death of the | |
9917 | register. */ | |
230d793d RS |
9918 | for (i = regno; i < endregno; i ++) |
9919 | { | |
9920 | if (insn) | |
9921 | reg_last_set[i] = insn; | |
9922 | reg_last_set_value[i] = 0; | |
ef026f91 RS |
9923 | reg_last_set_mode[i] = 0; |
9924 | reg_last_set_nonzero_bits[i] = 0; | |
9925 | reg_last_set_sign_bit_copies[i] = 0; | |
230d793d RS |
9926 | reg_last_death[i] = 0; |
9927 | } | |
9928 | ||
9929 | /* Mark registers that are being referenced in this value. */ | |
9930 | if (value) | |
9931 | update_table_tick (value); | |
9932 | ||
9933 | /* Now update the status of each register being set. | |
9934 | If someone is using this register in this block, set this register | |
9935 | to invalid since we will get confused between the two lives in this | |
9936 | basic block. This makes using this register always invalid. In cse, we | |
9937 | scan the table to invalidate all entries using this register, but this | |
9938 | is too much work for us. */ | |
9939 | ||
9940 | for (i = regno; i < endregno; i++) | |
9941 | { | |
9942 | reg_last_set_label[i] = label_tick; | |
9943 | if (value && reg_last_set_table_tick[i] == label_tick) | |
9944 | reg_last_set_invalid[i] = 1; | |
9945 | else | |
9946 | reg_last_set_invalid[i] = 0; | |
9947 | } | |
9948 | ||
9949 | /* The value being assigned might refer to X (like in "x++;"). In that | |
9950 | case, we must replace it with (clobber (const_int 0)) to prevent | |
9951 | infinite loops. */ | |
9952 | if (value && ! get_last_value_validate (&value, | |
9953 | reg_last_set_label[regno], 0)) | |
9954 | { | |
9955 | value = copy_rtx (value); | |
9956 | if (! get_last_value_validate (&value, reg_last_set_label[regno], 1)) | |
9957 | value = 0; | |
9958 | } | |
9959 | ||
55310dad RK |
9960 | /* For the main register being modified, update the value, the mode, the |
9961 | nonzero bits, and the number of sign bit copies. */ | |
9962 | ||
230d793d RS |
9963 | reg_last_set_value[regno] = value; |
9964 | ||
55310dad RK |
9965 | if (value) |
9966 | { | |
2afabb48 | 9967 | subst_low_cuid = INSN_CUID (insn); |
55310dad RK |
9968 | reg_last_set_mode[regno] = GET_MODE (reg); |
9969 | reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg)); | |
9970 | reg_last_set_sign_bit_copies[regno] | |
9971 | = num_sign_bit_copies (value, GET_MODE (reg)); | |
9972 | } | |
230d793d RS |
9973 | } |
9974 | ||
9975 | /* Used for communication between the following two routines. */ | |
9976 | static rtx record_dead_insn; | |
9977 | ||
9978 | /* Called via note_stores from record_dead_and_set_regs to handle one | |
9979 | SET or CLOBBER in an insn. */ | |
9980 | ||
9981 | static void | |
9982 | record_dead_and_set_regs_1 (dest, setter) | |
9983 | rtx dest, setter; | |
9984 | { | |
ca89d290 RK |
9985 | if (GET_CODE (dest) == SUBREG) |
9986 | dest = SUBREG_REG (dest); | |
9987 | ||
230d793d RS |
9988 | if (GET_CODE (dest) == REG) |
9989 | { | |
9990 | /* If we are setting the whole register, we know its value. Otherwise | |
9991 | show that we don't know the value. We can handle SUBREG in | |
9992 | some cases. */ | |
9993 | if (GET_CODE (setter) == SET && dest == SET_DEST (setter)) | |
9994 | record_value_for_reg (dest, record_dead_insn, SET_SRC (setter)); | |
9995 | else if (GET_CODE (setter) == SET | |
9996 | && GET_CODE (SET_DEST (setter)) == SUBREG | |
9997 | && SUBREG_REG (SET_DEST (setter)) == dest | |
90bf8081 | 9998 | && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD |
230d793d | 9999 | && subreg_lowpart_p (SET_DEST (setter))) |
d0ab8cd3 RK |
10000 | record_value_for_reg (dest, record_dead_insn, |
10001 | gen_lowpart_for_combine (GET_MODE (dest), | |
10002 | SET_SRC (setter))); | |
230d793d | 10003 | else |
5f4f0e22 | 10004 | record_value_for_reg (dest, record_dead_insn, NULL_RTX); |
230d793d RS |
10005 | } |
10006 | else if (GET_CODE (dest) == MEM | |
10007 | /* Ignore pushes, they clobber nothing. */ | |
10008 | && ! push_operand (dest, GET_MODE (dest))) | |
10009 | mem_last_set = INSN_CUID (record_dead_insn); | |
10010 | } | |
10011 | ||
10012 | /* Update the records of when each REG was most recently set or killed | |
10013 | for the things done by INSN. This is the last thing done in processing | |
10014 | INSN in the combiner loop. | |
10015 | ||
ef026f91 RS |
10016 | We update reg_last_set, reg_last_set_value, reg_last_set_mode, |
10017 | reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death, | |
10018 | and also the similar information mem_last_set (which insn most recently | |
10019 | modified memory) and last_call_cuid (which insn was the most recent | |
10020 | subroutine call). */ | |
230d793d RS |
10021 | |
10022 | static void | |
10023 | record_dead_and_set_regs (insn) | |
10024 | rtx insn; | |
10025 | { | |
10026 | register rtx link; | |
55310dad RK |
10027 | int i; |
10028 | ||
230d793d RS |
10029 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
10030 | { | |
dbc131f3 RK |
10031 | if (REG_NOTE_KIND (link) == REG_DEAD |
10032 | && GET_CODE (XEXP (link, 0)) == REG) | |
10033 | { | |
10034 | int regno = REGNO (XEXP (link, 0)); | |
10035 | int endregno | |
10036 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
10037 | ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0))) | |
10038 | : 1); | |
dbc131f3 RK |
10039 | |
10040 | for (i = regno; i < endregno; i++) | |
10041 | reg_last_death[i] = insn; | |
10042 | } | |
230d793d | 10043 | else if (REG_NOTE_KIND (link) == REG_INC) |
5f4f0e22 | 10044 | record_value_for_reg (XEXP (link, 0), insn, NULL_RTX); |
230d793d RS |
10045 | } |
10046 | ||
10047 | if (GET_CODE (insn) == CALL_INSN) | |
55310dad RK |
10048 | { |
10049 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
10050 | if (call_used_regs[i]) | |
10051 | { | |
10052 | reg_last_set_value[i] = 0; | |
ef026f91 RS |
10053 | reg_last_set_mode[i] = 0; |
10054 | reg_last_set_nonzero_bits[i] = 0; | |
10055 | reg_last_set_sign_bit_copies[i] = 0; | |
55310dad RK |
10056 | reg_last_death[i] = 0; |
10057 | } | |
10058 | ||
10059 | last_call_cuid = mem_last_set = INSN_CUID (insn); | |
10060 | } | |
230d793d RS |
10061 | |
10062 | record_dead_insn = insn; | |
10063 | note_stores (PATTERN (insn), record_dead_and_set_regs_1); | |
10064 | } | |
10065 | \f | |
10066 | /* Utility routine for the following function. Verify that all the registers | |
10067 | mentioned in *LOC are valid when *LOC was part of a value set when | |
10068 | label_tick == TICK. Return 0 if some are not. | |
10069 | ||
10070 | If REPLACE is non-zero, replace the invalid reference with | |
10071 | (clobber (const_int 0)) and return 1. This replacement is useful because | |
10072 | we often can get useful information about the form of a value (e.g., if | |
10073 | it was produced by a shift that always produces -1 or 0) even though | |
10074 | we don't know exactly what registers it was produced from. */ | |
10075 | ||
10076 | static int | |
10077 | get_last_value_validate (loc, tick, replace) | |
10078 | rtx *loc; | |
10079 | int tick; | |
10080 | int replace; | |
10081 | { | |
10082 | rtx x = *loc; | |
10083 | char *fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
10084 | int len = GET_RTX_LENGTH (GET_CODE (x)); | |
10085 | int i; | |
10086 | ||
10087 | if (GET_CODE (x) == REG) | |
10088 | { | |
10089 | int regno = REGNO (x); | |
10090 | int endregno = regno + (regno < FIRST_PSEUDO_REGISTER | |
10091 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
10092 | int j; | |
10093 | ||
10094 | for (j = regno; j < endregno; j++) | |
10095 | if (reg_last_set_invalid[j] | |
10096 | /* If this is a pseudo-register that was only set once, it is | |
10097 | always valid. */ | |
10098 | || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1) | |
10099 | && reg_last_set_label[j] > tick)) | |
10100 | { | |
10101 | if (replace) | |
10102 | *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx); | |
10103 | return replace; | |
10104 | } | |
10105 | ||
10106 | return 1; | |
10107 | } | |
10108 | ||
10109 | for (i = 0; i < len; i++) | |
10110 | if ((fmt[i] == 'e' | |
10111 | && get_last_value_validate (&XEXP (x, i), tick, replace) == 0) | |
10112 | /* Don't bother with these. They shouldn't occur anyway. */ | |
10113 | || fmt[i] == 'E') | |
10114 | return 0; | |
10115 | ||
10116 | /* If we haven't found a reason for it to be invalid, it is valid. */ | |
10117 | return 1; | |
10118 | } | |
10119 | ||
10120 | /* Get the last value assigned to X, if known. Some registers | |
10121 | in the value may be replaced with (clobber (const_int 0)) if their value | |
10122 | is no longer known reliably. */ | |
10123 | ||
10124 | static rtx | |
10125 | get_last_value (x) | |
10126 | rtx x; | |
10127 | { | |
10128 | int regno; | |
10129 | rtx value; | |
10130 | ||
10131 | /* If this is a non-paradoxical SUBREG, get the value of its operand and | |
10132 | then convert it to the desired mode. If this is a paradoxical SUBREG, | |
10133 | we cannot predict what values the "extra" bits might have. */ | |
10134 | if (GET_CODE (x) == SUBREG | |
10135 | && subreg_lowpart_p (x) | |
10136 | && (GET_MODE_SIZE (GET_MODE (x)) | |
10137 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
10138 | && (value = get_last_value (SUBREG_REG (x))) != 0) | |
10139 | return gen_lowpart_for_combine (GET_MODE (x), value); | |
10140 | ||
10141 | if (GET_CODE (x) != REG) | |
10142 | return 0; | |
10143 | ||
10144 | regno = REGNO (x); | |
10145 | value = reg_last_set_value[regno]; | |
10146 | ||
d0ab8cd3 | 10147 | /* If we don't have a value or if it isn't for this basic block, return 0. */ |
230d793d RS |
10148 | |
10149 | if (value == 0 | |
10150 | || (reg_n_sets[regno] != 1 | |
55310dad | 10151 | && reg_last_set_label[regno] != label_tick)) |
230d793d RS |
10152 | return 0; |
10153 | ||
4255220d | 10154 | /* If the value was set in a later insn than the ones we are processing, |
4090a6b3 RK |
10155 | we can't use it even if the register was only set once, but make a quick |
10156 | check to see if the previous insn set it to something. This is commonly | |
0d9641d1 JW |
10157 | the case when the same pseudo is used by repeated insns. |
10158 | ||
10159 | This does not work if there exists an instruction which is temporarily | |
10160 | not on the insn chain. */ | |
d0ab8cd3 | 10161 | |
bcd49eb7 | 10162 | if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid) |
d0ab8cd3 RK |
10163 | { |
10164 | rtx insn, set; | |
10165 | ||
bcd49eb7 JW |
10166 | /* We can not do anything useful in this case, because there is |
10167 | an instruction which is not on the insn chain. */ | |
10168 | if (subst_prev_insn) | |
10169 | return 0; | |
10170 | ||
4255220d JW |
10171 | /* Skip over USE insns. They are not useful here, and they may have |
10172 | been made by combine, in which case they do not have an INSN_CUID | |
d6c80562 | 10173 | value. We can't use prev_real_insn, because that would incorrectly |
e340018d JW |
10174 | take us backwards across labels. Skip over BARRIERs also, since |
10175 | they could have been made by combine. If we see one, we must be | |
10176 | optimizing dead code, so it doesn't matter what we do. */ | |
d6c80562 JW |
10177 | for (insn = prev_nonnote_insn (subst_insn); |
10178 | insn && ((GET_CODE (insn) == INSN | |
10179 | && GET_CODE (PATTERN (insn)) == USE) | |
e340018d | 10180 | || GET_CODE (insn) == BARRIER |
4255220d | 10181 | || INSN_CUID (insn) >= subst_low_cuid); |
d6c80562 | 10182 | insn = prev_nonnote_insn (insn)) |
3adde2a5 | 10183 | ; |
d0ab8cd3 RK |
10184 | |
10185 | if (insn | |
10186 | && (set = single_set (insn)) != 0 | |
10187 | && rtx_equal_p (SET_DEST (set), x)) | |
10188 | { | |
10189 | value = SET_SRC (set); | |
10190 | ||
10191 | /* Make sure that VALUE doesn't reference X. Replace any | |
ddd5a7c1 | 10192 | explicit references with a CLOBBER. If there are any remaining |
d0ab8cd3 RK |
10193 | references (rare), don't use the value. */ |
10194 | ||
10195 | if (reg_mentioned_p (x, value)) | |
10196 | value = replace_rtx (copy_rtx (value), x, | |
10197 | gen_rtx (CLOBBER, GET_MODE (x), const0_rtx)); | |
10198 | ||
10199 | if (reg_overlap_mentioned_p (x, value)) | |
10200 | return 0; | |
10201 | } | |
10202 | else | |
10203 | return 0; | |
10204 | } | |
10205 | ||
10206 | /* If the value has all its registers valid, return it. */ | |
230d793d RS |
10207 | if (get_last_value_validate (&value, reg_last_set_label[regno], 0)) |
10208 | return value; | |
10209 | ||
10210 | /* Otherwise, make a copy and replace any invalid register with | |
10211 | (clobber (const_int 0)). If that fails for some reason, return 0. */ | |
10212 | ||
10213 | value = copy_rtx (value); | |
10214 | if (get_last_value_validate (&value, reg_last_set_label[regno], 1)) | |
10215 | return value; | |
10216 | ||
10217 | return 0; | |
10218 | } | |
10219 | \f | |
10220 | /* Return nonzero if expression X refers to a REG or to memory | |
10221 | that is set in an instruction more recent than FROM_CUID. */ | |
10222 | ||
10223 | static int | |
10224 | use_crosses_set_p (x, from_cuid) | |
10225 | register rtx x; | |
10226 | int from_cuid; | |
10227 | { | |
10228 | register char *fmt; | |
10229 | register int i; | |
10230 | register enum rtx_code code = GET_CODE (x); | |
10231 | ||
10232 | if (code == REG) | |
10233 | { | |
10234 | register int regno = REGNO (x); | |
e28f5732 RK |
10235 | int endreg = regno + (regno < FIRST_PSEUDO_REGISTER |
10236 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
10237 | ||
230d793d RS |
10238 | #ifdef PUSH_ROUNDING |
10239 | /* Don't allow uses of the stack pointer to be moved, | |
10240 | because we don't know whether the move crosses a push insn. */ | |
10241 | if (regno == STACK_POINTER_REGNUM) | |
10242 | return 1; | |
10243 | #endif | |
e28f5732 RK |
10244 | for (;regno < endreg; regno++) |
10245 | if (reg_last_set[regno] | |
10246 | && INSN_CUID (reg_last_set[regno]) > from_cuid) | |
10247 | return 1; | |
10248 | return 0; | |
230d793d RS |
10249 | } |
10250 | ||
10251 | if (code == MEM && mem_last_set > from_cuid) | |
10252 | return 1; | |
10253 | ||
10254 | fmt = GET_RTX_FORMAT (code); | |
10255 | ||
10256 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
10257 | { | |
10258 | if (fmt[i] == 'E') | |
10259 | { | |
10260 | register int j; | |
10261 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
10262 | if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid)) | |
10263 | return 1; | |
10264 | } | |
10265 | else if (fmt[i] == 'e' | |
10266 | && use_crosses_set_p (XEXP (x, i), from_cuid)) | |
10267 | return 1; | |
10268 | } | |
10269 | return 0; | |
10270 | } | |
10271 | \f | |
10272 | /* Define three variables used for communication between the following | |
10273 | routines. */ | |
10274 | ||
10275 | static int reg_dead_regno, reg_dead_endregno; | |
10276 | static int reg_dead_flag; | |
10277 | ||
10278 | /* Function called via note_stores from reg_dead_at_p. | |
10279 | ||
ddd5a7c1 | 10280 | If DEST is within [reg_dead_regno, reg_dead_endregno), set |
230d793d RS |
10281 | reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */ | |
10282 | ||
10283 | static void | |
10284 | reg_dead_at_p_1 (dest, x) | |
10285 | rtx dest; | |
10286 | rtx x; | |
10287 | { | |
10288 | int regno, endregno; | |
10289 | ||
10290 | if (GET_CODE (dest) != REG) | |
10291 | return; | |
10292 | ||
10293 | regno = REGNO (dest); | |
10294 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER | |
10295 | ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1); | |
10296 | ||
10297 | if (reg_dead_endregno > regno && reg_dead_regno < endregno) | |
10298 | reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1; | |
10299 | } | |
10300 | ||
10301 | /* Return non-zero if REG is known to be dead at INSN. | |
10302 | ||
10303 | We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER | |
10304 | referencing REG, it is dead. If we hit a SET referencing REG, it is | |
10305 | live. Otherwise, see if it is live or dead at the start of the basic | |
6e25d159 RK |
10306 | block we are in. Hard regs marked as being live in NEWPAT_USED_REGS |
10307 | must be assumed to be always live. */ | |
230d793d RS |
10308 | |
10309 | static int | |
10310 | reg_dead_at_p (reg, insn) | |
10311 | rtx reg; | |
10312 | rtx insn; | |
10313 | { | |
10314 | int block, i; | |
10315 | ||
10316 | /* Set variables for reg_dead_at_p_1. */ | |
10317 | reg_dead_regno = REGNO (reg); | |
10318 | reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER | |
10319 | ? HARD_REGNO_NREGS (reg_dead_regno, | |
10320 | GET_MODE (reg)) | |
10321 | : 1); | |
10322 | ||
10323 | reg_dead_flag = 0; | |
10324 | ||
6e25d159 RK |
10325 | /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */ |
10326 | if (reg_dead_regno < FIRST_PSEUDO_REGISTER) | |
10327 | { | |
10328 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
10329 | if (TEST_HARD_REG_BIT (newpat_used_regs, i)) | |
10330 | return 0; | |
10331 | } | |
10332 | ||
230d793d RS |
10333 | /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or |
10334 | beginning of function. */ | |
60715d0b | 10335 | for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER; |
230d793d RS |
10336 | insn = prev_nonnote_insn (insn)) |
10337 | { | |
10338 | note_stores (PATTERN (insn), reg_dead_at_p_1); | |
10339 | if (reg_dead_flag) | |
10340 | return reg_dead_flag == 1 ? 1 : 0; | |
10341 | ||
10342 | if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) | |
10343 | return 1; | |
10344 | } | |
10345 | ||
10346 | /* Get the basic block number that we were in. */ | |
10347 | if (insn == 0) | |
10348 | block = 0; | |
10349 | else | |
10350 | { | |
10351 | for (block = 0; block < n_basic_blocks; block++) | |
10352 | if (insn == basic_block_head[block]) | |
10353 | break; | |
10354 | ||
10355 | if (block == n_basic_blocks) | |
10356 | return 0; | |
10357 | } | |
10358 | ||
10359 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
5f4f0e22 CH |
10360 | if (basic_block_live_at_start[block][i / REGSET_ELT_BITS] |
10361 | & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS))) | |
230d793d RS |
10362 | return 0; |
10363 | ||
10364 | return 1; | |
10365 | } | |
6e25d159 RK |
10366 | \f |
10367 | /* Note hard registers in X that are used. This code is similar to | |
10368 | that in flow.c, but much simpler since we don't care about pseudos. */ | |
10369 | ||
10370 | static void | |
10371 | mark_used_regs_combine (x) | |
10372 | rtx x; | |
10373 | { | |
10374 | register RTX_CODE code = GET_CODE (x); | |
10375 | register int regno; | |
10376 | int i; | |
10377 | ||
10378 | switch (code) | |
10379 | { | |
10380 | case LABEL_REF: | |
10381 | case SYMBOL_REF: | |
10382 | case CONST_INT: | |
10383 | case CONST: | |
10384 | case CONST_DOUBLE: | |
10385 | case PC: | |
10386 | case ADDR_VEC: | |
10387 | case ADDR_DIFF_VEC: | |
10388 | case ASM_INPUT: | |
10389 | #ifdef HAVE_cc0 | |
10390 | /* CC0 must die in the insn after it is set, so we don't need to take | |
10391 | special note of it here. */ | |
10392 | case CC0: | |
10393 | #endif | |
10394 | return; | |
10395 | ||
10396 | case CLOBBER: | |
10397 | /* If we are clobbering a MEM, mark any hard registers inside the | |
10398 | address as used. */ | |
10399 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
10400 | mark_used_regs_combine (XEXP (XEXP (x, 0), 0)); | |
10401 | return; | |
10402 | ||
10403 | case REG: | |
10404 | regno = REGNO (x); | |
10405 | /* A hard reg in a wide mode may really be multiple registers. | |
10406 | If so, mark all of them just like the first. */ | |
10407 | if (regno < FIRST_PSEUDO_REGISTER) | |
10408 | { | |
10409 | /* None of this applies to the stack, frame, or arg pointers. */ | |
10410 | if (regno == STACK_POINTER_REGNUM | |
10411 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
10412 | || regno == HARD_FRAME_POINTER_REGNUM | |
10413 | #endif | |
10414 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
10415 | || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
10416 | #endif | |
10417 | || regno == FRAME_POINTER_REGNUM) | |
10418 | return; | |
10419 | ||
10420 | i = HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
10421 | while (i-- > 0) | |
10422 | SET_HARD_REG_BIT (newpat_used_regs, regno + i); | |
10423 | } | |
10424 | return; | |
10425 | ||
10426 | case SET: | |
10427 | { | |
10428 | /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in | |
10429 | the address. */ | |
10430 | register rtx testreg = SET_DEST (x); | |
10431 | ||
e048778f RK |
10432 | while (GET_CODE (testreg) == SUBREG |
10433 | || GET_CODE (testreg) == ZERO_EXTRACT | |
10434 | || GET_CODE (testreg) == SIGN_EXTRACT | |
10435 | || GET_CODE (testreg) == STRICT_LOW_PART) | |
6e25d159 RK |
10436 | testreg = XEXP (testreg, 0); |
10437 | ||
10438 | if (GET_CODE (testreg) == MEM) | |
10439 | mark_used_regs_combine (XEXP (testreg, 0)); | |
10440 | ||
10441 | mark_used_regs_combine (SET_SRC (x)); | |
10442 | return; | |
10443 | } | |
10444 | } | |
10445 | ||
10446 | /* Recursively scan the operands of this expression. */ | |
10447 | ||
10448 | { | |
10449 | register char *fmt = GET_RTX_FORMAT (code); | |
10450 | ||
10451 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
10452 | { | |
10453 | if (fmt[i] == 'e') | |
10454 | mark_used_regs_combine (XEXP (x, i)); | |
10455 | else if (fmt[i] == 'E') | |
10456 | { | |
10457 | register int j; | |
10458 | ||
10459 | for (j = 0; j < XVECLEN (x, i); j++) | |
10460 | mark_used_regs_combine (XVECEXP (x, i, j)); | |
10461 | } | |
10462 | } | |
10463 | } | |
10464 | } | |
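The fallback at the bottom of mark_used_regs_combine is the standard RTL walk: the format string for the expression's code says which operands are single subexpressions ('e') and which are vectors of subexpressions ('E'), and the function recurses into exactly those. A toy model of that format-driven dispatch, using an invented node type in place of rtx:

#include <stdio.h>

/* Toy expression node: FMT describes the operands the way
   GET_RTX_FORMAT describes an rtx code's operands.  */
struct node
{
  const char *fmt;      /* one char per operand; 'e' = subexpression */
  struct node *op[2];   /* operand slots used by FMT */
  int regno;            /* meaningful only for leaves ("registers") */
};

static void
mark_used (struct node *x)
{
  int i;

  if (x->fmt[0] == '\0')        /* leaf: treat it as a hard register */
    {
      printf ("used reg %d\n", x->regno);
      return;
    }

  for (i = 0; x->fmt[i]; i++)   /* recurse per the format string */
    if (x->fmt[i] == 'e')
      mark_used (x->op[i]);
}

int
main (void)
{
  struct node r1 = { "", { 0, 0 }, 1 };
  struct node r2 = { "", { 0, 0 }, 2 };
  struct node plus = { "ee", { &r1, &r2 }, 0 };

  mark_used (&plus);            /* prints "used reg 1" then "used reg 2" */
  return 0;
}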
10465 | ||
230d793d RS |
10466 | \f |
10467 | /* Remove register number REGNO from the dead registers list of INSN. | |
10468 | ||
10469 | Return the note used to record the death, if there was one. */ | |
10470 | ||
10471 | rtx | |
10472 | remove_death (regno, insn) | |
10473 | int regno; | |
10474 | rtx insn; | |
10475 | { | |
10476 | register rtx note = find_regno_note (insn, REG_DEAD, regno); | |
10477 | ||
10478 | if (note) | |
1a26b032 RK |
10479 | { |
10480 | reg_n_deaths[regno]--; | |
10481 | remove_note (insn, note); | |
10482 | } | |
230d793d RS |
10483 | |
10484 | return note; | |
10485 | } | |
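remove_death relies on find_regno_note and remove_note to splice the matching EXPR_LIST entry out of the insn's note chain. The unlink step, shown stand-alone on an invented note structure (the names and types here are illustrative only):

#include <stdio.h>
#include <stddef.h>

struct note { int regno; struct note *next; };

/* Unlink and return the first note for REGNO, or NULL if there is
   none, in the spirit of find_regno_note followed by remove_note.  */
static struct note *
remove_death_note (struct note **list, int regno)
{
  struct note **p;

  for (p = list; *p != NULL; p = &(*p)->next)
    if ((*p)->regno == regno)
      {
        struct note *found = *p;
        *p = found->next;       /* splice it out of the chain */
        return found;
      }
  return NULL;
}

int
main (void)
{
  struct note n2 = { 2, NULL };
  struct note n1 = { 1, &n2 };
  struct note *notes = &n1;

  remove_death_note (&notes, 1);
  printf ("list head is now regno %d\n", notes->regno);   /* prints 2 */
  return 0;
}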
10486 | ||
10487 | /* For each register (hardware or pseudo) used within expression X, if its | |
10488 | death is in an instruction with cuid between FROM_CUID (inclusive) and | |
10489 | TO_INSN (exclusive), put a REG_DEAD note for that register in the | |
10490 | list headed by PNOTES. | |
10491 | ||
10492 | This is done when X is being merged by combination into TO_INSN. These | |
10493 | notes will then be distributed as needed. */ | |
10494 | ||
10495 | static void | |
10496 | move_deaths (x, from_cuid, to_insn, pnotes) | |
10497 | rtx x; | |
10498 | int from_cuid; | |
10499 | rtx to_insn; | |
10500 | rtx *pnotes; | |
10501 | { | |
10502 | register char *fmt; | |
10503 | register int len, i; | |
10504 | register enum rtx_code code = GET_CODE (x); | |
10505 | ||
10506 | if (code == REG) | |
10507 | { | |
10508 | register int regno = REGNO (x); | |
10509 | register rtx where_dead = reg_last_death[regno]; | |
e340018d JW |
10510 | register rtx before_dead, after_dead; |
10511 | ||
10512 | /* WHERE_DEAD could be a USE insn made by combine, so first we | |
10513 | make sure that we have insns with valid INSN_CUID values. */ | |
10514 | before_dead = where_dead; | |
10515 | while (before_dead && INSN_UID (before_dead) > max_uid_cuid) | |
10516 | before_dead = PREV_INSN (before_dead); | |
10517 | after_dead = where_dead; | |
10518 | while (after_dead && INSN_UID (after_dead) > max_uid_cuid) | |
10519 | after_dead = NEXT_INSN (after_dead); | |
10520 | ||
10521 | if (before_dead && after_dead | |
10522 | && INSN_CUID (before_dead) >= from_cuid | |
10523 | && (INSN_CUID (after_dead) < INSN_CUID (to_insn) | |
10524 | || (where_dead != after_dead | |
10525 | && INSN_CUID (after_dead) == INSN_CUID (to_insn)))) | |
230d793d | 10526 | { |
dbc131f3 | 10527 | rtx note = remove_death (regno, where_dead); |
230d793d RS |
10528 | |
10529 | /* It is possible for the call above to return 0. This can occur | |
10530 | when reg_last_death points to I2 or I1 that we combined with. | |
dbc131f3 RK |
10531 | In that case make a new note. |
10532 | ||
10533 | We must also check for the case where X is a hard register | |
10534 | and NOTE is a death note for a range of hard registers | |
10535 | including X. In that case, we must put REG_DEAD notes for | |
10536 | the remaining registers in place of NOTE. */ | |
10537 | ||
10538 | if (note != 0 && regno < FIRST_PSEUDO_REGISTER | |
10539 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
10540 | != GET_MODE_SIZE (GET_MODE (x)))) | |
10541 | { | |
10542 | int deadregno = REGNO (XEXP (note, 0)); | |
10543 | int deadend | |
10544 | = (deadregno + HARD_REGNO_NREGS (deadregno, | |
10545 | GET_MODE (XEXP (note, 0)))); | |
10546 | int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
10547 | int i; | |
10548 | ||
10549 | for (i = deadregno; i < deadend; i++) | |
10550 | if (i < regno || i >= ourend) | |
10551 | REG_NOTES (where_dead) | |
10552 | = gen_rtx (EXPR_LIST, REG_DEAD, | |
36b878d1 | 10553 | gen_rtx (REG, reg_raw_mode[i], i), |
dbc131f3 RK |
10554 | REG_NOTES (where_dead)); |
10555 | } | |
fabd69e8 RK |
10556 | /* If we didn't find any note, and we have a multi-reg hard |
10557 | register, then to be safe we must check for REG_DEAD notes | |
10558 | for each register other than the first. They could have | |
10559 | their own REG_DEAD notes lying around. */ | |
10560 | else if (note == 0 && regno < FIRST_PSEUDO_REGISTER | |
10561 | && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1) | |
10562 | { | |
10563 | int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
10564 | int i; | |
10565 | rtx oldnotes = 0; | |
10566 | ||
10567 | for (i = regno + 1; i < ourend; i++) | |
10568 | move_deaths (gen_rtx (REG, reg_raw_mode[i], i), | |
10569 | from_cuid, to_insn, &oldnotes); | |
10570 | } | |
230d793d | 10571 | |
dbc131f3 | 10572 | if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x)) |
230d793d RS |
10573 | { |
10574 | XEXP (note, 1) = *pnotes; | |
10575 | *pnotes = note; | |
10576 | } | |
10577 | else | |
10578 | *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes); | |
1a26b032 RK |
10579 | |
10580 | reg_n_deaths[regno]++; | |
230d793d RS |
10581 | } |
10582 | ||
10583 | return; | |
10584 | } | |
10585 | ||
10586 | else if (GET_CODE (x) == SET) | |
10587 | { | |
10588 | rtx dest = SET_DEST (x); | |
10589 | ||
10590 | move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes); | |
10591 | ||
a7c99304 RK |
10592 | /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG |
10593 | that accesses one word of a multi-word item, some | |
10594 | piece of every register in the expression is used by | |
10595 | this insn, so remove any old death. */ | |
10596 | ||
10597 | if (GET_CODE (dest) == ZERO_EXTRACT | |
10598 | || GET_CODE (dest) == STRICT_LOW_PART | |
10599 | || (GET_CODE (dest) == SUBREG | |
10600 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
10601 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
10602 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
10603 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))) | |
230d793d | 10604 | { |
a7c99304 RK |
10605 | move_deaths (dest, from_cuid, to_insn, pnotes); |
10606 | return; | |
230d793d RS |
10607 | } |
10608 | ||
a7c99304 RK |
10609 | /* If this is some other SUBREG, we know it replaces the entire |
10610 | value, so use that as the destination. */ | |
10611 | if (GET_CODE (dest) == SUBREG) | |
10612 | dest = SUBREG_REG (dest); | |
10613 | ||
10614 | /* If this is a MEM, adjust deaths of anything used in the address. | |
10615 | For a REG (the only other possibility), the entire value is | |
10616 | being replaced so the old value is not used in this insn. */ | |
230d793d RS |
10617 | |
10618 | if (GET_CODE (dest) == MEM) | |
10619 | move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes); | |
10620 | return; | |
10621 | } | |
10622 | ||
10623 | else if (GET_CODE (x) == CLOBBER) | |
10624 | return; | |
10625 | ||
10626 | len = GET_RTX_LENGTH (code); | |
10627 | fmt = GET_RTX_FORMAT (code); | |
10628 | ||
10629 | for (i = 0; i < len; i++) | |
10630 | { | |
10631 | if (fmt[i] == 'E') | |
10632 | { | |
10633 | register int j; | |
10634 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
10635 | move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes); | |
10636 | } | |
10637 | else if (fmt[i] == 'e') | |
10638 | move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes); | |
10639 | } | |
10640 | } | |
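The hard-register case in move_deaths has to cope with a death note that covers more registers than X itself occupies: the leftover registers keep REG_DEAD notes of their own. For instance, if the old note was for a two-word value in hard regs 2..3 and X is only reg 2, a note must be regenerated for reg 3. A small sketch of just that index arithmetic (the register numbers and sizes are made up for the example):

#include <stdio.h>

int
main (void)
{
  int deadregno = 2, dead_nregs = 2;    /* old note covered regs 2..3 */
  int regno = 2, our_nregs = 1;         /* X occupies only reg 2 */
  int deadend = deadregno + dead_nregs;
  int ourend = regno + our_nregs;
  int i;

  /* Every register covered by the old note but outside X's own
     range still needs a REG_DEAD note of its own.  */
  for (i = deadregno; i < deadend; i++)
    if (i < regno || i >= ourend)
      printf ("keep a REG_DEAD note for hard reg %d\n", i);   /* reg 3 */

  return 0;
}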
10641 | \f | |
a7c99304 RK |
10642 | /* Return 1 if X is the target of a bit-field assignment in BODY, the |
10643 | pattern of an insn. X must be a REG. */ | |
230d793d RS |
10644 | |
10645 | static int | |
a7c99304 RK |
10646 | reg_bitfield_target_p (x, body) |
10647 | rtx x; | |
230d793d RS |
10648 | rtx body; |
10649 | { | |
10650 | int i; | |
10651 | ||
10652 | if (GET_CODE (body) == SET) | |
a7c99304 RK |
10653 | { |
10654 | rtx dest = SET_DEST (body); | |
10655 | rtx target; | |
10656 | int regno, tregno, endregno, endtregno; | |
10657 | ||
10658 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
10659 | target = XEXP (dest, 0); | |
10660 | else if (GET_CODE (dest) == STRICT_LOW_PART) | |
10661 | target = SUBREG_REG (XEXP (dest, 0)); | |
10662 | else | |
10663 | return 0; | |
10664 | ||
10665 | if (GET_CODE (target) == SUBREG) | |
10666 | target = SUBREG_REG (target); | |
10667 | ||
10668 | if (GET_CODE (target) != REG) | |
10669 | return 0; | |
10670 | ||
10671 | tregno = REGNO (target), regno = REGNO (x); | |
10672 | if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER) | |
10673 | return target == x; | |
10674 | ||
10675 | endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target)); | |
10676 | endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
10677 | ||
10678 | return endregno > tregno && regno < endtregno; | |
10679 | } | |
230d793d RS |
10680 | |
10681 | else if (GET_CODE (body) == PARALLEL) | |
10682 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
a7c99304 | 10683 | if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) |
230d793d RS |
10684 | return 1; |
10685 | ||
10686 | return 0; | |
10687 | } | |
10688 | \f | |
10689 | /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them | |
10690 | as appropriate. I3 and I2 are the insns resulting from the combination | |
10691 | insns including FROM (I2 may be zero). | |
10692 | ||
10693 | ELIM_I2 and ELIM_I1 are either zero or registers that we know will | |
10694 | not need REG_DEAD notes because they are being substituted for. This | |
10695 | saves searching in the most common cases. | |
10696 | ||
10697 | Each note in the list is either ignored or placed on some insns, depending | |
10698 | on the type of note. */ | |
10699 | ||
10700 | static void | |
10701 | distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1) | |
10702 | rtx notes; | |
10703 | rtx from_insn; | |
10704 | rtx i3, i2; | |
10705 | rtx elim_i2, elim_i1; | |
10706 | { | |
10707 | rtx note, next_note; | |
10708 | rtx tem; | |
10709 | ||
10710 | for (note = notes; note; note = next_note) | |
10711 | { | |
10712 | rtx place = 0, place2 = 0; | |
10713 | ||
10714 | /* If this NOTE references a pseudo register, ensure it references | |
10715 | the latest copy of that register. */ | |
10716 | if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG | |
10717 | && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER) | |
10718 | XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))]; | |
10719 | ||
10720 | next_note = XEXP (note, 1); | |
10721 | switch (REG_NOTE_KIND (note)) | |
10722 | { | |
10723 | case REG_UNUSED: | |
07d0cbdd | 10724 | /* Any clobbers for i3 may still exist, and so we must process |
176c9e6b JW |
10725 | REG_UNUSED notes from that insn. |
10726 | ||
10727 | Any clobbers from i2 or i1 can only exist if they were added by | |
10728 | recog_for_combine. In that case, recog_for_combine created the | |
10729 | necessary REG_UNUSED notes. Trying to keep any original | |
10730 | REG_UNUSED notes from these insns can cause incorrect output | |
10731 | if they are for the same register as the original i3 dest. | |
10732 | In that case, we will notice that the register is set in i3, | |
10733 | and then add a REG_UNUSED note for the destination of i3, which | |
07d0cbdd JW |
10734 | is wrong. However, it is possible to have REG_UNUSED notes from |
10735 | i2 or i1 for registers which were both used and clobbered, so | |
10736 | we keep notes from i2 or i1 if they will turn into REG_DEAD | |
10737 | notes. */ | |
176c9e6b | 10738 | |
230d793d RS |
10739 | /* If this register is set or clobbered in I3, put the note there |
10740 | unless there is one already. */ | |
07d0cbdd | 10741 | if (reg_set_p (XEXP (note, 0), PATTERN (i3))) |
230d793d | 10742 | { |
07d0cbdd JW |
10743 | if (from_insn != i3) |
10744 | break; | |
10745 | ||
230d793d RS |
10746 | if (! (GET_CODE (XEXP (note, 0)) == REG |
10747 | ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0))) | |
10748 | : find_reg_note (i3, REG_UNUSED, XEXP (note, 0)))) | |
10749 | place = i3; | |
10750 | } | |
10751 | /* Otherwise, if this register is used by I3, then this register | |
10752 | now dies here, so we must put a REG_DEAD note here unless there | |
10753 | is one already. */ | |
10754 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)) | |
10755 | && ! (GET_CODE (XEXP (note, 0)) == REG | |
10756 | ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0))) | |
10757 | : find_reg_note (i3, REG_DEAD, XEXP (note, 0)))) | |
10758 | { | |
10759 | PUT_REG_NOTE_KIND (note, REG_DEAD); | |
10760 | place = i3; | |
10761 | } | |
10762 | break; | |
10763 | ||
10764 | case REG_EQUAL: | |
10765 | case REG_EQUIV: | |
10766 | case REG_NONNEG: | |
10767 | /* These notes say something about results of an insn. We can | |
10768 | only support them if they used to be on I3 in which case they | |
a687e897 RK |
10769 | remain on I3. Otherwise they are ignored. |
10770 | ||
10771 | If the note refers to an expression that is not a constant, we | |
10772 | must also ignore the note since we cannot tell whether the | |
10773 | equivalence is still true. It might be possible to do | |
10774 | slightly better than this (we only have a problem if I2DEST | |
10775 | or I1DEST is present in the expression), but it doesn't | |
10776 | seem worth the trouble. */ | |
10777 | ||
10778 | if (from_insn == i3 | |
10779 | && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0)))) | |
230d793d RS |
10780 | place = i3; |
10781 | break; | |
10782 | ||
10783 | case REG_INC: | |
10784 | case REG_NO_CONFLICT: | |
10785 | case REG_LABEL: | |
10786 | /* These notes say something about how a register is used. They must | |
10787 | be present on any use of the register in I2 or I3. */ | |
10788 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))) | |
10789 | place = i3; | |
10790 | ||
10791 | if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2))) | |
10792 | { | |
10793 | if (place) | |
10794 | place2 = i2; | |
10795 | else | |
10796 | place = i2; | |
10797 | } | |
10798 | break; | |
10799 | ||
10800 | case REG_WAS_0: | |
10801 | /* It is too much trouble to try to see if this note is still | |
10802 | correct in all situations. It is better to simply delete it. */ | |
10803 | break; | |
10804 | ||
10805 | case REG_RETVAL: | |
10806 | /* If the insn previously containing this note still exists, | |
10807 | put it back where it was. Otherwise move it to the previous | |
10808 | insn. Adjust the corresponding REG_LIBCALL note. */ | |
10809 | if (GET_CODE (from_insn) != NOTE) | |
10810 | place = from_insn; | |
10811 | else | |
10812 | { | |
5f4f0e22 | 10813 | tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX); |
230d793d RS |
10814 | place = prev_real_insn (from_insn); |
10815 | if (tem && place) | |
10816 | XEXP (tem, 0) = place; | |
10817 | } | |
10818 | break; | |
10819 | ||
10820 | case REG_LIBCALL: | |
10821 | /* This is handled similarly to REG_RETVAL. */ | |
10822 | if (GET_CODE (from_insn) != NOTE) | |
10823 | place = from_insn; | |
10824 | else | |
10825 | { | |
5f4f0e22 | 10826 | tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX); |
230d793d RS |
10827 | place = next_real_insn (from_insn); |
10828 | if (tem && place) | |
10829 | XEXP (tem, 0) = place; | |
10830 | } | |
10831 | break; | |
10832 | ||
10833 | case REG_DEAD: | |
10834 | /* If the register is used as an input in I3, it dies there. | |
10835 | Similarly for I2, if it is non-zero and adjacent to I3. | |
10836 | ||
10837 | If the register is not used as an input in either I3 or I2 | |
10838 | and it is not one of the registers we were supposed to eliminate, | |
10839 | there are two possibilities. We might have a non-adjacent I2 | |
10840 | or we might have somehow eliminated an additional register | |
10841 | from a computation. For example, we might have had A & B where | |
10842 | we discover that B will always be zero. In this case we will | |
10843 | eliminate the reference to A. | |
10844 | ||
10845 | In both cases, we must search to see if we can find a previous | |
10846 | use of A and put the death note there. */ | |
10847 | ||
6e2d1486 RK |
10848 | if (from_insn |
10849 | && GET_CODE (from_insn) == CALL_INSN | |
10850 | && find_reg_fusage (from_insn, USE, XEXP (note, 0))) | |
10851 | place = from_insn; | |
10852 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) | |
230d793d RS |
10853 | place = i3; |
10854 | else if (i2 != 0 && next_nonnote_insn (i2) == i3 | |
10855 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
10856 | place = i2; | |
10857 | ||
10858 | if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1) | |
10859 | break; | |
10860 | ||
510dd77e RK |
10861 | /* If the register is used in both I2 and I3 and it dies in I3, |
10862 | we might have added another reference to it. If reg_n_refs | |
10863 | was 2, bump it to 3. This has to be correct since the | |
10864 | register must have been set somewhere. The reason this is | |
10865 | done is because local-alloc.c treats 2 references as a | |
10866 | special case. */ | |
10867 | ||
10868 | if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG | |
10869 | && reg_n_refs[REGNO (XEXP (note, 0))]== 2 | |
10870 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
10871 | reg_n_refs[REGNO (XEXP (note, 0))] = 3; | |
10872 | ||
230d793d | 10873 | if (place == 0) |
38d8473f RK |
10874 | { |
10875 | for (tem = prev_nonnote_insn (i3); | |
10876 | place == 0 && tem | |
10877 | && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN); | |
10878 | tem = prev_nonnote_insn (tem)) | |
10879 | { | |
10880 | /* If the register is being set at TEM, see if that is all | |
10881 | TEM is doing. If so, delete TEM. Otherwise, make this | |
10882 | into a REG_UNUSED note instead. */ | |
10883 | if (reg_set_p (XEXP (note, 0), PATTERN (tem))) | |
10884 | { | |
10885 | rtx set = single_set (tem); | |
10886 | ||
10887 | /* Verify that it was the set, and not a clobber that | |
10888 | modified the register. */ | |
10889 | ||
10890 | if (set != 0 && ! side_effects_p (SET_SRC (set)) | |
d02089a5 RK |
10891 | && (rtx_equal_p (XEXP (note, 0), SET_DEST (set)) |
10892 | || (GET_CODE (SET_DEST (set)) == SUBREG | |
10893 | && rtx_equal_p (XEXP (note, 0), | |
10894 | XEXP (SET_DEST (set), 0))))) | |
38d8473f RK |
10895 | { |
10896 | /* Move the notes and links of TEM elsewhere. | |
10897 | This might delete other dead insns recursively. | |
10898 | First set the pattern to something that won't use | |
10899 | any register. */ | |
10900 | ||
10901 | PATTERN (tem) = pc_rtx; | |
10902 | ||
10903 | distribute_notes (REG_NOTES (tem), tem, tem, | |
10904 | NULL_RTX, NULL_RTX, NULL_RTX); | |
10905 | distribute_links (LOG_LINKS (tem)); | |
10906 | ||
10907 | PUT_CODE (tem, NOTE); | |
10908 | NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED; | |
10909 | NOTE_SOURCE_FILE (tem) = 0; | |
10910 | } | |
10911 | else | |
10912 | { | |
10913 | PUT_REG_NOTE_KIND (note, REG_UNUSED); | |
10914 | ||
10915 | /* If there isn't already a REG_UNUSED note, put one | |
10916 | here. */ | |
10917 | if (! find_regno_note (tem, REG_UNUSED, | |
10918 | REGNO (XEXP (note, 0)))) | |
10919 | place = tem; | |
10920 | break; | |
230d793d RS |
10921 | } |
10922 | } | |
13018fad RE |
10923 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)) |
10924 | || (GET_CODE (tem) == CALL_INSN | |
10925 | && find_reg_fusage (tem, USE, XEXP (note, 0)))) | |
230d793d RS |
10926 | { |
10927 | place = tem; | |
932d1119 RK |
10928 | |
10929 | /* If we are doing a 3->2 combination, and we have a | |
10930 | register which formerly died in i3 and was not used | |
10931 | by i2, which now no longer dies in i3 and is used in | |
10932 | i2 but does not die in i2, and place is between i2 | |
10933 | and i3, then we may need to move a link from place to | |
10934 | i2. */ | |
a8908849 RK |
10935 | if (i2 && INSN_UID (place) <= max_uid_cuid |
10936 | && INSN_CUID (place) > INSN_CUID (i2) | |
932d1119 RK |
10937 | && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2) |
10938 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
10939 | { | |
10940 | rtx links = LOG_LINKS (place); | |
10941 | LOG_LINKS (place) = 0; | |
10942 | distribute_links (links); | |
10943 | } | |
230d793d RS |
10944 | break; |
10945 | } | |
38d8473f RK |
10946 | } |
10947 | ||
10948 | /* If we haven't found an insn for the death note and it | |
10949 | is still a REG_DEAD note, but we have hit a CODE_LABEL, | |
10950 | insert a USE insn for the register at that label and | |
10951 | put the death note there. This prevents problems with | |
10952 | call-state tracking in caller-save.c. */ | |
10953 | if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0) | |
e2cce0cf RK |
10954 | { |
10955 | place | |
10956 | = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)), | |
10957 | tem); | |
10958 | ||
10959 | /* If this insn was emitted between blocks, then update | |
10960 | basic_block_head of the current block to include it. */ | |
10961 | if (basic_block_end[this_basic_block - 1] == tem) | |
10962 | basic_block_head[this_basic_block] = place; | |
10963 | } | |
38d8473f | 10964 | } |
230d793d RS |
10965 | |
10966 | /* If the register is set or already dead at PLACE, we needn't do | |
10967 | anything with this note if it is still a REG_DEAD note. | |
10968 | ||
10969 | Note that we cannot use just `dead_or_set_p' here since we can | |
10970 | convert an assignment to a register into a bit-field assignment. | |
10971 | Therefore, we must also omit the note if the register is the | |
10972 | target of a bitfield assignment. */ | |
10973 | ||
10974 | if (place && REG_NOTE_KIND (note) == REG_DEAD) | |
10975 | { | |
10976 | int regno = REGNO (XEXP (note, 0)); | |
10977 | ||
10978 | if (dead_or_set_p (place, XEXP (note, 0)) | |
10979 | || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place))) | |
10980 | { | |
10981 | /* Unless the register previously died in PLACE, clear | |
10982 | reg_last_death. [I no longer understand why this is | |
10983 | being done.] */ | |
10984 | if (reg_last_death[regno] != place) | |
10985 | reg_last_death[regno] = 0; | |
10986 | place = 0; | |
10987 | } | |
10988 | else | |
10989 | reg_last_death[regno] = place; | |
10990 | ||
10991 | /* If this is a death note for a hard reg that is occupying | |
10992 | multiple registers, ensure that we are still using all | |
10993 | parts of the object. If we find a piece of the object | |
10994 | that is unused, we must add a USE for that piece before | |
10995 | PLACE and put the appropriate REG_DEAD note on it. | |
10996 | ||
10997 | An alternative would be to put a REG_UNUSED for the pieces | |
10998 | on the insn that set the register, but that can't be done if | |
10999 | it is not in the same block. It is simpler, though less | |
11000 | efficient, to add the USE insns. */ | |
11001 | ||
11002 | if (place && regno < FIRST_PSEUDO_REGISTER | |
11003 | && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1) | |
11004 | { | |
11005 | int endregno | |
11006 | = regno + HARD_REGNO_NREGS (regno, | |
11007 | GET_MODE (XEXP (note, 0))); | |
11008 | int all_used = 1; | |
11009 | int i; | |
11010 | ||
11011 | for (i = regno; i < endregno; i++) | |
9fd5bb62 JW |
11012 | if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0) |
11013 | && ! find_regno_fusage (place, USE, i)) | |
230d793d | 11014 | { |
485eeec4 | 11015 | rtx piece = gen_rtx (REG, reg_raw_mode[i], i); |
28f6d3af RK |
11016 | rtx p; |
11017 | ||
11018 | /* See if we already placed a USE note for this | |
11019 | register in front of PLACE. */ | |
11020 | for (p = place; | |
11021 | GET_CODE (PREV_INSN (p)) == INSN | |
11022 | && GET_CODE (PATTERN (PREV_INSN (p))) == USE; | |
11023 | p = PREV_INSN (p)) | |
11024 | if (rtx_equal_p (piece, | |
11025 | XEXP (PATTERN (PREV_INSN (p)), 0))) | |
11026 | { | |
11027 | p = 0; | |
11028 | break; | |
11029 | } | |
11030 | ||
11031 | if (p) | |
11032 | { | |
11033 | rtx use_insn | |
11034 | = emit_insn_before (gen_rtx (USE, VOIDmode, | |
11035 | piece), | |
11036 | p); | |
11037 | REG_NOTES (use_insn) | |
11038 | = gen_rtx (EXPR_LIST, REG_DEAD, piece, | |
11039 | REG_NOTES (use_insn)); | |
11040 | } | |
230d793d | 11041 | |
5089e22e | 11042 | all_used = 0; |
230d793d RS |
11043 | } |
11044 | ||
a394b17b JW |
11045 | /* Check for the case where the register dying partially |
11046 | overlaps the register set by this insn. */ | |
11047 | if (all_used) | |
11048 | for (i = regno; i < endregno; i++) | |
11049 | if (dead_or_set_regno_p (place, i)) | |
11050 | { | |
11051 | all_used = 0; | |
11052 | break; | |
11053 | } | |
11054 | ||
230d793d RS |
11055 | if (! all_used) |
11056 | { | |
11057 | /* Put only REG_DEAD notes for pieces that are | |
11058 | still used and that are not already dead or set. */ | |
11059 | ||
11060 | for (i = regno; i < endregno; i++) | |
11061 | { | |
485eeec4 | 11062 | rtx piece = gen_rtx (REG, reg_raw_mode[i], i); |
230d793d | 11063 | |
17cbf358 JW |
11064 | if ((reg_referenced_p (piece, PATTERN (place)) |
11065 | || (GET_CODE (place) == CALL_INSN | |
11066 | && find_reg_fusage (place, USE, piece))) | |
230d793d RS |
11067 | && ! dead_or_set_p (place, piece) |
11068 | && ! reg_bitfield_target_p (piece, | |
11069 | PATTERN (place))) | |
11070 | REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD, | |
11071 | piece, | |
11072 | REG_NOTES (place)); | |
11073 | } | |
11074 | ||
11075 | place = 0; | |
11076 | } | |
11077 | } | |
11078 | } | |
11079 | break; | |
11080 | ||
11081 | default: | |
11082 | /* Any other notes should not be present at this point in the | |
11083 | compilation. */ | |
11084 | abort (); | |
11085 | } | |
11086 | ||
11087 | if (place) | |
11088 | { | |
11089 | XEXP (note, 1) = REG_NOTES (place); | |
11090 | REG_NOTES (place) = note; | |
11091 | } | |
1a26b032 RK |
11092 | else if ((REG_NOTE_KIND (note) == REG_DEAD |
11093 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
11094 | && GET_CODE (XEXP (note, 0)) == REG) | |
11095 | reg_n_deaths[REGNO (XEXP (note, 0))]--; | |
230d793d RS |
11096 | |
11097 | if (place2) | |
1a26b032 RK |
11098 | { |
11099 | if ((REG_NOTE_KIND (note) == REG_DEAD | |
11100 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
11101 | && GET_CODE (XEXP (note, 0)) == REG) | |
11102 | reg_n_deaths[REGNO (XEXP (note, 0))]++; | |
11103 | ||
11104 | REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note), | |
11105 | XEXP (note, 0), REG_NOTES (place2)); | |
11106 | } | |
230d793d RS |
11107 | } |
11108 | } | |
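When distribute_notes finally settles on a PLACE, the note is attached with the usual cons-style push: the note's link field is pointed at the insn's current note list and the note becomes the new head, exactly the pair of assignments through XEXP (note, 1) and REG_NOTES (place) above. The same idiom stand-alone (the structure is invented for illustration):

#include <stdio.h>
#include <stddef.h>

struct note { int kind; struct note *next; };

/* Push NOTE onto the front of *LIST, as distribute_notes does with
   XEXP (note, 1) = REG_NOTES (place); REG_NOTES (place) = note;  */
static void
push_note (struct note **list, struct note *note)
{
  note->next = *list;
  *list = note;
}

int
main (void)
{
  struct note dead = { 1, NULL };
  struct note unused = { 2, NULL };
  struct note *notes = NULL;
  struct note *n;

  push_note (&notes, &dead);
  push_note (&notes, &unused);
  for (n = notes; n != NULL; n = n->next)
    printf ("note kind %d\n", n->kind);   /* prints 2, then 1 */
  return 0;
}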
11109 | \f | |
11110 | /* Similarly to above, distribute the LOG_LINKS that used to be present on | |
5089e22e RS |
11111 | I3, I2, and I1 to new locations. This is also called in one case to |
11112 | add a link pointing at I3 when I3's destination is changed. */ | |
230d793d RS |
11113 | |
11114 | static void | |
11115 | distribute_links (links) | |
11116 | rtx links; | |
11117 | { | |
11118 | rtx link, next_link; | |
11119 | ||
11120 | for (link = links; link; link = next_link) | |
11121 | { | |
11122 | rtx place = 0; | |
11123 | rtx insn; | |
11124 | rtx set, reg; | |
11125 | ||
11126 | next_link = XEXP (link, 1); | |
11127 | ||
11128 | /* If the insn that this link points to is a NOTE or isn't a single | |
11129 | set, ignore it. In the latter case, it isn't clear what we | |
11130 | can do other than ignore the link, since we can't tell which | |
11131 | register it was for. Such links wouldn't be used by combine | |
11132 | anyway. | |
11133 | ||
11134 | It is not possible for the destination of the target of the link to | |
11135 | have been changed by combine. The only way that could happen is if we | |
11136 | replace I3, I2, and I1 by I3 and I2. But in that case the | |
11137 | destination of I2 also remains unchanged. */ | |
11138 | ||
11139 | if (GET_CODE (XEXP (link, 0)) == NOTE | |
11140 | || (set = single_set (XEXP (link, 0))) == 0) | |
11141 | continue; | |
11142 | ||
11143 | reg = SET_DEST (set); | |
11144 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
11145 | || GET_CODE (reg) == SIGN_EXTRACT | |
11146 | || GET_CODE (reg) == STRICT_LOW_PART) | |
11147 | reg = XEXP (reg, 0); | |
11148 | ||
11149 | /* A LOG_LINK is defined as being placed on the first insn that uses | |
11150 | a register and points to the insn that sets the register. Start | |
11151 | searching at the next insn after the target of the link and stop | |
11152 | when we reach a set of the register or the end of the basic block. | |
11153 | ||
11154 | Note that this correctly handles the link that used to point from | |
5089e22e | 11155 | I3 to I2. Also note that not much searching is typically done here |
230d793d RS |
11156 | since most links don't point very far away. */ |
11157 | ||
11158 | for (insn = NEXT_INSN (XEXP (link, 0)); | |
0d4d42c3 RK |
11159 | (insn && (this_basic_block == n_basic_blocks - 1 |
11160 | || basic_block_head[this_basic_block + 1] != insn)); | |
230d793d RS |
11161 | insn = NEXT_INSN (insn)) |
11162 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
11163 | && reg_overlap_mentioned_p (reg, PATTERN (insn))) | |
11164 | { | |
11165 | if (reg_referenced_p (reg, PATTERN (insn))) | |
11166 | place = insn; | |
11167 | break; | |
11168 | } | |
6e2d1486 RK |
11169 | else if (GET_CODE (insn) == CALL_INSN |
11170 | && find_reg_fusage (insn, USE, reg)) | |
11171 | { | |
11172 | place = insn; | |
11173 | break; | |
11174 | } | |
230d793d RS |
11175 | |
11176 | /* If we found a place to put the link, place it there unless there | |
11177 | is already a link to the same insn as LINK at that point. */ | |
11178 | ||
11179 | if (place) | |
11180 | { | |
11181 | rtx link2; | |
11182 | ||
11183 | for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1)) | |
11184 | if (XEXP (link2, 0) == XEXP (link, 0)) | |
11185 | break; | |
11186 | ||
11187 | if (link2 == 0) | |
11188 | { | |
11189 | XEXP (link, 1) = LOG_LINKS (place); | |
11190 | LOG_LINKS (place) = link; | |
abe6e52f RK |
11191 | |
11192 | /* Set added_links_insn to the earliest insn we added a | |
11193 | link to. */ | |
11194 | if (added_links_insn == 0 | |
11195 | || INSN_CUID (added_links_insn) > INSN_CUID (place)) | |
11196 | added_links_insn = place; | |
230d793d RS |
11197 | } |
11198 | } | |
11199 | } | |
11200 | } | |
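A LOG_LINK, then, ends up on the first insn after the setter that actually uses the register, and the forward scan gives up if the register is set again or the basic block ends before any use is found. A toy version of that scan over a simplified block (the insn representation is invented purely for illustration):

#include <stdio.h>

struct insn { int uses_reg; int sets_reg; };

/* Return the index of the first insn after START that uses the
   register, or -1 if it is set again (or the block ends) first.  */
static int
find_link_place (const struct insn *block, int n, int start)
{
  int i;

  for (i = start + 1; i < n; i++)
    {
      if (block[i].uses_reg)
        return i;               /* first use: the link belongs here */
      if (block[i].sets_reg)
        return -1;              /* register set again before any use */
    }
  return -1;                    /* end of the basic block reached */
}

int
main (void)
{
  /* Insn 0 sets the register; insn 2 is its first use.  */
  struct insn block[4] = { { 0, 1 }, { 0, 0 }, { 1, 0 }, { 0, 1 } };

  printf ("link belongs on insn %d\n", find_link_place (block, 4, 0));
  return 0;
}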
11201 | \f | |
1427d6d2 RK |
11202 | /* Compute INSN_CUID for INSN, which is an insn made by combine. */ |
11203 | ||
11204 | static int | |
11205 | insn_cuid (insn) | |
11206 | rtx insn; | |
11207 | { | |
11208 | while (insn != 0 && INSN_UID (insn) > max_uid_cuid | |
11209 | && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE) | |
11210 | insn = NEXT_INSN (insn); | |
11211 | ||
11212 | if (INSN_UID (insn) > max_uid_cuid) | |
11213 | abort (); | |
11214 | ||
11215 | return INSN_CUID (insn); | |
11216 | } | |
11217 | \f | |
230d793d RS |
11218 | void |
11219 | dump_combine_stats (file) | |
11220 | FILE *file; | |
11221 | { | |
11222 | fprintf | |
11223 | (file, | |
11224 | ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n", | |
11225 | combine_attempts, combine_merges, combine_extras, combine_successes); | |
11226 | } | |
11227 | ||
11228 | void | |
11229 | dump_combine_total_stats (file) | |
11230 | FILE *file; | |
11231 | { | |
11232 | fprintf | |
11233 | (file, | |
11234 | "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n", | |
11235 | total_attempts, total_merges, total_extras, total_successes); | |
11236 | } |