/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
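
/* As an illustration of the PRE steps above (a hypothetical fragment,
   not taken from any actual test case), consider:

	if (cond)
	  x = a + b;		computed on this path only
	y = a + b;		partially redundant

   Step 3 deletes the redundant computation of "a + b" in the assignment
   to y, and steps 4/5 insert a copy on the path where the expression was
   not available, using the newly created reaching register:

	if (cond)
	  tmp = a + b, x = tmp;
	else
	  tmp = a + b;		inserted copy; expression now fully redundant
	y = tmp;

   The later copy-propagation pass then cleans up the extra moves.  */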
\f
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the copy propagation ("set") hash table,
     zero if it is the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
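
/* An illustrative sketch of a bucket lookup in these tables (hypothetical
   code, shown only to make the chaining explicit; the real accessors are
   helpers like lookup_set/next_set declared below):

     for (expr = table->table[hash]; expr != NULL; expr = expr->next_same_hash)
       if (expr_equiv_p (expr->expr, x))
	 break;
*/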

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
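
/* Illustrative example (hypothetical UIDs): if a block contains insns
   with UIDs 10, 12 and 15, where 12 is a note, alloc_gcse_mem might
   assign uid_cuid[10] = 4, uid_cuid[12] = 5 and uid_cuid[15] = 5; a
   non-insn shares the cuid of the next real insn, so cuids over real
   insns stay monotonic and gap-free.  */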

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
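
/* An example of the resulting data structure (illustration only): if
   pseudo 100 is set once in block 2 and later once in block 7, then
   reg_set_table[100] is a two-node chain; record_one_set links each new
   node at the head, so the bb_index fields read 7, then 2.  */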

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must
   have no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;
\f
/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, const_rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (const_rtx);
static int oprs_unchanged_p (const_rtx, const_rtx, int);
static int oprs_anticipatable_p (const_rtx, const_rtx);
static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (const_rtx, const_rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (const_rtx, const_rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (const_rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (const_rtx, const_edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, const_rtx, void *);
static void reg_clear_last_set (rtx, const_rtx, void *);
static bool store_ops_ok (const_rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (const_rtx, const_rtx, int);
static bool find_loads (const_rtx, const_rtx, int);
static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
static bool store_killed_after (const_rtx, const_rtx, const_rtx, const_basic_block, int *, rtx *);
static bool store_killed_before (const_rtx, const_rtx, const_rtx, const_basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  df_note_add_problem ();
  df_analyze ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (dump_file)
	fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem ();

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, false, false);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
	/* Do nothing.  */ ;
      else
	{
	  timevar_push (TV_PRE);
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets ();
	  run_jump_opt_after_gcse = 1;
	  timevar_pop (TV_PRE);
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
	{
	  timevar_push (TV_HOIST);
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem ();
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	  timevar_pop (TV_HOIST);
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  fflush (dump_file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, true, true);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  /* Tell the caller whether jump optimization should be re-run.  */
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
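
/* Illustrative use of can_copy_p (a hypothetical caller, not code from
   this file):

     if (can_copy_p (GET_MODE (dest)))
       changed |= try_replace_reg (dest, reaching_reg, insn);

   i.e. only attempt a substitution when a plain reg/reg move in the
   operand's mode is known to be recognized by the target.  */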
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insns that are not inside a basic block,
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
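
/* For example, for a (reg 100) operand with AVAIL_P == 0 the test is
   first_set >= INSN_CUID (insn), i.e. the value r100 had on block entry
   still reaches INSN; with AVAIL_P != 0 the test is
   last_set < INSN_CUID (insn), i.e. the operand value at INSN survives
   to the end of the block.  */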

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference of a load instruction;
   mems_conflict_for_gcse_p checks whether a memory store conflicts with
   this memory load.  */
static const_rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark it as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a read-only MEM, then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if X is equivalent to Y.  */

static int
expr_equiv_p (const_rtx x, const_rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        antic_occr = NULL;

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          antic_occr->insn = insn;
          antic_occr->next = cur_expr->antic_occr;
          antic_occr->deleted_p = 0;
          cur_expr->antic_occr = antic_occr;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          avail_occr->insn = insn;
          avail_occr->next = cur_expr->avail_occr;
          avail_occr->deleted_p = 0;
          cur_expr->avail_occr = avail_occr;
        }
    }
}
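
/* After scanning a block, an expression's antic_occr chain thus holds at
   most one occurrence per block (the first computing insn, pushed once),
   while its avail_occr chain also holds one per block but is updated in
   place so it ends up naming the last computing insn in that block.  */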

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}

/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (const_rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
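
/* For example, (compare (const_int 2) (const_int 3)) and, in an integer
   mode, (compare (reg 100) (reg 100)) are both treated as constants here
   even though CONSTANT_P rejects them.  A self-compare of floating point
   registers does not qualify, since it is not constant when the register
   may hold a NaN.  */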

/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* See if a REG_NOTE shows this equivalent to a simpler expression.
         This allows us to do a single GCSE pass and still eliminate
         redundant constants, addresses or other expressions that are
         constructed with multiple instructions.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
          && (table->set_p
              ? gcse_constant_p (XEXP (note, 0))
              : want_to_gcse_p (XEXP (note, 0))))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has an attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes, and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  The latter condition does not have to mean that
             SRC itself is not anticipatable, but we just will not be
             able to handle code motion of insns with multiple sets.  */
          int antic_p = oprs_anticipatable_p (src, insn)
                        && !multiple_sets (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((REG_P (src)
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BB_END (BLOCK_FOR_INSN (insn))
                   || (tmp = next_nonnote_insn (insn)) == NULL_RTX
                   || oprs_available_p (pat, tmp)))
        insert_set_in_table (pat, insn, table);
    }
  /* In the case of a store we want to consider the memory value as available
     in the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && want_to_gcse_p (dest)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has an attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes, and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* Stores are never anticipatable.  */
          int antic_p = 0;
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = oprs_available_p (dest, insn)
                        && ! JUMP_P (insn);

          /* Record the memory expression (DEST) in the hash table.  */
          insert_expr_in_table (dest, GET_MODE (dest), insn,
                                antic_p, avail_p, table);
        }
    }
}
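
/* As an illustration, suppose INSN computes an address in several steps
   and carries a REG_EQUAL note giving the whole address as a single
   (const ...) expression; the code above records the note expression in
   place of the multi-insn source, so a single GCSE pass can still
   eliminate the redundancy.  */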

static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}
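
/* For example, if r100 is set by the insns with CUIDs 5 and 9 of the
   current block, the first call records first_set == last_set == 5 and
   the second merely advances last_set to 9; oprs_unchanged_p then
   compares INSN_CUID against these two bounds.  */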

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
                   void * v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
}

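/* Each recorded store therefore contributes two consecutive nodes to a
   block's list: the first holds the canonicalized MEM itself, the second
   its canonicalized address, and compute_transp peels them off again in
   exactly that order.  */
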
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    {
      /* Note that traversals of this loop (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (blocks_with_calls, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.
         ??? hard-reg reg_set_in_block computation
         could be moved to compute_sets since they currently don't change.  */

      FOR_BB_INSNS (current_bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);
        }

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
          && implicit_sets[current_bb->index] != NULL_RTX)
        hash_scan_set (implicit_sets[current_bb->index],
                       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */
      in_libcall_block = 0;
      FOR_BB_INSNS (current_bb, insn)
        if (INSN_P (insn))
          {
            if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
              in_libcall_block = 1;
            else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
            hash_scan_insn (insn, table, in_libcall_block);
            if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
          }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}

/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}
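
/* For example, a 1000-insn function gets 1000 / 4 = 250 buckets, bumped
   to the odd 251 by the |= 1 step; small functions fall back to the
   11-bucket minimum.  */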

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}

/* Compute the hash TABLE for doing copy/const propagation or the
   expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
\f
/* Expression tracking support.  */

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list ; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
        free_EXPR_LIST_node (list);
      else
        free_INSN_LIST_node (list);
    }

  *listp = NULL;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables (void)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      free_INSN_LIST_list (modify_mem_list + i);
      free_insn_expr_list_list (canon_modify_mem_list + i);
    }
  bitmap_clear (modify_mem_list_set);
  bitmap_clear (blocks_with_calls);
}

/* Release memory used by modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}

/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (const_rtx x, const_rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                  INSN_CUID (insn), x, 0))
        return 0;
      else
        return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return oprs_not_set_p (XEXP (x, i), insn);

          if (! oprs_not_set_p (XEXP (x, i), insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
            return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (MEM_P (dest))
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (REG_P (clob))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          mark_set (x, insn);
        else if (GET_CODE (x) == CLOBBER)
          mark_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

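/* Each of these is a vector of bitmaps indexed by block and by hashed
   assignment: roughly, cprop_pavloc[b] holds the assignments computed in
   block b that reach its end, cprop_absaltered[b] the assignments whose
   operands or destination are modified in b, and cprop_avin/cprop_avout
   the block-entry/block-exit availability solved for below.  */
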
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  SET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                SET_BIT (bmap[r->bb_index], indx);
            }
        }
      else
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  RESET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                RESET_BIT (bmap[r->bb_index], indx);
            }
        }

      return;

    case MEM:
      if (! MEM_READONLY_P (x))
        {
          bitmap_iterator bi;
          unsigned bb_index;

          /* First handle all the blocks with calls.  We don't need to
             do any list walking for them.  */
          EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
            {
              if (set_p)
                SET_BIT (bmap[bb_index], indx);
              else
                RESET_BIT (bmap[bb_index], indx);
            }

          /* Now iterate over the blocks which have memory modifications
             but which do not have any calls.  */
          EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
                                          blocks_with_calls,
                                          0, bb_index, bi)
            {
              rtx list_entry = canon_modify_mem_list[bb_index];

              while (list_entry)
                {
                  rtx dest, dest_addr;

                  /* LIST_ENTRY must be an INSN of some kind that sets memory.
                     Examine each hunk of memory that is modified.  */

                  dest = XEXP (list_entry, 0);
                  list_entry = XEXP (list_entry, 1);
                  dest_addr = XEXP (list_entry, 0);

                  if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
                                             x, rtx_addr_varies_p))
                    {
                      if (set_p)
                        SET_BIT (bmap[bb_index], indx);
                      else
                        RESET_BIT (bmap[bb_index], indx);
                      break;
                    }
                  list_entry = XEXP (list_entry, 1);
                }
            }
        }

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          compute_transp (XEXP (x, i), indx, bmap, set_p);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}

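/* Transparency is what lets a value pass through a block: e.g. the
   expression (plus (reg 100) (reg 101)) is transparent in every block
   that sets neither r100 nor r101, while a MEM expression additionally
   loses transparency in any block containing a call or a store that may
   alias it, exactly the cases enumerated above.  */
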
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
                     cprop_avout, cprop_avin);
}
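
/* In outline, compute_available solves the usual forward dataflow
   system: an assignment is in AVOUT(b) if it is locally available in b
   (pavloc) or is in AVIN(b) and not altered in b (absaltered), and
   AVIN(b) is the intersection of AVOUT over b's predecessors.  */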
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
        return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }

          find_used_regs (&XEXP (x, i), data);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
    }
}

2649 | /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO. | |
cc2902df | 2650 | Returns nonzero is successful. */ |
7506f491 DE |
2651 | |
2652 | static int | |
1d088dee | 2653 | try_replace_reg (rtx from, rtx to, rtx insn) |
7506f491 | 2654 | { |
205eb6e7 | 2655 | rtx note = find_reg_equal_equiv_note (insn); |
fb0c0a12 | 2656 | rtx src = 0; |
172890a2 RK |
2657 | int success = 0; |
2658 | rtx set = single_set (insn); | |
833fc3ad | 2659 | |
3e916873 JH |
2660 | /* Usually we substitute easy stuff, so we won't copy everything. |
2661 | However, we need to take care not to duplicate non-trivial CONST
2662 | expressions. */ | |
2663 | to = copy_rtx (to); | |
2664 | ||
2b773ee2 JH |
2665 | validate_replace_src_group (from, to, insn); |
2666 | if (num_changes_pending () && apply_change_group ()) | |
2667 | success = 1; | |
9e71c818 | 2668 | |
9feff114 JDA |
2669 | /* Try to simplify SET_SRC if we have substituted a constant. */ |
2670 | if (success && set && CONSTANT_P (to)) | |
2671 | { | |
2672 | src = simplify_rtx (SET_SRC (set)); | |
2673 | ||
2674 | if (src) | |
2675 | validate_change (insn, &SET_SRC (set), src, 0); | |
2676 | } | |
2677 | ||
205eb6e7 RS |
2678 | /* If there is already a REG_EQUAL note, update the expression in it |
2679 | with our replacement. */ | |
2680 | if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL) | |
a31830a7 SB |
2681 | set_unique_reg_note (insn, REG_EQUAL, |
2682 | simplify_replace_rtx (XEXP (note, 0), from, to)); | |
f305679f | 2683 | if (!success && set && reg_mentioned_p (from, SET_SRC (set))) |
833fc3ad | 2684 | { |
f305679f JH |
2685 | /* If the above failed and this is a single set, try to simplify the source of
2686 | the set given our substitution. We could perhaps try this for multiple | |
2687 | SETs, but it probably won't buy us anything. */ | |
172890a2 RK |
2688 | src = simplify_replace_rtx (SET_SRC (set), from, to); |
2689 | ||
9e71c818 JH |
2690 | if (!rtx_equal_p (src, SET_SRC (set)) |
2691 | && validate_change (insn, &SET_SRC (set), src, 0)) | |
172890a2 | 2692 | success = 1; |
833fc3ad | 2693 | |
bbd288a4 FS |
2694 | /* If we've failed to do replacement, have a single SET, don't already |
2695 | have a note, and have no special SET, add a REG_EQUAL note so as not
2696 | to lose information. */
2697 | if (!success && note == 0 && set != 0 | |
70a640af AK |
2698 | && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT |
2699 | && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART) | |
f305679f JH |
2700 | note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src)); |
2701 | } | |
e251e2a2 | 2702 | |
172890a2 RK |
2703 | /* A REG_EQUAL note may get simplified into a register.
2704 | We don't allow that. Remove the note. This case ought
fbe5a4a6 | 2705 | not to happen, because earlier code ought to synthesize a
172890a2 | 2706 | reg-reg move, but be on the safe side. */
205eb6e7 | 2707 | if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0))) |
172890a2 | 2708 | remove_note (insn, note); |
833fc3ad | 2709 | |
833fc3ad JH |
2710 | return success; |
2711 | } | |
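/* Worked example (hypothetical pseudo numbers): replacing FROM =
   (reg 67) with TO = (const_int 4) in

     (set (reg 65) (plus:SI (reg 67) (const_int 1)))

   first produces (plus:SI (const_int 4) (const_int 1)) via
   validate_replace_src_group; because TO is a constant, the SET_SRC
   is then resimplified, and (assuming the target accepts the result)
   the insn becomes

     (set (reg 65) (const_int 5)).  */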
c4c81601 RK |
2712 | |
2713 | /* Find a set of REGNO that is available on entry to INSN's block. Returns
2714 | NULL if no such set is found. */
7506f491 DE |
2715 | |
2716 | static struct expr * | |
1d088dee | 2717 | find_avail_set (int regno, rtx insn) |
7506f491 | 2718 | { |
cafba495 BS |
2719 | /* SET1 contains the last set found that can be returned to the caller for |
2720 | use in a substitution. */ | |
2721 | struct expr *set1 = 0; | |
589005ff | 2722 | |
cafba495 | 2723 | /* Loops are not possible here. To get a loop we would need two sets |
454ff5cb | 2724 | available at the start of the block containing INSN, i.e. we would
cafba495 BS |
2725 | need two sets like this available at the start of the block: |
2726 | ||
2727 | (set (reg X) (reg Y)) | |
2728 | (set (reg Y) (reg X)) | |
2729 | ||
2730 | This cannot happen since the set of (reg Y) would have killed the
2731 | set of (reg X) making it unavailable at the start of this block. */ | |
2732 | while (1) | |
8e42ace1 | 2733 | { |
cafba495 | 2734 | rtx src; |
ceda50e9 | 2735 | struct expr *set = lookup_set (regno, &set_hash_table); |
cafba495 BS |
2736 | |
2737 | /* Find a set that is available at the start of the block | |
2738 | which contains INSN. */ | |
2739 | while (set) | |
2740 | { | |
2741 | if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index)) | |
2742 | break; | |
2743 | set = next_set (regno, set); | |
2744 | } | |
7506f491 | 2745 | |
cafba495 BS |
2746 | /* If no available set was found we've reached the end of the |
2747 | (possibly empty) copy chain. */ | |
2748 | if (set == 0) | |
589005ff | 2749 | break; |
cafba495 | 2750 | |
282899df | 2751 | gcc_assert (GET_CODE (set->expr) == SET); |
cafba495 BS |
2752 | |
2753 | src = SET_SRC (set->expr); | |
2754 | ||
2755 | /* We know the set is available. | |
2756 | Now check that SRC is ANTLOC (i.e. none of the source operands | |
589005ff | 2757 | have changed since the start of the block). |
cafba495 BS |
2758 | |
2759 | If the source operand changed, we may still use it for the next | |
2760 | iteration of this loop, but we may not use it for substitutions. */ | |
c4c81601 | 2761 | |
6b2d1c9e | 2762 | if (gcse_constant_p (src) || oprs_not_set_p (src, insn)) |
cafba495 BS |
2763 | set1 = set; |
2764 | ||
2765 | /* If the source of the set is anything except a register, then | |
2766 | we have reached the end of the copy chain. */ | |
7b1b4aed | 2767 | if (! REG_P (src)) |
7506f491 | 2768 | break; |
7506f491 | 2769 | |
454ff5cb | 2770 | /* Follow the copy chain, i.e. start another iteration of the loop |
cafba495 BS |
2771 | and see if we have an available copy into SRC. */ |
2772 | regno = REGNO (src); | |
8e42ace1 | 2773 | } |
cafba495 BS |
2774 | |
2775 | /* SET1 holds the last set that was available and anticipatable at | |
2776 | INSN. */ | |
2777 | return set1; | |
7506f491 DE |
2778 | } |
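/* Illustration of the copy-chain walk (hypothetical pseudo numbers).
   Suppose these sets are available at the start of INSN's block:

     (set (reg 66) (reg 65))
     (set (reg 67) (reg 66))

   For REGNO == 67 the loop records the set of (reg 67) (if its
   source operands are unchanged), then follows SRC = (reg 66) and
   repeats.  The walk stops when SRC is not a register or no
   available set remains; SET1 then holds the last usable set,
   allowing uses of (reg 67) to be replaced by (reg 65).  */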
2779 | ||
abd535b6 | 2780 | /* Subroutine of cprop_insn that tries to propagate constants into |
0e3f0221 | 2781 | JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL |
fbe5a4a6 | 2782 | it is the instruction that immediately precedes JUMP, and must be a |
818b6b7f | 2783 | single SET of a register. FROM is what we will try to replace, |
0e3f0221 | 2784 | SRC is the constant we will try to substitute for it. Returns nonzero |
589005ff | 2785 | if a change was made. */ |
c4c81601 | 2786 | |
abd535b6 | 2787 | static int |
1d088dee | 2788 | cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src) |
abd535b6 | 2789 | { |
bc6688b4 | 2790 | rtx new, set_src, note_src; |
0e3f0221 | 2791 | rtx set = pc_set (jump); |
bc6688b4 | 2792 | rtx note = find_reg_equal_equiv_note (jump); |
0e3f0221 | 2793 | |
bc6688b4 RS |
2794 | if (note) |
2795 | { | |
2796 | note_src = XEXP (note, 0); | |
2797 | if (GET_CODE (note_src) == EXPR_LIST) | |
2798 | note_src = NULL_RTX; | |
2799 | } | |
2800 | else note_src = NULL_RTX; | |
2801 | ||
2802 | /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */ | |
2803 | set_src = note_src ? note_src : SET_SRC (set); | |
2804 | ||
2805 | /* First substitute the SETCC condition into the JUMP instruction, | |
2806 | then substitute the given values into this expanded JUMP. */
2807 | if (setcc != NULL_RTX | |
48ddd46c JH |
2808 | && !modified_between_p (from, setcc, jump) |
2809 | && !modified_between_p (src, setcc, jump)) | |
b2f02503 | 2810 | { |
bc6688b4 | 2811 | rtx setcc_src; |
b2f02503 | 2812 | rtx setcc_set = single_set (setcc); |
bc6688b4 RS |
2813 | rtx setcc_note = find_reg_equal_equiv_note (setcc); |
2814 | setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST) | |
2815 | ? XEXP (setcc_note, 0) : SET_SRC (setcc_set); | |
2816 | set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set), | |
2817 | setcc_src); | |
b2f02503 | 2818 | } |
0e3f0221 | 2819 | else |
bc6688b4 | 2820 | setcc = NULL_RTX; |
0e3f0221 | 2821 | |
bc6688b4 | 2822 | new = simplify_replace_rtx (set_src, from, src); |
abd535b6 | 2823 | |
bc6688b4 RS |
2824 | /* If no simplification can be made, then try the next register. */ |
2825 | if (rtx_equal_p (new, SET_SRC (set))) | |
9e48c409 | 2826 | return 0; |
589005ff | 2827 | |
7d5ab30e | 2828 | /* If this is now a no-op, delete it; otherwise it must be a valid insn. */
172890a2 | 2829 | if (new == pc_rtx) |
0e3f0221 | 2830 | delete_insn (jump); |
7d5ab30e | 2831 | else |
abd535b6 | 2832 | { |
48ddd46c JH |
2833 | /* Ensure the value computed inside the jump insn is equivalent
2834 | to the one computed by setcc. */
bc6688b4 | 2835 | if (setcc && modified_in_p (new, setcc)) |
48ddd46c | 2836 | return 0; |
0e3f0221 | 2837 | if (! validate_change (jump, &SET_SRC (set), new, 0)) |
bc6688b4 RS |
2838 | { |
2839 | /* When (some) constants are not valid in a comparison, and there | |
2840 | are two registers to be replaced by constants before the entire | |
2841 | comparison can be folded into a constant, we need to keep | |
2842 | intermediate information in REG_EQUAL notes. For targets with | |
2843 | separate compare insns, such notes are added by try_replace_reg. | |
2844 | When we have a combined compare-and-branch instruction, however, | |
2845 | we need to attach a note to the branch itself to make this | |
2846 | optimization work. */ | |
2847 | ||
2848 | if (!rtx_equal_p (new, note_src)) | |
2849 | set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new)); | |
2850 | return 0; | |
2851 | } | |
2852 | ||
2853 | /* Remove REG_EQUAL note after simplification. */ | |
2854 | if (note_src) | |
2855 | remove_note (jump, note); | |
7d5ab30e | 2856 | } |
abd535b6 | 2857 | |
0e3f0221 RS |
2858 | #ifdef HAVE_cc0 |
2859 | /* Delete the cc0 setter. */ | |
818b6b7f | 2860 | if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc)))) |
0e3f0221 RS |
2861 | delete_insn (setcc); |
2862 | #endif | |
2863 | ||
172890a2 | 2864 | run_jump_opt_after_gcse = 1; |
c4c81601 | 2865 | |
27fb79ad | 2866 | global_const_prop_count++; |
10d22567 | 2867 | if (dump_file != NULL) |
172890a2 | 2868 | { |
10d22567 | 2869 | fprintf (dump_file, |
27fb79ad | 2870 | "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ", |
0e3f0221 | 2871 | REGNO (from), INSN_UID (jump)); |
10d22567 ZD |
2872 | print_rtl (dump_file, src); |
2873 | fprintf (dump_file, "\n"); | |
abd535b6 | 2874 | } |
0005550b | 2875 | purge_dead_edges (bb); |
172890a2 RK |
2876 | |
2877 | return 1; | |
abd535b6 BS |
2878 | } |
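/* Worked example (hypothetical pseudos and label): given

     SETCC: (set (reg 100) (eq:SI (reg 68) (const_int 0)))
     JUMP:  (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                                    (label_ref 23) (pc)))

   with FROM = (reg 68) and SRC = (const_int 0), the SETCC condition
   is first substituted for (reg 100); substituting (const_int 0)
   for (reg 68) then folds the condition to true, so the SET_SRC
   simplifies to (label_ref 23), an unconditional jump.  Had it
   folded the other way, the result would be pc_rtx and the jump
   would simply be deleted.  */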
2879 | ||
ae860ff7 | 2880 | static bool |
eb232f4e | 2881 | constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps) |
ae860ff7 JH |
2882 | { |
2883 | rtx sset; | |
2884 | ||
2885 | /* Check for reg or cc0 setting instructions followed by | |
2886 | conditional branch instructions first. */ | |
2887 | if (alter_jumps | |
2888 | && (sset = single_set (insn)) != NULL | |
244d05fb | 2889 | && NEXT_INSN (insn) |
ae860ff7 JH |
2890 | && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn))) |
2891 | { | |
2892 | rtx dest = SET_DEST (sset); | |
2893 | if ((REG_P (dest) || CC0_P (dest)) | |
2894 | && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to)) | |
2895 | return 1; | |
2896 | } | |
2897 | ||
2898 | /* Handle normal insns next. */ | |
4b4bf941 | 2899 | if (NONJUMP_INSN_P (insn) |
ae860ff7 JH |
2900 | && try_replace_reg (from, to, insn)) |
2901 | return 1; | |
2902 | ||
2903 | /* Try to propagate a CONST_INT into a conditional jump. | |
2904 | We're pretty specific about what we will handle in this | |
2905 | code; we can extend this as necessary over time.
2906 | ||
2907 | Right now the insn in question must look like | |
2908 | (set (pc) (if_then_else ...)) */ | |
2909 | else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn)) | |
2910 | return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to); | |
2911 | return 0; | |
2912 | } | |
2913 | ||
7506f491 | 2914 | /* Perform constant and copy propagation on INSN. |
cc2902df | 2915 | The result is nonzero if a change was made. */ |
7506f491 DE |
2916 | |
2917 | static int | |
1d088dee | 2918 | cprop_insn (rtx insn, int alter_jumps) |
7506f491 DE |
2919 | { |
2920 | struct reg_use *reg_used; | |
2921 | int changed = 0; | |
833fc3ad | 2922 | rtx note; |
7506f491 | 2923 | |
9e71c818 | 2924 | if (!INSN_P (insn)) |
7506f491 DE |
2925 | return 0; |
2926 | ||
2927 | reg_use_count = 0; | |
9e71c818 | 2928 | note_uses (&PATTERN (insn), find_used_regs, NULL); |
589005ff | 2929 | |
172890a2 | 2930 | note = find_reg_equal_equiv_note (insn); |
833fc3ad | 2931 | |
dc297297 | 2932 | /* We may win even when propagating constants into notes. */ |
833fc3ad | 2933 | if (note) |
9e71c818 | 2934 | find_used_regs (&XEXP (note, 0), NULL); |
7506f491 | 2935 | |
c4c81601 RK |
2936 | for (reg_used = ®_use_table[0]; reg_use_count > 0; |
2937 | reg_used++, reg_use_count--) | |
7506f491 | 2938 | { |
770ae6cc | 2939 | unsigned int regno = REGNO (reg_used->reg_rtx); |
7506f491 DE |
2940 | rtx pat, src; |
2941 | struct expr *set; | |
7506f491 DE |
2942 | |
2943 | /* Ignore registers created by GCSE. | |
dc297297 | 2944 | We do this because ... */ |
7506f491 DE |
2945 | if (regno >= max_gcse_regno) |
2946 | continue; | |
2947 | ||
2948 | /* If the register has already been set in this block, there's | |
2949 | nothing we can do. */ | |
2950 | if (! oprs_not_set_p (reg_used->reg_rtx, insn)) | |
2951 | continue; | |
2952 | ||
2953 | /* Find an assignment that sets reg_used and is available | |
2954 | at the start of the block. */ | |
2955 | set = find_avail_set (regno, insn); | |
2956 | if (! set) | |
2957 | continue; | |
589005ff | 2958 | |
7506f491 DE |
2959 | pat = set->expr; |
2960 | /* ??? We might be able to handle PARALLELs. Later. */ | |
282899df | 2961 | gcc_assert (GET_CODE (pat) == SET); |
c4c81601 | 2962 | |
7506f491 DE |
2963 | src = SET_SRC (pat); |
2964 | ||
e78d9500 | 2965 | /* Constant propagation. */ |
6b2d1c9e | 2966 | if (gcse_constant_p (src)) |
7506f491 | 2967 | { |
ae860ff7 | 2968 | if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps)) |
7506f491 DE |
2969 | { |
2970 | changed = 1; | |
27fb79ad | 2971 | global_const_prop_count++; |
10d22567 | 2972 | if (dump_file != NULL) |
7506f491 | 2973 | { |
10d22567 ZD |
2974 | fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno); |
2975 | fprintf (dump_file, "insn %d with constant ", INSN_UID (insn)); | |
2976 | print_rtl (dump_file, src); | |
2977 | fprintf (dump_file, "\n"); | |
7506f491 | 2978 | } |
bc6688b4 RS |
2979 | if (INSN_DELETED_P (insn)) |
2980 | return 1; | |
7506f491 DE |
2981 | } |
2982 | } | |
7b1b4aed | 2983 | else if (REG_P (src) |
7506f491 DE |
2984 | && REGNO (src) >= FIRST_PSEUDO_REGISTER |
2985 | && REGNO (src) != regno) | |
2986 | { | |
cafba495 | 2987 | if (try_replace_reg (reg_used->reg_rtx, src, insn)) |
7506f491 | 2988 | { |
cafba495 | 2989 | changed = 1; |
27fb79ad | 2990 | global_copy_prop_count++; |
10d22567 | 2991 | if (dump_file != NULL) |
7506f491 | 2992 | { |
10d22567 | 2993 | fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d", |
c4c81601 | 2994 | regno, INSN_UID (insn)); |
10d22567 | 2995 | fprintf (dump_file, " with reg %d\n", REGNO (src)); |
7506f491 | 2996 | } |
cafba495 BS |
2997 | |
2998 | /* The original insn setting reg_used may or may not now be | |
2999 | deletable. We leave the deletion to flow. */ | |
3000 | /* FIXME: If it turns out that the insn isn't deletable, | |
3001 | then we may have unnecessarily extended register lifetimes | |
3002 | and made things worse. */ | |
7506f491 DE |
3003 | } |
3004 | } | |
3005 | } | |
3006 | ||
3007 | return changed; | |
3008 | } | |
3009 | ||
710ee3ed RH |
3010 | /* Like find_used_regs, but avoid recording uses that appear in |
3011 | input-output contexts such as zero_extract or pre_dec. This | |
3012 | restricts the cases we consider to those for which local cprop | |
3013 | can legitimately make replacements. */ | |
3014 | ||
3015 | static void | |
1d088dee | 3016 | local_cprop_find_used_regs (rtx *xptr, void *data) |
710ee3ed RH |
3017 | { |
3018 | rtx x = *xptr; | |
3019 | ||
3020 | if (x == 0) | |
3021 | return; | |
3022 | ||
3023 | switch (GET_CODE (x)) | |
3024 | { | |
3025 | case ZERO_EXTRACT: | |
3026 | case SIGN_EXTRACT: | |
3027 | case STRICT_LOW_PART: | |
3028 | return; | |
3029 | ||
3030 | case PRE_DEC: | |
3031 | case PRE_INC: | |
3032 | case POST_DEC: | |
3033 | case POST_INC: | |
3034 | case PRE_MODIFY: | |
3035 | case POST_MODIFY: | |
3036 | /* Can only legitimately appear this early in the context of | |
3037 | stack pushes for function arguments, but handle all of the | |
3038 | codes nonetheless. */ | |
3039 | return; | |
3040 | ||
3041 | case SUBREG: | |
3042 | /* Setting a subreg of a register larger than word_mode leaves | |
3043 | the non-written words unchanged. */ | |
3044 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD) | |
3045 | return; | |
3046 | break; | |
3047 | ||
3048 | default: | |
3049 | break; | |
3050 | } | |
3051 | ||
3052 | find_used_regs (xptr, data); | |
3053 | } | |
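/* Example of a context rejected above (hypothetical pseudo): in

     (set (zero_extract:SI (reg 65) (const_int 8) (const_int 0))
          (reg 66))

   (reg 65) is both read and written, so substituting a constant for
   it would be invalid; find_used_regs is therefore never reached for
   such input-output uses.  */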
1d088dee | 3054 | |
8ba46434 R |
3055 | /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall; |
3056 | their REG_EQUAL notes need updating. */ | |
e197b6fc | 3057 | |
ae860ff7 | 3058 | static bool |
eb232f4e | 3059 | do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp) |
ae860ff7 JH |
3060 | { |
3061 | rtx newreg = NULL, newcnst = NULL; | |
3062 | ||
e197b6fc RH |
3063 | /* Rule out USE instructions and ASM statements as we don't want to |
3064 | change the hard registers mentioned. */ | |
7b1b4aed | 3065 | if (REG_P (x) |
ae860ff7 | 3066 | && (REGNO (x) >= FIRST_PSEUDO_REGISTER |
e197b6fc RH |
3067 | || (GET_CODE (PATTERN (insn)) != USE |
3068 | && asm_noperands (PATTERN (insn)) < 0))) | |
ae860ff7 JH |
3069 | { |
3070 | cselib_val *val = cselib_lookup (x, GET_MODE (x), 0); | |
3071 | struct elt_loc_list *l; | |
3072 | ||
3073 | if (!val) | |
3074 | return false; | |
3075 | for (l = val->locs; l; l = l->next) | |
3076 | { | |
3077 | rtx this_rtx = l->loc; | |
46690369 JH |
3078 | rtx note; |
3079 | ||
5976e643 RS |
3080 | /* Don't CSE non-constant values out of libcall blocks. */ |
3081 | if (l->in_libcall && ! CONSTANT_P (this_rtx)) | |
9635cfad JH |
3082 | continue; |
3083 | ||
6b2d1c9e | 3084 | if (gcse_constant_p (this_rtx)) |
ae860ff7 | 3085 | newcnst = this_rtx; |
46690369 JH |
3086 | if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER |
3087 | /* Don't copy propagate if it has an attached REG_EQUIV note.
3088 | At this point only function parameters should have
3089 | REG_EQUIV notes, and if the argument slot is used somewhere
3090 | explicitly, it means the address of the parameter has been
3091 | taken, so we should not extend the lifetime of the pseudo. */
3092 | && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX)) | |
7b1b4aed | 3093 | || ! MEM_P (XEXP (note, 0)))) |
ae860ff7 JH |
3094 | newreg = this_rtx; |
3095 | } | |
3096 | if (newcnst && constprop_register (insn, x, newcnst, alter_jumps)) | |
3097 | { | |
8ba46434 | 3098 | /* If we find a case where we can't fix the retval REG_EQUAL notes
fbe5a4a6 | 3099 | to match the new register, we have to either abandon this replacement,
8ba46434 | 3100 | fix delete_trivially_dead_insns to preserve the setting insn,
6fb5fa3c | 3101 | or make it delete the REG_EQUAL note, and fix up all passes that
8ba46434 | 3102 | require the REG_EQUAL note there. */
282899df NS |
3103 | bool adjusted; |
3104 | ||
3105 | adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp); | |
3106 | gcc_assert (adjusted); | |
3107 | ||
10d22567 | 3108 | if (dump_file != NULL) |
ae860ff7 | 3109 | { |
10d22567 | 3110 | fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ", |
ae860ff7 | 3111 | REGNO (x)); |
10d22567 | 3112 | fprintf (dump_file, "insn %d with constant ", |
ae860ff7 | 3113 | INSN_UID (insn)); |
10d22567 ZD |
3114 | print_rtl (dump_file, newcnst); |
3115 | fprintf (dump_file, "\n"); | |
ae860ff7 | 3116 | } |
27fb79ad | 3117 | local_const_prop_count++; |
ae860ff7 JH |
3118 | return true; |
3119 | } | |
3120 | else if (newreg && newreg != x && try_replace_reg (x, newreg, insn)) | |
3121 | { | |
8ba46434 | 3122 | adjust_libcall_notes (x, newreg, insn, libcall_sp); |
10d22567 | 3123 | if (dump_file != NULL) |
ae860ff7 | 3124 | { |
10d22567 | 3125 | fprintf (dump_file, |
ae860ff7 JH |
3126 | "LOCAL COPY-PROP: Replacing reg %d in insn %d", |
3127 | REGNO (x), INSN_UID (insn)); | |
10d22567 | 3128 | fprintf (dump_file, " with reg %d\n", REGNO (newreg)); |
ae860ff7 | 3129 | } |
27fb79ad | 3130 | local_copy_prop_count++; |
ae860ff7 JH |
3131 | return true; |
3132 | } | |
3133 | } | |
3134 | return false; | |
3135 | } | |
3136 | ||
8ba46434 R |
3137 | /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall; |
3138 | their REG_EQUAL notes need updating to reflect that OLDREG has been | |
f4e3e618 RH |
3139 | replaced with NEWVAL in INSN. Return true if all substitutions could |
3140 | be made. */ | |
8ba46434 | 3141 | static bool |
1d088dee | 3142 | adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp) |
8ba46434 | 3143 | { |
f4e3e618 | 3144 | rtx end; |
8ba46434 R |
3145 | |
3146 | while ((end = *libcall_sp++)) | |
3147 | { | |
f4e3e618 | 3148 | rtx note = find_reg_equal_equiv_note (end); |
8ba46434 R |
3149 | |
3150 | if (! note) | |
3151 | continue; | |
3152 | ||
3153 | if (REG_P (newval)) | |
3154 | { | |
3155 | if (reg_set_between_p (newval, PREV_INSN (insn), end)) | |
3156 | { | |
3157 | do | |
3158 | { | |
3159 | note = find_reg_equal_equiv_note (end); | |
3160 | if (! note) | |
3161 | continue; | |
3162 | if (reg_mentioned_p (newval, XEXP (note, 0))) | |
3163 | return false; | |
3164 | } | |
3165 | while ((end = *libcall_sp++)); | |
3166 | return true; | |
3167 | } | |
3168 | } | |
5976e643 | 3169 | XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval); |
6fb5fa3c | 3170 | df_notes_rescan (end); |
8ba46434 R |
3171 | insn = end; |
3172 | } | |
3173 | return true; | |
3174 | } | |
3175 | ||
3176 | #define MAX_NESTED_LIBCALLS 9 | |
3177 | ||
eb232f4e SB |
3178 | /* Do local const/copy propagation (i.e. within each basic block). |
3179 | If ALTER_JUMPS is true, allow propagating into jump insns, which | |
3180 | could modify the CFG. */ | |
3181 | ||
ae860ff7 | 3182 | static void |
eb232f4e | 3183 | local_cprop_pass (bool alter_jumps) |
ae860ff7 | 3184 | { |
eb232f4e | 3185 | basic_block bb; |
ae860ff7 JH |
3186 | rtx insn; |
3187 | struct reg_use *reg_used; | |
8ba46434 | 3188 | rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp; |
1649d92f | 3189 | bool changed = false; |
ae860ff7 | 3190 | |
463301c3 | 3191 | cselib_init (false); |
8ba46434 R |
3192 | libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS]; |
3193 | *libcall_sp = 0; | |
eb232f4e | 3194 | FOR_EACH_BB (bb) |
ae860ff7 | 3195 | { |
eb232f4e | 3196 | FOR_BB_INSNS (bb, insn) |
ae860ff7 | 3197 | { |
eb232f4e | 3198 | if (INSN_P (insn)) |
ae860ff7 | 3199 | { |
eb232f4e | 3200 | rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX); |
ae860ff7 | 3201 | |
eb232f4e SB |
3202 | if (note) |
3203 | { | |
3204 | gcc_assert (libcall_sp != libcall_stack); | |
3205 | *--libcall_sp = XEXP (note, 0); | |
3206 | } | |
3207 | note = find_reg_note (insn, REG_RETVAL, NULL_RTX); | |
3208 | if (note) | |
3209 | libcall_sp++; | |
3210 | note = find_reg_equal_equiv_note (insn); | |
3211 | do | |
3212 | { | |
3213 | reg_use_count = 0; | |
3214 | note_uses (&PATTERN (insn), local_cprop_find_used_regs, | |
3215 | NULL); | |
3216 | if (note) | |
3217 | local_cprop_find_used_regs (&XEXP (note, 0), NULL); | |
3218 | ||
3219 | for (reg_used = ®_use_table[0]; reg_use_count > 0; | |
3220 | reg_used++, reg_use_count--) | |
6fb5fa3c DB |
3221 | { |
3222 | if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps, | |
3223 | libcall_sp)) | |
3224 | { | |
3225 | changed = true; | |
3226 | break; | |
3227 | } | |
3228 | } | |
eb232f4e | 3229 | if (INSN_DELETED_P (insn)) |
1649d92f | 3230 | break; |
eb232f4e SB |
3231 | } |
3232 | while (reg_use_count); | |
ae860ff7 | 3233 | } |
eb232f4e | 3234 | cselib_process_insn (insn); |
ae860ff7 | 3235 | } |
eb232f4e SB |
3236 | |
3237 | /* Forget everything at the end of a basic block. Make sure we are | |
3238 | not inside a libcall; they should never cross basic blocks. */
3239 | cselib_clear_table (); | |
3240 | gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]); | |
ae860ff7 | 3241 | } |
eb232f4e | 3242 | |
ae860ff7 | 3243 | cselib_finish (); |
eb232f4e | 3244 | |
1649d92f JH |
3245 | /* Global analysis may get into infinite loops for unreachable blocks. */ |
3246 | if (changed && alter_jumps) | |
5f0bea72 JH |
3247 | { |
3248 | delete_unreachable_blocks (); | |
3249 | free_reg_set_mem (); | |
3250 | alloc_reg_set_mem (max_reg_num ()); | |
eb232f4e | 3251 | compute_sets (); |
5f0bea72 | 3252 | } |
ae860ff7 JH |
3253 | } |
3254 | ||
c4c81601 | 3255 | /* Forward propagate copies. This includes copies and constants. Return |
cc2902df | 3256 | nonzero if a change was made. */ |
7506f491 DE |
3257 | |
3258 | static int | |
1d088dee | 3259 | cprop (int alter_jumps) |
7506f491 | 3260 | { |
e0082a72 ZD |
3261 | int changed; |
3262 | basic_block bb; | |
7506f491 DE |
3263 | rtx insn; |
3264 | ||
3265 | /* Note we start at block 1. */ | |
e0082a72 ZD |
3266 | if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR) |
3267 | { | |
10d22567 ZD |
3268 | if (dump_file != NULL) |
3269 | fprintf (dump_file, "\n"); | |
e0082a72 ZD |
3270 | return 0; |
3271 | } | |
7506f491 DE |
3272 | |
3273 | changed = 0; | |
e0082a72 | 3274 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb) |
7506f491 DE |
3275 | { |
3276 | /* Reset tables used to keep track of what's still valid [since the | |
3277 | start of the block]. */ | |
3278 | reset_opr_set_tables (); | |
3279 | ||
eb232f4e | 3280 | FOR_BB_INSNS (bb, insn) |
172890a2 RK |
3281 | if (INSN_P (insn)) |
3282 | { | |
ae860ff7 | 3283 | changed |= cprop_insn (insn, alter_jumps); |
7506f491 | 3284 | |
172890a2 RK |
3285 | /* Keep track of everything modified by this insn. */ |
3286 | /* ??? Need to be careful w.r.t. mods done to INSN. Don't | |
3287 | call mark_oprs_set if we turned the insn into a NOTE. */ | |
7b1b4aed | 3288 | if (! NOTE_P (insn)) |
172890a2 | 3289 | mark_oprs_set (insn); |
8e42ace1 | 3290 | } |
7506f491 DE |
3291 | } |
3292 | ||
10d22567 ZD |
3293 | if (dump_file != NULL) |
3294 | fprintf (dump_file, "\n"); | |
7506f491 DE |
3295 | |
3296 | return changed; | |
3297 | } | |
3298 | ||
fbef91d8 RS |
3299 | /* Similar to get_condition, only the resulting condition must be |
3300 | valid at JUMP, instead of at EARLIEST. | |
3301 | ||
3302 | This differs from noce_get_condition in ifcvt.c in that we prefer not to | |
3303 | settle for the condition variable in the jump instruction being integral. | |
3304 | We prefer to be able to record the value of a user variable, rather than | |
3305 | the value of a temporary used in a condition. This could be solved by | |
aabcd309 | 3306 | recording the value of *every* register scanned by canonicalize_condition, |
fbef91d8 RS |
3307 | but this would require some code reorganization. */ |
3308 | ||
2fa4a849 | 3309 | rtx |
1d088dee | 3310 | fis_get_condition (rtx jump) |
fbef91d8 | 3311 | { |
45d09c02 | 3312 | return get_condition (jump, NULL, false, true); |
fbef91d8 RS |
3313 | } |
3314 | ||
b0656d8b JW |
3315 | /* Check the comparison COND to see if we can safely form an implicit set from |
3316 | it. COND is either an EQ or NE comparison. */ | |
3317 | ||
3318 | static bool | |
ed7a4b4b | 3319 | implicit_set_cond_p (const_rtx cond) |
b0656d8b | 3320 | { |
ed7a4b4b KG |
3321 | const enum machine_mode mode = GET_MODE (XEXP (cond, 0)); |
3322 | const_rtx cst = XEXP (cond, 1); | |
b0656d8b JW |
3323 | |
3324 | /* We can't perform this optimization if either operand might be or might | |
3325 | contain a signed zero. */ | |
3326 | if (HONOR_SIGNED_ZEROS (mode)) | |
3327 | { | |
3328 | /* It is sufficient to check if CST is or contains a zero. We must | |
3329 | handle float, complex, and vector. If any subpart is a zero, then | |
3330 | the optimization can't be performed. */ | |
3331 | /* ??? The complex and vector checks are not implemented yet. We just | |
3332 | always return zero for them. */ | |
3333 | if (GET_CODE (cst) == CONST_DOUBLE) | |
3334 | { | |
3335 | REAL_VALUE_TYPE d; | |
3336 | REAL_VALUE_FROM_CONST_DOUBLE (d, cst); | |
3337 | if (REAL_VALUES_EQUAL (d, dconst0)) | |
3338 | return 0; | |
3339 | } | |
3340 | else | |
3341 | return 0; | |
3342 | } | |
3343 | ||
3344 | return gcse_constant_p (cst); | |
3345 | } | |
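/* Example: following "if (x == 0.0)" we may not record the implicit
   set "x = 0.0" when signed zeros are honored, because the
   comparison is also true when x is -0.0, and -0.0 is observably
   different from 0.0 (e.g. 1.0 / x yields -inf rather than inf).  */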
3346 | ||
fbef91d8 RS |
3347 | /* Find the implicit sets of a function. An "implicit set" is a constraint |
3348 | on the value of a variable, implied by a conditional jump. For example, | |
3349 | following "if (x == 2)", the then branch may be optimized as though the | |
3350 | conditional performed an "explicit set", in this example, "x = 2". This | |
3351 | function records the set patterns that are implicit at the start of each | |
3352 | basic block. */ | |
3353 | ||
3354 | static void | |
1d088dee | 3355 | find_implicit_sets (void) |
fbef91d8 RS |
3356 | { |
3357 | basic_block bb, dest; | |
3358 | unsigned int count; | |
3359 | rtx cond, new; | |
3360 | ||
3361 | count = 0; | |
3362 | FOR_EACH_BB (bb) | |
a98ebe2e | 3363 | /* Check for more than one successor. */ |
628f6a4e | 3364 | if (EDGE_COUNT (bb->succs) > 1) |
fbef91d8 | 3365 | { |
a813c111 | 3366 | cond = fis_get_condition (BB_END (bb)); |
fbef91d8 RS |
3367 | |
3368 | if (cond | |
3369 | && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE) | |
7b1b4aed | 3370 | && REG_P (XEXP (cond, 0)) |
fbef91d8 | 3371 | && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER |
b0656d8b | 3372 | && implicit_set_cond_p (cond)) |
fbef91d8 RS |
3373 | { |
3374 | dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest | |
3375 | : FALLTHRU_EDGE (bb)->dest; | |
3376 | ||
c5cbcccf | 3377 | if (dest && single_pred_p (dest) |
fbef91d8 RS |
3378 | && dest != EXIT_BLOCK_PTR) |
3379 | { | |
3380 | new = gen_rtx_SET (VOIDmode, XEXP (cond, 0), | |
3381 | XEXP (cond, 1)); | |
3382 | implicit_sets[dest->index] = new; | |
10d22567 | 3383 | if (dump_file) |
fbef91d8 | 3384 | { |
10d22567 | 3385 | fprintf (dump_file, "Implicit set of reg %d in ",
fbef91d8 | 3386 | REGNO (XEXP (cond, 0))); |
10d22567 | 3387 | fprintf (dump_file, "basic block %d\n", dest->index);
fbef91d8 RS |
3388 | } |
3389 | count++; | |
3390 | } | |
3391 | } | |
3392 | } | |
3393 | ||
10d22567 ZD |
3394 | if (dump_file) |
3395 | fprintf (dump_file, "Found %d implicit sets\n", count); | |
fbef91d8 RS |
3396 | } |
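/* Illustration (hypothetical pseudo and label): if a block ends in

     (set (pc) (if_then_else (eq (reg 68) (const_int 2))
                             (label_ref 23) (pc)))

   and the block at label 23 has that block as its only predecessor,
   we record

     implicit_sets[dest->index] = (set (reg 68) (const_int 2))

   so that hash table construction can treat (reg 68) as the
   constant 2 on entry to the destination block.  */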
3397 | ||
7506f491 | 3398 | /* Perform one copy/constant propagation pass. |
a0134312 RS |
3399 | PASS is the pass count. If CPROP_JUMPS is true, perform constant |
3400 | propagation into conditional jumps. If BYPASS_JUMPS is true, | |
3401 | perform conditional jump bypassing optimizations. */ | |
7506f491 DE |
3402 | |
3403 | static int | |
eb232f4e | 3404 | one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps) |
7506f491 DE |
3405 | { |
3406 | int changed = 0; | |
3407 | ||
27fb79ad SB |
3408 | global_const_prop_count = local_const_prop_count = 0; |
3409 | global_copy_prop_count = local_copy_prop_count = 0; | |
7506f491 | 3410 | |
a52b023a PB |
3411 | if (cprop_jumps) |
3412 | local_cprop_pass (cprop_jumps); | |
ae860ff7 | 3413 | |
fbef91d8 | 3414 | /* Determine implicit sets. */ |
5ed6ace5 | 3415 | implicit_sets = XCNEWVEC (rtx, last_basic_block); |
fbef91d8 RS |
3416 | find_implicit_sets (); |
3417 | ||
02280659 ZD |
3418 | alloc_hash_table (max_cuid, &set_hash_table, 1); |
3419 | compute_hash_table (&set_hash_table); | |
fbef91d8 RS |
3420 | |
3421 | /* Free implicit_sets before peak usage. */ | |
3422 | free (implicit_sets); | |
3423 | implicit_sets = NULL; | |
3424 | ||
10d22567 ZD |
3425 | if (dump_file) |
3426 | dump_hash_table (dump_file, "SET", &set_hash_table); | |
02280659 | 3427 | if (set_hash_table.n_elems > 0) |
7506f491 | 3428 | { |
02280659 | 3429 | alloc_cprop_mem (last_basic_block, set_hash_table.n_elems); |
7506f491 | 3430 | compute_cprop_data (); |
a0134312 RS |
3431 | changed = cprop (cprop_jumps); |
3432 | if (bypass_jumps) | |
0e3f0221 | 3433 | changed |= bypass_conditional_jumps (); |
7506f491 DE |
3434 | free_cprop_mem (); |
3435 | } | |
c4c81601 | 3436 | |
02280659 | 3437 | free_hash_table (&set_hash_table); |
7506f491 | 3438 | |
10d22567 | 3439 | if (dump_file) |
7506f491 | 3440 | { |
10d22567 | 3441 | fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ", |
faed5cc3 | 3442 | current_function_name (), pass, bytes_used); |
10d22567 | 3443 | fprintf (dump_file, "%d local const props, %d local copy props, ", |
27fb79ad | 3444 | local_const_prop_count, local_copy_prop_count); |
10d22567 | 3445 | fprintf (dump_file, "%d global const props, %d global copy props\n\n", |
27fb79ad | 3446 | global_const_prop_count, global_copy_prop_count); |
7506f491 | 3447 | } |
1649d92f JH |
3448 | /* Global analysis may get into infinite loops for unreachable blocks. */ |
3449 | if (changed && cprop_jumps) | |
3450 | delete_unreachable_blocks (); | |
7506f491 DE |
3451 | |
3452 | return changed; | |
3453 | } | |
3454 | \f | |
0e3f0221 RS |
3455 | /* Bypass conditional jumps. */ |
3456 | ||
7821bfc7 RS |
3457 | /* The value of last_basic_block at the beginning of the jump_bypass |
3458 | pass. The use of redirect_edge_and_branch_force may introduce new | |
3459 | basic blocks, but the data flow analysis is only valid for basic | |
3460 | block indices less than bypass_last_basic_block. */ | |
3461 | ||
3462 | static int bypass_last_basic_block; | |
3463 | ||
0e3f0221 RS |
3464 | /* Find a set of REGNO to a constant that is available at the end of basic |
3465 | block BB. Returns NULL if no such set is found. Based heavily upon | |
3466 | find_avail_set. */ | |
3467 | ||
3468 | static struct expr * | |
1d088dee | 3469 | find_bypass_set (int regno, int bb) |
0e3f0221 RS |
3470 | { |
3471 | struct expr *result = 0; | |
3472 | ||
3473 | for (;;) | |
3474 | { | |
3475 | rtx src; | |
ceda50e9 | 3476 | struct expr *set = lookup_set (regno, &set_hash_table); |
0e3f0221 RS |
3477 | |
3478 | while (set) | |
3479 | { | |
3480 | if (TEST_BIT (cprop_avout[bb], set->bitmap_index)) | |
3481 | break; | |
3482 | set = next_set (regno, set); | |
3483 | } | |
3484 | ||
3485 | if (set == 0) | |
3486 | break; | |
3487 | ||
282899df | 3488 | gcc_assert (GET_CODE (set->expr) == SET); |
0e3f0221 RS |
3489 | |
3490 | src = SET_SRC (set->expr); | |
6b2d1c9e | 3491 | if (gcse_constant_p (src)) |
0e3f0221 RS |
3492 | result = set; |
3493 | ||
7b1b4aed | 3494 | if (! REG_P (src)) |
0e3f0221 RS |
3495 | break; |
3496 | ||
3497 | regno = REGNO (src); | |
3498 | } | |
3499 | return result; | |
3500 | } | |
3501 | ||
3502 | ||
e129b3f9 RS |
3503 | /* Subroutine of bypass_block that checks whether a pseudo is killed by |
3504 | any of the instructions inserted on an edge. Jump bypassing places | |
3505 | condition code setters on CFG edges using insert_insn_on_edge. This | |
3506 | function is required to check that our data flow analysis is still | |
3507 | valid prior to commit_edge_insertions. */ | |
3508 | ||
3509 | static bool | |
ed7a4b4b | 3510 | reg_killed_on_edge (const_rtx reg, const_edge e) |
e129b3f9 RS |
3511 | { |
3512 | rtx insn; | |
3513 | ||
6de9cd9a | 3514 | for (insn = e->insns.r; insn; insn = NEXT_INSN (insn)) |
e129b3f9 RS |
3515 | if (INSN_P (insn) && reg_set_p (reg, insn)) |
3516 | return true; | |
3517 | ||
3518 | return false; | |
3519 | } | |
3520 | ||
0e3f0221 RS |
3521 | /* Subroutine of bypass_conditional_jumps that attempts to bypass the given |
3522 | basic block BB which has more than one predecessor. If not NULL, SETCC | |
3523 | is the first instruction of BB, which is immediately followed by JUMP_INSN | |
3524 | JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB. | |
e129b3f9 RS |
3525 | Returns nonzero if a change was made. |
3526 | ||
e0bb17a8 | 3527 | During the jump bypassing pass, we may place copies of SETCC instructions |
e129b3f9 RS |
3528 | on CFG edges. The following routine must be careful to pay attention to |
3529 | these inserted insns when performing its transformations. */ | |
0e3f0221 RS |
3530 | |
3531 | static int | |
1d088dee | 3532 | bypass_block (basic_block bb, rtx setcc, rtx jump) |
0e3f0221 RS |
3533 | { |
3534 | rtx insn, note; | |
628f6a4e | 3535 | edge e, edest; |
818b6b7f | 3536 | int i, change; |
72b8d451 | 3537 | int may_be_loop_header; |
628f6a4e BE |
3538 | unsigned removed_p; |
3539 | edge_iterator ei; | |
0e3f0221 RS |
3540 | |
3541 | insn = (setcc != NULL) ? setcc : jump; | |
3542 | ||
3543 | /* Determine set of register uses in INSN. */ | |
3544 | reg_use_count = 0; | |
3545 | note_uses (&PATTERN (insn), find_used_regs, NULL); | |
3546 | note = find_reg_equal_equiv_note (insn); | |
3547 | if (note) | |
3548 | find_used_regs (&XEXP (note, 0), NULL); | |
3549 | ||
72b8d451 | 3550 | may_be_loop_header = false; |
628f6a4e | 3551 | FOR_EACH_EDGE (e, ei, bb->preds) |
72b8d451 ZD |
3552 | if (e->flags & EDGE_DFS_BACK) |
3553 | { | |
3554 | may_be_loop_header = true; | |
3555 | break; | |
3556 | } | |
3557 | ||
0e3f0221 | 3558 | change = 0; |
628f6a4e | 3559 | for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) |
0e3f0221 | 3560 | { |
628f6a4e BE |
3561 | removed_p = 0; |
3562 | ||
7821bfc7 | 3563 | if (e->flags & EDGE_COMPLEX) |
628f6a4e BE |
3564 | { |
3565 | ei_next (&ei); | |
3566 | continue; | |
3567 | } | |
7821bfc7 RS |
3568 | |
3569 | /* We can't redirect edges from new basic blocks. */ | |
3570 | if (e->src->index >= bypass_last_basic_block) | |
628f6a4e BE |
3571 | { |
3572 | ei_next (&ei); | |
3573 | continue; | |
3574 | } | |
7821bfc7 | 3575 | |
72b8d451 | 3576 | /* The irreducible loops created by redirecting edges entering the
e0bb17a8 KH |
3577 | loop from outside would decrease the effectiveness of some of the
3578 | following optimizations, so prevent this. */
72b8d451 ZD |
3579 | if (may_be_loop_header |
3580 | && !(e->flags & EDGE_DFS_BACK)) | |
628f6a4e BE |
3581 | { |
3582 | ei_next (&ei); | |
3583 | continue; | |
3584 | } | |
72b8d451 | 3585 | |
0e3f0221 RS |
3586 | for (i = 0; i < reg_use_count; i++) |
3587 | { | |
3588 | struct reg_use *reg_used = ®_use_table[i]; | |
589005ff | 3589 | unsigned int regno = REGNO (reg_used->reg_rtx); |
818b6b7f | 3590 | basic_block dest, old_dest; |
589005ff KH |
3591 | struct expr *set; |
3592 | rtx src, new; | |
0e3f0221 | 3593 | |
589005ff KH |
3594 | if (regno >= max_gcse_regno) |
3595 | continue; | |
0e3f0221 | 3596 | |
589005ff | 3597 | set = find_bypass_set (regno, e->src->index); |
0e3f0221 RS |
3598 | |
3599 | if (! set) | |
3600 | continue; | |
3601 | ||
e129b3f9 | 3602 | /* Check the data flow is valid after edge insertions. */ |
6de9cd9a | 3603 | if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e)) |
e129b3f9 RS |
3604 | continue; |
3605 | ||
589005ff | 3606 | src = SET_SRC (pc_set (jump)); |
0e3f0221 RS |
3607 | |
3608 | if (setcc != NULL) | |
3609 | src = simplify_replace_rtx (src, | |
589005ff KH |
3610 | SET_DEST (PATTERN (setcc)), |
3611 | SET_SRC (PATTERN (setcc))); | |
0e3f0221 RS |
3612 | |
3613 | new = simplify_replace_rtx (src, reg_used->reg_rtx, | |
589005ff | 3614 | SET_SRC (set->expr)); |
0e3f0221 | 3615 | |
1d088dee | 3616 | /* Jump bypassing may have already placed instructions on |
e129b3f9 RS |
3617 | edges of the CFG. We can't bypass an outgoing edge that |
3618 | has instructions associated with it, as these insns won't | |
3619 | get executed if the incoming edge is redirected. */ | |
3620 | ||
589005ff | 3621 | if (new == pc_rtx) |
e129b3f9 RS |
3622 | { |
3623 | edest = FALLTHRU_EDGE (bb); | |
6de9cd9a | 3624 | dest = edest->insns.r ? NULL : edest->dest; |
e129b3f9 | 3625 | } |
0e3f0221 | 3626 | else if (GET_CODE (new) == LABEL_REF) |
e129b3f9 RS |
3627 | { |
3628 | dest = BLOCK_FOR_INSN (XEXP (new, 0)); | |
3629 | /* Don't bypass edges containing instructions. */ | |
c7d1b449 KH |
3630 | edest = find_edge (bb, dest); |
3631 | if (edest && edest->insns.r) | |
3632 | dest = NULL; | |
e129b3f9 | 3633 | } |
0e3f0221 RS |
3634 | else |
3635 | dest = NULL; | |
3636 | ||
a544524a JH |
3637 | /* Avoid unification of the edge with other edges from the original
3638 | branch. We would end up emitting the instruction on "both" | |
3639 | edges. */ | |
7b1b4aed | 3640 | |
c7d1b449 KH |
3641 | if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))) |
3642 | && find_edge (e->src, dest)) | |
3643 | dest = NULL; | |
a544524a | 3644 | |
818b6b7f | 3645 | old_dest = e->dest; |
7821bfc7 RS |
3646 | if (dest != NULL |
3647 | && dest != old_dest | |
3648 | && dest != EXIT_BLOCK_PTR) | |
3649 | { | |
3650 | redirect_edge_and_branch_force (e, dest); | |
3651 | ||
818b6b7f | 3652 | /* Copy the register setter to the redirected edge. |
0e3f0221 RS |
3653 | Don't copy CC0 setters, as CC0 is dead after jump. */ |
3654 | if (setcc) | |
3655 | { | |
3656 | rtx pat = PATTERN (setcc); | |
818b6b7f | 3657 | if (!CC0_P (SET_DEST (pat))) |
0e3f0221 RS |
3658 | insert_insn_on_edge (copy_insn (pat), e); |
3659 | } | |
3660 | ||
10d22567 | 3661 | if (dump_file != NULL) |
0e3f0221 | 3662 | { |
10d22567 | 3663 | fprintf (dump_file, "JUMP-BYPASS: Proved reg %d " |
27fb79ad | 3664 | "in jump_insn %d equals constant ", |
818b6b7f | 3665 | regno, INSN_UID (jump)); |
10d22567 ZD |
3666 | print_rtl (dump_file, SET_SRC (set->expr)); |
3667 | fprintf (dump_file, "\nBypass edge from %d->%d to %d\n", | |
818b6b7f | 3668 | e->src->index, old_dest->index, dest->index); |
0e3f0221 RS |
3669 | } |
3670 | change = 1; | |
628f6a4e | 3671 | removed_p = 1; |
0e3f0221 RS |
3672 | break; |
3673 | } | |
3674 | } | |
628f6a4e BE |
3675 | if (!removed_p) |
3676 | ei_next (&ei); | |
0e3f0221 RS |
3677 | } |
3678 | return change; | |
3679 | } | |
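/* Worked example (hypothetical pseudos and label): suppose BB
   consists solely of

     JUMP: (set (pc) (if_then_else (eq (reg 68) (const_int 0))
                                   (label_ref 23) (pc)))

   and one incoming edge comes from a block whose cprop_avout
   contains (set (reg 68) (const_int 0)).  Substituting the constant
   folds the condition to true, so that edge is redirected straight
   to the block at label 23; paths along the other edges still
   execute the conditional jump.  */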
3680 | ||
3681 | /* Find basic blocks with more than one predecessor that only contain a | |
3682 | single conditional jump. If the result of the comparison is known at | |
3683 | compile-time from any incoming edge, redirect that edge to the | |
9a71ece1 RH |
3684 | appropriate target. Returns nonzero if a change was made. |
3685 | ||
3686 | This function is now mis-named, because we also handle indirect jumps. */ | |
0e3f0221 RS |
3687 | |
3688 | static int | |
1d088dee | 3689 | bypass_conditional_jumps (void) |
0e3f0221 RS |
3690 | { |
3691 | basic_block bb; | |
3692 | int changed; | |
3693 | rtx setcc; | |
3694 | rtx insn; | |
3695 | rtx dest; | |
3696 | ||
3697 | /* Note we start at block 1. */ | |
3698 | if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR) | |
3699 | return 0; | |
3700 | ||
7821bfc7 | 3701 | bypass_last_basic_block = last_basic_block; |
72b8d451 | 3702 | mark_dfs_back_edges (); |
7821bfc7 | 3703 | |
0e3f0221 RS |
3704 | changed = 0; |
3705 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, | |
589005ff | 3706 | EXIT_BLOCK_PTR, next_bb) |
0e3f0221 RS |
3707 | { |
3708 | /* Check for more than one predecessor. */ | |
c5cbcccf | 3709 | if (!single_pred_p (bb)) |
0e3f0221 RS |
3710 | { |
3711 | setcc = NULL_RTX; | |
eb232f4e | 3712 | FOR_BB_INSNS (bb, insn) |
4b4bf941 | 3713 | if (NONJUMP_INSN_P (insn)) |
0e3f0221 | 3714 | { |
9543a9d2 | 3715 | if (setcc) |
0e3f0221 | 3716 | break; |
ba4f7968 | 3717 | if (GET_CODE (PATTERN (insn)) != SET) |
0e3f0221 RS |
3718 | break; |
3719 | ||
ba4f7968 | 3720 | dest = SET_DEST (PATTERN (insn)); |
818b6b7f | 3721 | if (REG_P (dest) || CC0_P (dest)) |
0e3f0221 | 3722 | setcc = insn; |
0e3f0221 RS |
3723 | else |
3724 | break; | |
3725 | } | |
7b1b4aed | 3726 | else if (JUMP_P (insn)) |
0e3f0221 | 3727 | { |
9a71ece1 RH |
3728 | if ((any_condjump_p (insn) || computed_jump_p (insn)) |
3729 | && onlyjump_p (insn)) | |
0e3f0221 RS |
3730 | changed |= bypass_block (bb, setcc, insn); |
3731 | break; | |
3732 | } | |
3733 | else if (INSN_P (insn)) | |
3734 | break; | |
3735 | } | |
3736 | } | |
3737 | ||
818b6b7f | 3738 | /* If we bypassed any register setting insns, we inserted a |
fbe5a4a6 | 3739 | copy on the redirected edge. These need to be committed. */ |
0e3f0221 | 3740 | if (changed) |
62e5bf5d | 3741 | commit_edge_insertions (); |
0e3f0221 RS |
3742 | |
3743 | return changed; | |
3744 | } | |
3745 | \f | |
a65f3558 | 3746 | /* Compute PRE+LCM working variables. */ |
7506f491 DE |
3747 | |
3748 | /* Local properties of expressions. */ | |
3749 | /* Nonzero for expressions that are transparent in the block. */ | |
a65f3558 | 3750 | static sbitmap *transp; |
7506f491 | 3751 | |
5c35539b RH |
3752 | /* Nonzero for expressions that are transparent at the end of the block. |
3753 | This is only zero for expressions killed by an abnormal critical edge
3754 | created by a call. */
a65f3558 | 3755 | static sbitmap *transpout; |
5c35539b | 3756 | |
a65f3558 JL |
3757 | /* Nonzero for expressions that are computed (available) in the block. */ |
3758 | static sbitmap *comp; | |
7506f491 | 3759 | |
a65f3558 JL |
3760 | /* Nonzero for expressions that are locally anticipatable in the block. */ |
3761 | static sbitmap *antloc; | |
7506f491 | 3762 | |
a65f3558 JL |
3763 | /* Nonzero for expressions where this block is an optimal computation |
3764 | point. */ | |
3765 | static sbitmap *pre_optimal; | |
5c35539b | 3766 | |
a65f3558 JL |
3767 | /* Nonzero for expressions which are redundant in a particular block. */ |
3768 | static sbitmap *pre_redundant; | |
7506f491 | 3769 | |
a42cd965 AM |
3770 | /* Nonzero for expressions which should be inserted on a specific edge. */ |
3771 | static sbitmap *pre_insert_map; | |
3772 | ||
3773 | /* Nonzero for expressions which should be deleted in a specific block. */ | |
3774 | static sbitmap *pre_delete_map; | |
3775 | ||
3776 | /* Contains the edge_list returned by pre_edge_lcm. */ | |
3777 | static struct edge_list *edge_list; | |
3778 | ||
a65f3558 JL |
3779 | /* Redundant insns. */ |
3780 | static sbitmap pre_redundant_insns; | |
7506f491 | 3781 | |
a65f3558 | 3782 | /* Allocate vars used for PRE analysis. */ |
7506f491 DE |
3783 | |
3784 | static void | |
1d088dee | 3785 | alloc_pre_mem (int n_blocks, int n_exprs) |
7506f491 | 3786 | { |
a65f3558 JL |
3787 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); |
3788 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
3789 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5faf03ae | 3790 | |
a42cd965 AM |
3791 | pre_optimal = NULL; |
3792 | pre_redundant = NULL; | |
3793 | pre_insert_map = NULL; | |
3794 | pre_delete_map = NULL; | |
a42cd965 | 3795 | ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs); |
c4c81601 | 3796 | |
a42cd965 | 3797 | /* pre_insert and pre_delete are allocated later. */ |
7506f491 DE |
3798 | } |
3799 | ||
a65f3558 | 3800 | /* Free vars used for PRE analysis. */ |
7506f491 DE |
3801 | |
3802 | static void | |
1d088dee | 3803 | free_pre_mem (void) |
7506f491 | 3804 | { |
5a660bff DB |
3805 | sbitmap_vector_free (transp); |
3806 | sbitmap_vector_free (comp); | |
bd3675fc JL |
3807 | |
3808 | /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */ | |
7506f491 | 3809 | |
a42cd965 | 3810 | if (pre_optimal) |
5a660bff | 3811 | sbitmap_vector_free (pre_optimal); |
a42cd965 | 3812 | if (pre_redundant) |
5a660bff | 3813 | sbitmap_vector_free (pre_redundant); |
a42cd965 | 3814 | if (pre_insert_map) |
5a660bff | 3815 | sbitmap_vector_free (pre_insert_map); |
a42cd965 | 3816 | if (pre_delete_map) |
5a660bff | 3817 | sbitmap_vector_free (pre_delete_map); |
a42cd965 | 3818 | |
bd3675fc | 3819 | transp = comp = NULL; |
a42cd965 | 3820 | pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL; |
7506f491 DE |
3821 | } |
3822 | ||
3823 | /* Top level routine to do the dataflow analysis needed by PRE. */ | |
3824 | ||
3825 | static void | |
1d088dee | 3826 | compute_pre_data (void) |
7506f491 | 3827 | { |
b614171e | 3828 | sbitmap trapping_expr; |
e0082a72 | 3829 | basic_block bb; |
b614171e | 3830 | unsigned int ui; |
c66e8ae9 | 3831 | |
02280659 | 3832 | compute_local_properties (transp, comp, antloc, &expr_hash_table); |
d55bc081 | 3833 | sbitmap_vector_zero (ae_kill, last_basic_block); |
c66e8ae9 | 3834 | |
b614171e | 3835 | /* Collect expressions which might trap. */ |
02280659 | 3836 | trapping_expr = sbitmap_alloc (expr_hash_table.n_elems); |
b614171e | 3837 | sbitmap_zero (trapping_expr); |
02280659 | 3838 | for (ui = 0; ui < expr_hash_table.size; ui++) |
b614171e MM |
3839 | { |
3840 | struct expr *e; | |
02280659 | 3841 | for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash) |
b614171e MM |
3842 | if (may_trap_p (e->expr)) |
3843 | SET_BIT (trapping_expr, e->bitmap_index); | |
3844 | } | |
3845 | ||
c66e8ae9 JL |
3846 | /* Compute ae_kill for each basic block using: |
3847 | ||
3848 | ~(TRANSP | COMP) | |
e83f4801 | 3849 | */ |
c66e8ae9 | 3850 | |
e0082a72 | 3851 | FOR_EACH_BB (bb) |
c66e8ae9 | 3852 | { |
b614171e | 3853 | edge e; |
628f6a4e | 3854 | edge_iterator ei; |
b614171e MM |
3855 | |
3856 | /* If the current block is the destination of an abnormal edge, we | |
3857 | kill all trapping expressions because we won't be able to properly | |
3858 | place the instruction on the edge. So make them neither | |
3859 | anticipatable nor transparent. This is fairly conservative. */ | |
628f6a4e | 3860 | FOR_EACH_EDGE (e, ei, bb->preds) |
b614171e MM |
3861 | if (e->flags & EDGE_ABNORMAL) |
3862 | { | |
e0082a72 ZD |
3863 | sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr); |
3864 | sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr); | |
b614171e MM |
3865 | break; |
3866 | } | |
3867 | ||
e0082a72 ZD |
3868 | sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]); |
3869 | sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]); | |
c66e8ae9 JL |
3870 | } |
3871 | ||
10d22567 | 3872 | edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc, |
a42cd965 | 3873 | ae_kill, &pre_insert_map, &pre_delete_map); |
5a660bff | 3874 | sbitmap_vector_free (antloc); |
bd3675fc | 3875 | antloc = NULL; |
5a660bff | 3876 | sbitmap_vector_free (ae_kill); |
589005ff | 3877 | ae_kill = NULL; |
76ac938b | 3878 | sbitmap_free (trapping_expr); |
7506f491 DE |
3879 | } |
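/* Worked example of the AE_KILL computation above, for a single
   expression (hypothetical pseudos): consider (plus:SI (reg 66)
   (reg 67)) in a block that assigns to (reg 66) and never
   recomputes the sum afterwards.  Then TRANSP = 0 (an operand is
   modified) and COMP = 0 (no computation reaches the block end), so
   AE_KILL = ~(0 | 0) = 1: the block kills the expression.  If the
   sum were recomputed after the assignment, COMP would be 1 and
   AE_KILL would be 0.  */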
3880 | \f | |
3881 | /* PRE utilities */ | |
3882 | ||
cc2902df | 3883 | /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach |
a65f3558 | 3884 | block BB. |
7506f491 DE |
3885 | |
3886 | VISITED is a pointer to a working buffer for tracking which BB's have | |
3887 | been visited. It is NULL for the top-level call. | |
3888 | ||
3889 | We treat reaching expressions that go through blocks containing the same | |
3890 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
3891 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
3892 | 2 as not reaching. The intent is to improve the probability of finding | |
3893 | only one reaching expression and to reduce register lifetimes by picking | |
3894 | the closest such expression. */ | |
3895 | ||
3896 | static int | |
1d088dee | 3897 | pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited) |
7506f491 | 3898 | { |
36349f8b | 3899 | edge pred; |
628f6a4e BE |
3900 | edge_iterator ei; |
3901 | ||
3902 | FOR_EACH_EDGE (pred, ei, bb->preds) | |
7506f491 | 3903 | { |
e2d2ed72 | 3904 | basic_block pred_bb = pred->src; |
7506f491 | 3905 | |
36349f8b | 3906 | if (pred->src == ENTRY_BLOCK_PTR |
7506f491 | 3907 | /* Has this predecessor already been visited? */
0b17ab2f | 3908 | || visited[pred_bb->index]) |
c4c81601 RK |
3909 | ;/* Nothing to do. */ |
3910 | ||
7506f491 | 3911 | /* Does this predecessor generate this expression? */ |
0b17ab2f | 3912 | else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index)) |
7506f491 DE |
3913 | { |
3914 | /* Is this the occurrence we're looking for? | |
3915 | Note that there's only one generating occurrence per block | |
3916 | so we just need to check the block number. */ | |
a65f3558 | 3917 | if (occr_bb == pred_bb) |
7506f491 | 3918 | return 1; |
c4c81601 | 3919 | |
0b17ab2f | 3920 | visited[pred_bb->index] = 1; |
7506f491 DE |
3921 | } |
3922 | /* Ignore this predecessor if it kills the expression. */ | |
0b17ab2f RH |
3923 | else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index)) |
3924 | visited[pred_bb->index] = 1; | |
c4c81601 | 3925 | |
7506f491 DE |
3926 | /* Neither gen nor kill. */ |
3927 | else | |
ac7c5af5 | 3928 | { |
0b17ab2f | 3929 | visited[pred_bb->index] = 1; |
89e606c9 | 3930 | if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited)) |
7506f491 | 3931 | return 1; |
ac7c5af5 | 3932 | } |
7506f491 DE |
3933 | } |
3934 | ||
3935 | /* All paths have been checked. */ | |
3936 | return 0; | |
3937 | } | |
283a2545 RL |
3938 | |
3939 | /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 | 3940 | memory allocated by that function is freed. */
283a2545 RL |
3941 | |
3942 | static int | |
1d088dee | 3943 | pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb) |
283a2545 RL |
3944 | { |
3945 | int rval; | |
5ed6ace5 | 3946 | char *visited = XCNEWVEC (char, last_basic_block); |
283a2545 | 3947 | |
8e42ace1 | 3948 | rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited); |
283a2545 RL |
3949 | |
3950 | free (visited); | |
c4c81601 | 3951 | return rval; |
283a2545 | 3952 | } |
7506f491 | 3953 | \f |
a42cd965 AM |
3954 | |
3955 | /* Given an expr, generate RTL which we can insert at the end of a BB, | |
589005ff | 3956 | or on an edge. Set the block number of any insns generated to |
a42cd965 AM |
3957 | the value of BB. */ |
3958 | ||
3959 | static rtx | |
1d088dee | 3960 | process_insert_insn (struct expr *expr) |
a42cd965 AM |
3961 | { |
3962 | rtx reg = expr->reaching_reg; | |
fb0c0a12 RK |
3963 | rtx exp = copy_rtx (expr->expr); |
3964 | rtx pat; | |
a42cd965 AM |
3965 | |
3966 | start_sequence (); | |
fb0c0a12 RK |
3967 | |
3968 | /* If the expression is something that's an operand, like a constant, | |
3969 | just copy it to a register. */ | |
3970 | if (general_operand (exp, GET_MODE (reg))) | |
3971 | emit_move_insn (reg, exp); | |
3972 | ||
3973 | /* Otherwise, make a new insn to compute this expression and make sure the | |
3974 | insn will be recognized (this also adds any needed CLOBBERs). Copy the | |
3975 | expression to make sure we don't have any sharing issues. */ | |
282899df NS |
3976 | else |
3977 | { | |
3978 | rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp)); | |
3979 | ||
2f021b67 AP |
3980 | if (insn_invalid_p (insn)) |
3981 | gcc_unreachable (); | |
282899df NS |
3982 | } |
3983 | ||
589005ff | 3984 | |
2f937369 | 3985 | pat = get_insns (); |
a42cd965 AM |
3986 | end_sequence (); |
3987 | ||
3988 | return pat; | |
3989 | } | |
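/* Illustration (hypothetical pseudos): if EXPR->expr is
   (plus:SI (reg 66) (reg 67)) and EXPR->reaching_reg is (reg 70),
   the returned sequence is the recognized form of

     (set (reg 70) (plus:SI (reg 66) (reg 67)))

   including any CLOBBERs that recognition adds.  For a constant
   EXPR the sequence is whatever emit_move_insn produces, which may
   be more than one insn on some targets.  */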
589005ff | 3990 | |
a65f3558 JL |
3991 | /* Add EXPR to the end of basic block BB. |
3992 | ||
3993 | This is used by both PRE and code hoisting.
3994 | ||
3995 | For PRE, we want to verify that the expr is either transparent | |
3996 | or locally anticipatable in the target block. This check makes | |
3997 | no sense for code hoisting. */ | |
7506f491 DE |
3998 | |
3999 | static void | |
6fb5fa3c | 4000 | insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre) |
7506f491 | 4001 | { |
a813c111 | 4002 | rtx insn = BB_END (bb); |
7506f491 DE |
4003 | rtx new_insn; |
4004 | rtx reg = expr->reaching_reg; | |
4005 | int regno = REGNO (reg); | |
2f937369 | 4006 | rtx pat, pat_end; |
7506f491 | 4007 | |
a42cd965 | 4008 | pat = process_insert_insn (expr); |
282899df | 4009 | gcc_assert (pat && INSN_P (pat)); |
2f937369 DM |
4010 | |
4011 | pat_end = pat; | |
4012 | while (NEXT_INSN (pat_end) != NULL_RTX) | |
4013 | pat_end = NEXT_INSN (pat_end); | |
7506f491 DE |
4014 | |
4015 | /* If the last insn is a jump, insert EXPR in front [taking care to | |
4d6922ee | 4016 | handle cc0, etc. properly]. Similarly we need to take care of trapping
068473ec | 4017 | instructions in the presence of non-call exceptions. */
7506f491 | 4018 | |
7b1b4aed | 4019 | if (JUMP_P (insn) |
4b4bf941 | 4020 | || (NONJUMP_INSN_P (insn) |
c5cbcccf ZD |
4021 | && (!single_succ_p (bb) |
4022 | || single_succ_edge (bb)->flags & EDGE_ABNORMAL))) | |
7506f491 | 4023 | { |
50b2596f | 4024 | #ifdef HAVE_cc0 |
7506f491 | 4025 | rtx note; |
50b2596f | 4026 | #endif |
068473ec JH |
4027 | /* It should always be the case that we can put these instructions |
4028 | anywhere in the basic block when performing PRE optimizations.
4029 | Check this. */ | |
282899df NS |
4030 | gcc_assert (!NONJUMP_INSN_P (insn) || !pre |
4031 | || TEST_BIT (antloc[bb->index], expr->bitmap_index) | |
4032 | || TEST_BIT (transp[bb->index], expr->bitmap_index)); | |
7506f491 DE |
4033 | |
4034 | /* If this is a jump table, then we can't insert stuff here. Since | |
4035 | we know the previous real insn must be the tablejump, we insert | |
4036 | the new instruction just before the tablejump. */ | |
4037 | if (GET_CODE (PATTERN (insn)) == ADDR_VEC | |
4038 | || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC) | |
4039 | insn = prev_real_insn (insn); | |
4040 | ||
4041 | #ifdef HAVE_cc0 | |
4042 | /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts | |
4043 | if cc0 isn't set. */ | |
4044 | note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); | |
4045 | if (note) | |
4046 | insn = XEXP (note, 0); | |
4047 | else | |
4048 | { | |
4049 | rtx maybe_cc0_setter = prev_nonnote_insn (insn); | |
4050 | if (maybe_cc0_setter | |
2c3c49de | 4051 | && INSN_P (maybe_cc0_setter) |
7506f491 DE |
4052 | && sets_cc0_p (PATTERN (maybe_cc0_setter))) |
4053 | insn = maybe_cc0_setter; | |
4054 | } | |
4055 | #endif | |
4056 | /* FIXME: What if something in cc0/jump uses value set in new insn? */ | |
6fb5fa3c | 4057 | new_insn = emit_insn_before_noloc (pat, insn, bb); |
3947e2f9 | 4058 | } |
c4c81601 | 4059 | |
3947e2f9 RH |
4060 | /* Likewise if the last insn is a call, as will happen in the presence |
4061 | of exception handling. */ | |
7b1b4aed | 4062 | else if (CALL_P (insn) |
c5cbcccf ZD |
4063 | && (!single_succ_p (bb) |
4064 | || single_succ_edge (bb)->flags & EDGE_ABNORMAL)) | |
3947e2f9 | 4065 | { |
3947e2f9 RH |
4066 | /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers, |
4067 | we search backward and place the instructions before the first | |
4068 | parameter is loaded.  Do this for everyone for consistency, on the | |
fbe5a4a6 | 4069 | presumption that we'll get better code elsewhere as well. | |
3947e2f9 | 4070 | |
c4c81601 | 4071 | It should always be the case that we can put these instructions |
a65f3558 JL |
4072 | anywhere in the basic block when performing PRE optimizations. | |
4073 | Check this. */ | |
c4c81601 | 4074 | |
282899df NS |
4075 | gcc_assert (!pre |
4076 | || TEST_BIT (antloc[bb->index], expr->bitmap_index) | |
4077 | || TEST_BIT (transp[bb->index], expr->bitmap_index)); | |
3947e2f9 RH |
4078 | |
4079 | /* Since different machines initialize their parameter registers | |
4080 | in different orders, assume nothing. Collect the set of all | |
4081 | parameter registers. */ | |
a813c111 | 4082 | insn = find_first_parameter_load (insn, BB_HEAD (bb)); |
3947e2f9 | 4083 | |
b1d26727 JL |
4084 | /* If we found all the parameter loads, then we want to insert |
4085 | before the first parameter load. | |
4086 | ||
4087 | If we did not find all the parameter loads, then we might have | |
4088 | stopped on the head of the block, which could be a CODE_LABEL. | |
4089 | If we inserted before the CODE_LABEL, then we would be putting | |
4090 | the insn in the wrong basic block. In that case, put the insn | |
b5229628 | 4091 | after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */ |
7b1b4aed | 4092 | while (LABEL_P (insn) |
589ca5cb | 4093 | || NOTE_INSN_BASIC_BLOCK_P (insn)) |
b5229628 | 4094 | insn = NEXT_INSN (insn); |
c4c81601 | 4095 | |
6fb5fa3c | 4096 | new_insn = emit_insn_before_noloc (pat, insn, bb); |
7506f491 DE |
4097 | } |
4098 | else | |
6fb5fa3c | 4099 | new_insn = emit_insn_after_noloc (pat, insn, bb); |
7506f491 | 4100 | |
2f937369 | 4101 | while (1) |
a65f3558 | 4102 | { |
2f937369 | 4103 | if (INSN_P (pat)) |
a65f3558 | 4104 | { |
2f937369 DM |
4105 | add_label_notes (PATTERN (pat), new_insn); |
4106 | note_stores (PATTERN (pat), record_set_info, pat); | |
a65f3558 | 4107 | } |
2f937369 DM |
4108 | if (pat == pat_end) |
4109 | break; | |
4110 | pat = NEXT_INSN (pat); | |
a65f3558 | 4111 | } |
3947e2f9 | 4112 | |
7506f491 DE |
4113 | gcse_create_count++; |
4114 | ||
10d22567 | 4115 | if (dump_file) |
7506f491 | 4116 | { |
10d22567 | 4117 | fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ", |
0b17ab2f | 4118 | bb->index, INSN_UID (new_insn)); |
10d22567 | 4119 | fprintf (dump_file, "copying expression %d to reg %d\n", |
c4c81601 | 4120 | expr->bitmap_index, regno); |
7506f491 DE |
4121 | } |
4122 | } | |
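/* A sketch of the placement logic above for a cc0 target; the insns are
   hypothetical.  Given a block ending in

       (set (cc0) (compare (reg 60) (const_int 0)))
       (set (pc) (if_then_else (ne (cc0) (const_int 0)) ...))

   the new sequence must be emitted before the cc0 setter, since on cc0
   machines the setter and its user must remain adjacent; this is why the
   REG_CC_SETTER note and sets_cc0_p are consulted.  */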
4123 | ||
a42cd965 AM |
4124 | /* Insert partially redundant expressions on edges in the CFG to make |
4125 | the expressions fully redundant. */ | |
7506f491 | 4126 | |
a42cd965 | 4127 | static int |
1d088dee | 4128 | pre_edge_insert (struct edge_list *edge_list, struct expr **index_map) |
7506f491 | 4129 | { |
c4c81601 | 4130 | int e, i, j, num_edges, set_size, did_insert = 0; |
a65f3558 JL |
4131 | sbitmap *inserted; |
4132 | ||
a42cd965 AM |
4133 | /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge |
4134 | if it reaches any of the deleted expressions. */ | |
7506f491 | 4135 | |
a42cd965 AM |
4136 | set_size = pre_insert_map[0]->size; |
4137 | num_edges = NUM_EDGES (edge_list); | |
02280659 | 4138 | inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems); |
a42cd965 | 4139 | sbitmap_vector_zero (inserted, num_edges); |
7506f491 | 4140 | |
a42cd965 | 4141 | for (e = 0; e < num_edges; e++) |
7506f491 DE |
4142 | { |
4143 | int indx; | |
e2d2ed72 | 4144 | basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e); |
a65f3558 | 4145 | |
a65f3558 | 4146 | for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS) |
7506f491 | 4147 | { |
a42cd965 | 4148 | SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i]; |
7506f491 | 4149 | |
02280659 | 4150 | for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1) |
c4c81601 RK |
4151 | if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX) |
4152 | { | |
4153 | struct expr *expr = index_map[j]; | |
4154 | struct occr *occr; | |
a65f3558 | 4155 | |
ff7cc307 | 4156 | /* Now look at each deleted occurrence of this expression. */ |
c4c81601 RK |
4157 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) |
4158 | { | |
4159 | if (! occr->deleted_p) | |
4160 | continue; | |
4161 | ||
3f117656 | 4162 | /* Insert this expression on this edge if it would |
ff7cc307 | 4163 | reach the deleted occurrence in BB. */ |
c4c81601 RK |
4164 | if (!TEST_BIT (inserted[e], j)) |
4165 | { | |
4166 | rtx insn; | |
4167 | edge eg = INDEX_EDGE (edge_list, e); | |
4168 | ||
4169 | /* We can't insert anything on an abnormal and | |
4170 | critical edge, so we insert the insn at the end of | |
4171 | the previous block. There are several alternatives | |
4172 | detailed in Morgan's book, p. 277 (sec. 10.5), for | |
4173 | handling this situation. This one is easiest for | |
4174 | now. */ | |
4175 | ||
b16aa8a5 | 4176 | if (eg->flags & EDGE_ABNORMAL) |
6fb5fa3c | 4177 | insert_insn_end_basic_block (index_map[j], bb, 0); |
c4c81601 RK |
4178 | else |
4179 | { | |
4180 | insn = process_insert_insn (index_map[j]); | |
4181 | insert_insn_on_edge (insn, eg); | |
4182 | } | |
4183 | ||
10d22567 | 4184 | if (dump_file) |
c4c81601 | 4185 | { |
10d22567 | 4186 | fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ", |
0b17ab2f RH |
4187 | bb->index, |
4188 | INDEX_EDGE_SUCC_BB (edge_list, e)->index); | |
10d22567 | 4189 | fprintf (dump_file, "copy expression %d\n", |
c4c81601 RK |
4190 | expr->bitmap_index); |
4191 | } | |
4192 | ||
a13d4ebf | 4193 | update_ld_motion_stores (expr); |
c4c81601 RK |
4194 | SET_BIT (inserted[e], j); |
4195 | did_insert = 1; | |
4196 | gcse_create_count++; | |
4197 | } | |
4198 | } | |
4199 | } | |
7506f491 DE |
4200 | } |
4201 | } | |
5faf03ae | 4202 | |
5a660bff | 4203 | sbitmap_vector_free (inserted); |
a42cd965 | 4204 | return did_insert; |
7506f491 DE |
4205 | } |
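/* A worked example (illustrative) of what the routine above accomplishes.
   In

       if (cond)
         x = a + b;      <- block B1 computes a+b
       y = a + b;        <- join block B2, partially redundant

   pre_delete rewrites the computation in B2 to use the reaching register,
   and where PRE_INSERT_MAP is set we insert

       reaching_reg = a + b

   on the edge entering B2 from the "cond false" side (or, if that edge is
   abnormal, at the end of its source block), making the expression fully
   redundant.  */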
4206 | ||
073089a7 | 4207 | /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG. |
b885908b MH |
4208 | Given "old_reg <- expr" (INSN), instead of adding after it |
4209 | reaching_reg <- old_reg | |
4210 | it's better to do the following: | |
4211 | reaching_reg <- expr | |
4212 | old_reg <- reaching_reg | |
4213 | because this way copy propagation can discover additional PRE | |
f5f2e3cd MH |
4214 | opportunities. But if this fails, we try the old way. |
4215 | When "expr" is a store, i.e. | |
4216 | given "MEM <- old_reg", instead of adding after it | |
4217 | reaching_reg <- old_reg | |
4218 | it's better to add it before as follows: | |
4219 | reaching_reg <- old_reg | |
4220 | MEM <- reaching_reg. */ | |
7506f491 DE |
4221 | |
4222 | static void | |
1d088dee | 4223 | pre_insert_copy_insn (struct expr *expr, rtx insn) |
7506f491 DE |
4224 | { |
4225 | rtx reg = expr->reaching_reg; | |
4226 | int regno = REGNO (reg); | |
4227 | int indx = expr->bitmap_index; | |
073089a7 | 4228 | rtx pat = PATTERN (insn); |
64068ca2 | 4229 | rtx set, first_set, new_insn; |
b885908b | 4230 | rtx old_reg; |
073089a7 | 4231 | int i; |
7506f491 | 4232 | |
073089a7 | 4233 | /* This block matches the logic in hash_scan_insn. */ |
282899df | 4234 | switch (GET_CODE (pat)) |
073089a7 | 4235 | { |
282899df NS |
4236 | case SET: |
4237 | set = pat; | |
4238 | break; | |
4239 | ||
4240 | case PARALLEL: | |
073089a7 RS |
4241 | /* Search through the parallel looking for the set whose |
4242 | source was the expression that we're interested in. */ | |
64068ca2 | 4243 | first_set = NULL_RTX; |
073089a7 RS |
4244 | set = NULL_RTX; |
4245 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
4246 | { | |
4247 | rtx x = XVECEXP (pat, 0, i); | |
64068ca2 | 4248 | if (GET_CODE (x) == SET) |
073089a7 | 4249 | { |
64068ca2 RS |
4250 | /* If the source was a REG_EQUAL or REG_EQUIV note, we |
4251 | may not find an equivalent expression, but in this | |
4252 | case the PARALLEL will have a single set. */ | |
4253 | if (first_set == NULL_RTX) | |
4254 | first_set = x; | |
4255 | if (expr_equiv_p (SET_SRC (x), expr->expr)) | |
4256 | { | |
4257 | set = x; | |
4258 | break; | |
4259 | } | |
073089a7 RS |
4260 | } |
4261 | } | |
64068ca2 RS |
4262 | |
4263 | gcc_assert (first_set); | |
4264 | if (set == NULL_RTX) | |
4265 | set = first_set; | |
282899df NS |
4266 | break; |
4267 | ||
4268 | default: | |
4269 | gcc_unreachable (); | |
073089a7 | 4270 | } |
c4c81601 | 4271 | |
7b1b4aed | 4272 | if (REG_P (SET_DEST (set))) |
073089a7 | 4273 | { |
f5f2e3cd MH |
4274 | old_reg = SET_DEST (set); |
4275 | /* Check if we can modify the set destination in the original insn. */ | |
4276 | if (validate_change (insn, &SET_DEST (set), reg, 0)) | |
4277 | { | |
4278 | new_insn = gen_move_insn (old_reg, reg); | |
4279 | new_insn = emit_insn_after (new_insn, insn); | |
4280 | ||
4281 | /* Keep register set table up to date. */ | |
f5f2e3cd MH |
4282 | record_one_set (regno, insn); |
4283 | } | |
4284 | else | |
4285 | { | |
4286 | new_insn = gen_move_insn (reg, old_reg); | |
4287 | new_insn = emit_insn_after (new_insn, insn); | |
073089a7 | 4288 | |
f5f2e3cd MH |
4289 | /* Keep register set table up to date. */ |
4290 | record_one_set (regno, new_insn); | |
4291 | } | |
073089a7 | 4292 | } |
f5f2e3cd | 4293 | else /* This is possible only in case of a store to memory. */ |
073089a7 | 4294 | { |
f5f2e3cd | 4295 | old_reg = SET_SRC (set); |
073089a7 | 4296 | new_insn = gen_move_insn (reg, old_reg); |
f5f2e3cd MH |
4297 | |
4298 | /* Check if we can modify the set source in the original insn. */ | |
4299 | if (validate_change (insn, &SET_SRC (set), reg, 0)) | |
4300 | new_insn = emit_insn_before (new_insn, insn); | |
4301 | else | |
4302 | new_insn = emit_insn_after (new_insn, insn); | |
c4c81601 | 4303 | |
073089a7 RS |
4304 | /* Keep register set table up to date. */ |
4305 | record_one_set (regno, new_insn); | |
4306 | } | |
7506f491 DE |
4307 | |
4308 | gcse_create_count++; | |
4309 | ||
10d22567 ZD |
4310 | if (dump_file) |
4311 | fprintf (dump_file, | |
a42cd965 AM |
4312 | "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n", |
4313 | BLOCK_NUM (insn), INSN_UID (new_insn), indx, | |
4314 | INSN_UID (insn), regno); | |
7506f491 DE |
4315 | } |
4316 | ||
4317 | /* Copy available expressions that reach the redundant expression | |
4318 | to `reaching_reg'. */ | |
4319 | ||
4320 | static void | |
1d088dee | 4321 | pre_insert_copies (void) |
7506f491 | 4322 | { |
f5f2e3cd | 4323 | unsigned int i, added_copy; |
c4c81601 RK |
4324 | struct expr *expr; |
4325 | struct occr *occr; | |
4326 | struct occr *avail; | |
a65f3558 | 4327 | |
7506f491 DE |
4328 | /* For each available expression in the table, copy the result to |
4329 | `reaching_reg' if the expression reaches a deleted one. | |
4330 | ||
4331 | ??? The current algorithm is rather brute force. | |
4332 | Need to do some profiling. */ | |
4333 | ||
02280659 ZD |
4334 | for (i = 0; i < expr_hash_table.size; i++) |
4335 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 RK |
4336 | { |
4337 | /* If the basic block isn't reachable, PPOUT will be TRUE. However, | |
4338 | we don't want to insert a copy here because the expression may not | |
4339 | really be redundant. So only insert an insn if the expression was | |
4340 | deleted. This test also avoids further processing if the | |
4341 | expression wasn't deleted anywhere. */ | |
4342 | if (expr->reaching_reg == NULL) | |
4343 | continue; | |
7b1b4aed | 4344 | |
f5f2e3cd | 4345 | /* Set when we add a copy for that expression. */ |
7b1b4aed | 4346 | added_copy = 0; |
c4c81601 RK |
4347 | |
4348 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) | |
4349 | { | |
4350 | if (! occr->deleted_p) | |
4351 | continue; | |
7506f491 | 4352 | |
c4c81601 RK |
4353 | for (avail = expr->avail_occr; avail != NULL; avail = avail->next) |
4354 | { | |
4355 | rtx insn = avail->insn; | |
7506f491 | 4356 | |
c4c81601 RK |
4357 | /* No need to handle this one if handled already. */ |
4358 | if (avail->copied_p) | |
4359 | continue; | |
7506f491 | 4360 | |
c4c81601 RK |
4361 | /* Don't handle this one if it's a redundant one. */ |
4362 | if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn))) | |
4363 | continue; | |
7506f491 | 4364 | |
c4c81601 | 4365 | /* Or if the expression doesn't reach the deleted one. */ |
589005ff | 4366 | if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn), |
e2d2ed72 AM |
4367 | expr, |
4368 | BLOCK_FOR_INSN (occr->insn))) | |
c4c81601 | 4369 | continue; |
7506f491 | 4370 | |
f5f2e3cd MH |
4371 | added_copy = 1; |
4372 | ||
c4c81601 RK |
4373 | /* Copy the result of avail to reaching_reg. */ |
4374 | pre_insert_copy_insn (expr, insn); | |
4375 | avail->copied_p = 1; | |
4376 | } | |
4377 | } | |
f5f2e3cd | 4378 | |
7b1b4aed | 4379 | if (added_copy) |
f5f2e3cd | 4380 | update_ld_motion_stores (expr); |
c4c81601 | 4381 | } |
7506f491 DE |
4382 | } |
4383 | ||
10d1bb36 JH |
4384 | /* Emit move from SRC to DEST noting the equivalence with expression computed |
4385 | in INSN. */ | |
4386 | static rtx | |
1d088dee | 4387 | gcse_emit_move_after (rtx src, rtx dest, rtx insn) |
10d1bb36 JH |
4388 | { |
4389 | rtx new; | |
6bdb8dd6 | 4390 | rtx set = single_set (insn), set2; |
10d1bb36 JH |
4391 | rtx note; |
4392 | rtx eqv; | |
4393 | ||
4394 | /* This should never fail since we're creating a reg->reg copy | |
4395 | we've verified to be valid. */ | |
4396 | ||
6bdb8dd6 | 4397 | new = emit_insn_after (gen_move_insn (dest, src), insn); |
285464d0 | 4398 | |
10d1bb36 | 4399 | /* Note the equivalence for local CSE pass. */ |
6bdb8dd6 JH |
4400 | set2 = single_set (new); |
4401 | if (!set2 || !rtx_equal_p (SET_DEST (set2), dest)) | |
4402 | return new; | |
10d1bb36 JH |
4403 | if ((note = find_reg_equal_equiv_note (insn))) |
4404 | eqv = XEXP (note, 0); | |
4405 | else | |
4406 | eqv = SET_SRC (set); | |
4407 | ||
a500466b | 4408 | set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv)); |
10d1bb36 JH |
4409 | |
4410 | return new; | |
4411 | } | |
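/* For instance (hypothetical register numbers): given INSN

       (set (reg 63) (plus:SI (reg 58) (reg 59)))

   the call gcse_emit_move_after (reg70, reg63, insn) made by pre_delete
   emits

       (set (reg 63) (reg 70))

   after INSN and attaches a REG_EQUAL note for the PLUS to the copy, so
   the equivalence survives INSN's deletion.  */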
4412 | ||
7506f491 | 4413 | /* Delete redundant computations. |
7506f491 DE |
4414 | Deletion is done by changing the insn to copy the `reaching_reg' of |
4415 | the expression into the result of the SET. It is left to later passes | |
4416 | (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it. | |
4417 | ||
cc2902df | 4418 | Returns nonzero if a change is made. */ |
7506f491 DE |
4419 | |
4420 | static int | |
1d088dee | 4421 | pre_delete (void) |
7506f491 | 4422 | { |
2e653e39 | 4423 | unsigned int i; |
63bc1d05 | 4424 | int changed; |
c4c81601 RK |
4425 | struct expr *expr; |
4426 | struct occr *occr; | |
a65f3558 | 4427 | |
7506f491 | 4428 | changed = 0; |
02280659 | 4429 | for (i = 0; i < expr_hash_table.size; i++) |
073089a7 RS |
4430 | for (expr = expr_hash_table.table[i]; |
4431 | expr != NULL; | |
4432 | expr = expr->next_same_hash) | |
c4c81601 RK |
4433 | { |
4434 | int indx = expr->bitmap_index; | |
7506f491 | 4435 | |
c4c81601 RK |
4436 | /* We only need to search antic_occr since we require |
4437 | ANTLOC != 0. */ | |
7506f491 | 4438 | |
c4c81601 RK |
4439 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) |
4440 | { | |
4441 | rtx insn = occr->insn; | |
4442 | rtx set; | |
e2d2ed72 | 4443 | basic_block bb = BLOCK_FOR_INSN (insn); |
7506f491 | 4444 | |
073089a7 RS |
4445 | /* We only delete insns that have a single_set. */ |
4446 | if (TEST_BIT (pre_delete_map[bb->index], indx) | |
6fb5fa3c DB |
4447 | && (set = single_set (insn)) != 0 |
4448 | && dbg_cnt (pre_insn)) | |
c4c81601 | 4449 | { |
c4c81601 RK |
4450 | /* Create a pseudo-reg to store the result of reaching |
4451 | expressions into. Get the mode for the new pseudo from | |
4452 | the mode of the original destination pseudo. */ | |
4453 | if (expr->reaching_reg == NULL) | |
4454 | expr->reaching_reg | |
4455 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
4456 | ||
9b76aa3b | 4457 | gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn); |
10d1bb36 JH |
4458 | delete_insn (insn); |
4459 | occr->deleted_p = 1; | |
4460 | SET_BIT (pre_redundant_insns, INSN_CUID (insn)); | |
4461 | changed = 1; | |
4462 | gcse_subst_count++; | |
7506f491 | 4463 | |
10d22567 | 4464 | if (dump_file) |
c4c81601 | 4465 | { |
10d22567 | 4466 | fprintf (dump_file, |
c4c81601 RK |
4467 | "PRE: redundant insn %d (expression %d) in ", |
4468 | INSN_UID (insn), indx); | |
10d22567 | 4469 | fprintf (dump_file, "bb %d, reaching reg is %d\n", |
0b17ab2f | 4470 | bb->index, REGNO (expr->reaching_reg)); |
c4c81601 RK |
4471 | } |
4472 | } | |
4473 | } | |
4474 | } | |
7506f491 DE |
4475 | |
4476 | return changed; | |
4477 | } | |
4478 | ||
4479 | /* Perform GCSE optimizations using PRE. | |
4480 | This is called by one_pre_gcse_pass after all the dataflow analysis | |
4481 | has been done. | |
4482 | ||
c4c81601 RK |
4483 | This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and | |
4484 | lazy code motion from Knoop, Ruthing and Steffen as described in Advanced | |
4485 | Compiler Design and Implementation. | |
7506f491 | 4486 | |
c4c81601 RK |
4487 | ??? A new pseudo reg is created to hold the reaching expression. The nice |
4488 | thing about the classical approach is that it would try to use an existing | |
4489 | reg. If the register can't be adequately optimized [i.e. we introduce | |
4490 | reload problems], one could add a pass here to propagate the new register | |
4491 | through the block. | |
7506f491 | 4492 | |
c4c81601 RK |
4493 | ??? We don't handle single sets in PARALLELs because we're [currently] not |
4494 | able to copy the rest of the parallel when we insert copies to create full | |
4495 | redundancies from partial redundancies. However, there's no reason why we | |
4496 | can't handle PARALLELs in the cases where there are no partial | |
7506f491 DE |
4497 | redundancies. */ |
4498 | ||
4499 | static int | |
1d088dee | 4500 | pre_gcse (void) |
7506f491 | 4501 | { |
2e653e39 RK |
4502 | unsigned int i; |
4503 | int did_insert, changed; | |
7506f491 | 4504 | struct expr **index_map; |
c4c81601 | 4505 | struct expr *expr; |
7506f491 DE |
4506 | |
4507 | /* Compute a mapping from expression number (`bitmap_index') to | |
4508 | hash table entry. */ | |
4509 | ||
5ed6ace5 | 4510 | index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems); |
02280659 ZD |
4511 | for (i = 0; i < expr_hash_table.size; i++) |
4512 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 | 4513 | index_map[expr->bitmap_index] = expr; |
7506f491 DE |
4514 | |
4515 | /* Reset bitmap used to track which insns are redundant. */ | |
a65f3558 JL |
4516 | pre_redundant_insns = sbitmap_alloc (max_cuid); |
4517 | sbitmap_zero (pre_redundant_insns); | |
7506f491 DE |
4518 | |
4519 | /* Delete the redundant insns first so that | |
4520 | - we know what register to use for the new insns and for the other | |
4521 | ones with reaching expressions | |
4522 | - we know which insns are redundant when we go to create copies */ | |
c4c81601 | 4523 | |
7506f491 | 4524 | changed = pre_delete (); |
a42cd965 | 4525 | did_insert = pre_edge_insert (edge_list, index_map); |
c4c81601 | 4526 | |
7506f491 | 4527 | /* In other places with reaching expressions, copy the expression to the |
a42cd965 | 4528 | specially allocated pseudo-reg that reaches the redundant expr. */ |
7506f491 | 4529 | pre_insert_copies (); |
a42cd965 AM |
4530 | if (did_insert) |
4531 | { | |
4532 | commit_edge_insertions (); | |
4533 | changed = 1; | |
4534 | } | |
7506f491 | 4535 | |
283a2545 | 4536 | free (index_map); |
76ac938b | 4537 | sbitmap_free (pre_redundant_insns); |
7506f491 DE |
4538 | return changed; |
4539 | } | |
4540 | ||
4541 | /* Top level routine to perform one PRE GCSE pass. | |
4542 | ||
cc2902df | 4543 | Return nonzero if a change was made. */ |
7506f491 DE |
4544 | |
4545 | static int | |
1d088dee | 4546 | one_pre_gcse_pass (int pass) |
7506f491 DE |
4547 | { |
4548 | int changed = 0; | |
4549 | ||
4550 | gcse_subst_count = 0; | |
4551 | gcse_create_count = 0; | |
4552 | ||
02280659 | 4553 | alloc_hash_table (max_cuid, &expr_hash_table, 0); |
a42cd965 | 4554 | add_noreturn_fake_exit_edges (); |
a13d4ebf AM |
4555 | if (flag_gcse_lm) |
4556 | compute_ld_motion_mems (); | |
4557 | ||
02280659 | 4558 | compute_hash_table (&expr_hash_table); |
a13d4ebf | 4559 | trim_ld_motion_mems (); |
10d22567 ZD |
4560 | if (dump_file) |
4561 | dump_hash_table (dump_file, "Expression", &expr_hash_table); | |
c4c81601 | 4562 | |
02280659 | 4563 | if (expr_hash_table.n_elems > 0) |
7506f491 | 4564 | { |
02280659 | 4565 | alloc_pre_mem (last_basic_block, expr_hash_table.n_elems); |
7506f491 DE |
4566 | compute_pre_data (); |
4567 | changed |= pre_gcse (); | |
a42cd965 | 4568 | free_edge_list (edge_list); |
7506f491 DE |
4569 | free_pre_mem (); |
4570 | } | |
c4c81601 | 4571 | |
a13d4ebf | 4572 | free_ldst_mems (); |
6809cbf9 | 4573 | remove_fake_exit_edges (); |
02280659 | 4574 | free_hash_table (&expr_hash_table); |
7506f491 | 4575 | |
10d22567 | 4576 | if (dump_file) |
7506f491 | 4577 | { |
10d22567 | 4578 | fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ", |
faed5cc3 | 4579 | current_function_name (), pass, bytes_used); |
10d22567 | 4580 | fprintf (dump_file, "%d substs, %d insns created\n", |
c4c81601 | 4581 | gcse_subst_count, gcse_create_count); |
7506f491 DE |
4582 | } |
4583 | ||
4584 | return changed; | |
4585 | } | |
aeb2f500 | 4586 | \f |
cf7c4aa6 HPN |
4587 | /* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them |
4588 | to INSN. If such notes are added to an insn which references a | |
4589 | CODE_LABEL, the LABEL_NUSES count is incremented. We have to add | |
4590 | that note, because the following loop optimization pass requires | |
4591 | them. */ | |
aeb2f500 | 4592 | |
aeb2f500 JW |
4593 | /* ??? If there was a jump optimization pass after gcse and before loop, |
4594 | then we would not need to do this here, because jump would add the | |
cf7c4aa6 | 4595 | necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */ |
aeb2f500 JW |
4596 | |
4597 | static void | |
1d088dee | 4598 | add_label_notes (rtx x, rtx insn) |
aeb2f500 JW |
4599 | { |
4600 | enum rtx_code code = GET_CODE (x); | |
4601 | int i, j; | |
6f7d635c | 4602 | const char *fmt; |
aeb2f500 JW |
4603 | |
4604 | if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x)) | |
4605 | { | |
6b3603c2 | 4606 | /* This code used to ignore labels that referred to dispatch tables to |
e0bb17a8 | 4607 | avoid flow generating (slightly) worse code. |
6b3603c2 | 4608 | |
ac7c5af5 JL |
4609 | We no longer ignore such label references (see LABEL_REF handling in |
4610 | mark_jump_label for additional information). */ | |
c4c81601 | 4611 | |
cf7c4aa6 HPN |
4612 | if (reg_mentioned_p (XEXP (x, 0), insn)) |
4613 | { | |
4614 | /* There's no reason for current users to emit jump-insns | |
4615 | with such a LABEL_REF, so we don't have to handle | |
4616 | REG_LABEL_TARGET notes. */ | |
4617 | gcc_assert (!JUMP_P (insn)); | |
4618 | REG_NOTES (insn) | |
4619 | = gen_rtx_INSN_LIST (REG_LABEL_OPERAND, XEXP (x, 0), | |
4620 | REG_NOTES (insn)); | |
4621 | if (LABEL_P (XEXP (x, 0))) | |
4622 | LABEL_NUSES (XEXP (x, 0))++; | |
4623 | } | |
aeb2f500 JW |
4624 | return; |
4625 | } | |
4626 | ||
c4c81601 | 4627 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
aeb2f500 JW |
4628 | { |
4629 | if (fmt[i] == 'e') | |
4630 | add_label_notes (XEXP (x, i), insn); | |
4631 | else if (fmt[i] == 'E') | |
4632 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
4633 | add_label_notes (XVECEXP (x, i, j), insn); | |
4634 | } | |
4635 | } | |
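/* For instance (illustrative): for an insn containing

       (set (reg 61) (label_ref L5))

   the walk above adds a REG_LABEL_OPERAND note referencing L5 and
   increments LABEL_NUSES (L5), keeping the label from being deleted
   before the following passes use it.  */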
a65f3558 JL |
4636 | |
4637 | /* Compute transparent outgoing information for each block. | |
4638 | ||
4639 | An expression is transparent to an edge unless it is killed by | |
4640 | the edge itself. This can only happen with abnormal control flow, | |
4641 | when the edge is traversed through a call. This happens with | |
4642 | non-local labels and exceptions. | |
4643 | ||
4644 | This would not be necessary if we split the edge. While this is | |
4645 | normally impossible for abnormal critical edges, with some effort | |
4646 | it should be possible with exception handling, since we still have | |
4647 | control over which handler should be invoked. But due to increased | |
4648 | EH table sizes, this may not be worthwhile. */ | |
4649 | ||
4650 | static void | |
1d088dee | 4651 | compute_transpout (void) |
a65f3558 | 4652 | { |
e0082a72 | 4653 | basic_block bb; |
2e653e39 | 4654 | unsigned int i; |
c4c81601 | 4655 | struct expr *expr; |
a65f3558 | 4656 | |
d55bc081 | 4657 | sbitmap_vector_ones (transpout, last_basic_block); |
a65f3558 | 4658 | |
e0082a72 | 4659 | FOR_EACH_BB (bb) |
a65f3558 | 4660 | { |
a65f3558 JL |
4661 | /* Note that flow inserted a nop at the end of basic blocks that | |
4662 | end in call instructions for reasons other than abnormal | |
4663 | control flow. */ | |
7b1b4aed | 4664 | if (! CALL_P (BB_END (bb))) |
a65f3558 JL |
4665 | continue; |
4666 | ||
02280659 ZD |
4667 | for (i = 0; i < expr_hash_table.size; i++) |
4668 | for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash) | |
7b1b4aed | 4669 | if (MEM_P (expr->expr)) |
c4c81601 RK |
4670 | { |
4671 | if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF | |
4672 | && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0))) | |
4673 | continue; | |
589005ff | 4674 | |
c4c81601 RK |
4675 | /* ??? Optimally, we would use interprocedural alias |
4676 | analysis to determine if this mem is actually killed | |
4677 | by this call. */ | |
e0082a72 | 4678 | RESET_BIT (transpout[bb->index], expr->bitmap_index); |
c4c81601 | 4679 | } |
a65f3558 JL |
4680 | } |
4681 | } | |
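/* Example (illustrative): if a block ends in

       (call_insn (call (mem (symbol_ref "foo")) ...))

   then every MEM expression in the hash table except constant-pool
   references, e.g. (mem:SI (symbol_ref "glob")), gets its TRANSPOUT bit
   cleared for that block: lacking interprocedural alias analysis, we must
   assume the call kills the memory reference.  */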
dfdb644f | 4682 | |
bb457bd9 JL |
4683 | /* Code Hoisting variables and subroutines. */ |
4684 | ||
4685 | /* Very busy expressions. */ | |
4686 | static sbitmap *hoist_vbein; | |
4687 | static sbitmap *hoist_vbeout; | |
4688 | ||
4689 | /* Hoistable expressions. */ | |
4690 | static sbitmap *hoist_exprs; | |
4691 | ||
bb457bd9 | 4692 | /* ??? We could compute post dominators and run this algorithm in |
68e82b83 | 4693 | reverse to perform tail merging; doing so would probably be | |
bb457bd9 JL |
4694 | more effective than the tail merging code in jump.c. |
4695 | ||
4696 | It's unclear if tail merging could be run in parallel with | |
4697 | code hoisting. It would be nice. */ | |
4698 | ||
4699 | /* Allocate vars used for code hoisting analysis. */ | |
4700 | ||
4701 | static void | |
1d088dee | 4702 | alloc_code_hoist_mem (int n_blocks, int n_exprs) |
bb457bd9 JL |
4703 | { |
4704 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4705 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4706 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4707 | ||
4708 | hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4709 | hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4710 | hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4711 | transpout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
bb457bd9 JL |
4712 | } |
4713 | ||
4714 | /* Free vars used for code hoisting analysis. */ | |
4715 | ||
4716 | static void | |
1d088dee | 4717 | free_code_hoist_mem (void) |
bb457bd9 | 4718 | { |
5a660bff DB |
4719 | sbitmap_vector_free (antloc); |
4720 | sbitmap_vector_free (transp); | |
4721 | sbitmap_vector_free (comp); | |
bb457bd9 | 4722 | |
5a660bff DB |
4723 | sbitmap_vector_free (hoist_vbein); |
4724 | sbitmap_vector_free (hoist_vbeout); | |
4725 | sbitmap_vector_free (hoist_exprs); | |
4726 | sbitmap_vector_free (transpout); | |
bb457bd9 | 4727 | |
d47cc544 | 4728 | free_dominance_info (CDI_DOMINATORS); |
bb457bd9 JL |
4729 | } |
4730 | ||
4731 | /* Compute the very busy expressions at entry/exit from each block. | |
4732 | ||
4733 | An expression is very busy if all paths from a given point | |
4734 | compute the expression. */ | |
4735 | ||
4736 | static void | |
1d088dee | 4737 | compute_code_hoist_vbeinout (void) |
bb457bd9 | 4738 | { |
e0082a72 ZD |
4739 | int changed, passes; |
4740 | basic_block bb; | |
bb457bd9 | 4741 | |
d55bc081 ZD |
4742 | sbitmap_vector_zero (hoist_vbeout, last_basic_block); |
4743 | sbitmap_vector_zero (hoist_vbein, last_basic_block); | |
bb457bd9 JL |
4744 | |
4745 | passes = 0; | |
4746 | changed = 1; | |
c4c81601 | 4747 | |
bb457bd9 JL |
4748 | while (changed) |
4749 | { | |
4750 | changed = 0; | |
c4c81601 | 4751 | |
bb457bd9 JL |
4752 | /* We scan the blocks in the reverse order to speed up |
4753 | the convergence. */ | |
e0082a72 | 4754 | FOR_EACH_BB_REVERSE (bb) |
bb457bd9 | 4755 | { |
e0082a72 ZD |
4756 | changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index], |
4757 | hoist_vbeout[bb->index], transp[bb->index]); | |
4758 | if (bb->next_bb != EXIT_BLOCK_PTR) | |
4759 | sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index); | |
bb457bd9 | 4760 | } |
c4c81601 | 4761 | |
bb457bd9 JL |
4762 | passes++; |
4763 | } | |
4764 | ||
10d22567 ZD |
4765 | if (dump_file) |
4766 | fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes); | |
bb457bd9 JL |
4767 | } |
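/* In dataflow notation, the fixed point computed above is (a sketch of
   the code, not a quotation from any reference):

       VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
       VBEOUT(bb) = intersection of VBEIN(s) over all successors s

   with VBEOUT left empty toward the exit.  */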
4768 | ||
4769 | /* Top level routine to do the dataflow analysis needed by code hoisting. */ | |
4770 | ||
4771 | static void | |
1d088dee | 4772 | compute_code_hoist_data (void) |
bb457bd9 | 4773 | { |
02280659 | 4774 | compute_local_properties (transp, comp, antloc, &expr_hash_table); |
bb457bd9 JL |
4775 | compute_transpout (); |
4776 | compute_code_hoist_vbeinout (); | |
d47cc544 | 4777 | calculate_dominance_info (CDI_DOMINATORS); |
10d22567 ZD |
4778 | if (dump_file) |
4779 | fprintf (dump_file, "\n"); | |
bb457bd9 JL |
4780 | } |
4781 | ||
4782 | /* Determine if the expression identified by EXPR_INDEX would | |
4783 | reach BB unimpaired if it was placed at the end of EXPR_BB. | |
4784 | ||
4785 | It's unclear exactly what Muchnick meant by "unimpared". It seems | |
4786 | to me that the expression must either be computed or transparent in | |
4787 | *every* block in the path(s) from EXPR_BB to BB. Any other definition | |
4788 | would allow the expression to be hoisted out of loops, even if | |
4789 | the expression wasn't a loop invariant. | |
4790 | ||
4791 | Contrast this to reachability for PRE where an expression is | |
4792 | considered reachable if *any* path reaches instead of *all* | |
4793 | paths. */ | |
4794 | ||
4795 | static int | |
1d088dee | 4796 | hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited) |
bb457bd9 JL |
4797 | { |
4798 | edge pred; | |
628f6a4e | 4799 | edge_iterator ei; |
283a2545 | 4800 | int visited_allocated_locally = 0; |
589005ff | 4801 | |
bb457bd9 JL |
4802 | |
4803 | if (visited == NULL) | |
4804 | { | |
8e42ace1 | 4805 | visited_allocated_locally = 1; |
5ed6ace5 | 4806 | visited = XCNEWVEC (char, last_basic_block); |
bb457bd9 JL |
4807 | } |
4808 | ||
628f6a4e | 4809 | FOR_EACH_EDGE (pred, ei, bb->preds) |
bb457bd9 | 4810 | { |
e2d2ed72 | 4811 | basic_block pred_bb = pred->src; |
bb457bd9 JL |
4812 | |
4813 | if (pred->src == ENTRY_BLOCK_PTR) | |
4814 | break; | |
f305679f JH |
4815 | else if (pred_bb == expr_bb) |
4816 | continue; | |
0b17ab2f | 4817 | else if (visited[pred_bb->index]) |
bb457bd9 | 4818 | continue; |
c4c81601 | 4819 | |
bb457bd9 | 4820 | /* Does this predecessor generate this expression? */ |
0b17ab2f | 4821 | else if (TEST_BIT (comp[pred_bb->index], expr_index)) |
bb457bd9 | 4822 | break; |
0b17ab2f | 4823 | else if (! TEST_BIT (transp[pred_bb->index], expr_index)) |
bb457bd9 | 4824 | break; |
c4c81601 | 4825 | |
bb457bd9 JL |
4826 | /* Not killed. */ |
4827 | else | |
4828 | { | |
0b17ab2f | 4829 | visited[pred_bb->index] = 1; |
bb457bd9 JL |
4830 | if (! hoist_expr_reaches_here_p (expr_bb, expr_index, |
4831 | pred_bb, visited)) | |
4832 | break; | |
4833 | } | |
4834 | } | |
589005ff | 4835 | if (visited_allocated_locally) |
283a2545 | 4836 | free (visited); |
c4c81601 | 4837 | |
bb457bd9 JL |
4838 | return (pred == NULL); |
4839 | } | |
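/* An illustrative case for the hoisting pass below:

       if (cond)
         x = a * b;
       else
         y = a * b;

   "a * b" is very busy at the exit of the dominating block and is
   hoistable from both successors, so hoistable > 1 and hoist_code
   evaluates it once at the end of the dominator instead.  */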
4840 | \f | |
4841 | /* Actually perform code hoisting. */ | |
c4c81601 | 4842 | |
bb457bd9 | 4843 | static void |
1d088dee | 4844 | hoist_code (void) |
bb457bd9 | 4845 | { |
e0082a72 | 4846 | basic_block bb, dominated; |
66f97d31 | 4847 | VEC (basic_block, heap) *domby; |
c635a1ec | 4848 | unsigned int i,j; |
bb457bd9 | 4849 | struct expr **index_map; |
c4c81601 | 4850 | struct expr *expr; |
bb457bd9 | 4851 | |
d55bc081 | 4852 | sbitmap_vector_zero (hoist_exprs, last_basic_block); |
bb457bd9 JL |
4853 | |
4854 | /* Compute a mapping from expression number (`bitmap_index') to | |
4855 | hash table entry. */ | |
4856 | ||
5ed6ace5 | 4857 | index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems); |
02280659 ZD |
4858 | for (i = 0; i < expr_hash_table.size; i++) |
4859 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 | 4860 | index_map[expr->bitmap_index] = expr; |
bb457bd9 JL |
4861 | |
4862 | /* Walk over each basic block looking for potentially hoistable | |
4863 | expressions, nothing gets hoisted from the entry block. */ | |
e0082a72 | 4864 | FOR_EACH_BB (bb) |
bb457bd9 JL |
4865 | { |
4866 | int found = 0; | |
4867 | int insn_inserted_p; | |
4868 | ||
66f97d31 | 4869 | domby = get_dominated_by (CDI_DOMINATORS, bb); |
bb457bd9 JL |
4870 | /* Examine each expression that is very busy at the exit of this |
4871 | block. These are the potentially hoistable expressions. */ | |
e0082a72 | 4872 | for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++) |
bb457bd9 JL |
4873 | { |
4874 | int hoistable = 0; | |
c4c81601 | 4875 | |
c635a1ec DB |
4876 | if (TEST_BIT (hoist_vbeout[bb->index], i) |
4877 | && TEST_BIT (transpout[bb->index], i)) | |
bb457bd9 JL |
4878 | { |
4879 | /* We've found a potentially hoistable expression, now | |
4880 | we look at every block BB dominates to see if it | |
4881 | computes the expression. */ | |
66f97d31 | 4882 | for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++) |
bb457bd9 JL |
4883 | { |
4884 | /* Ignore self dominance. */ | |
c635a1ec | 4885 | if (bb == dominated) |
bb457bd9 | 4886 | continue; |
bb457bd9 JL |
4887 | /* We've found a dominated block, now see if it computes |
4888 | the busy expression and whether or not moving that | |
4889 | expression to the "beginning" of that block is safe. */ | |
e0082a72 | 4890 | if (!TEST_BIT (antloc[dominated->index], i)) |
bb457bd9 JL |
4891 | continue; |
4892 | ||
4893 | /* Note if the expression would reach the dominated block | |
589005ff | 4894 | unimpaired if it was placed at the end of BB. | |
bb457bd9 JL |
4895 | |
4896 | Keep track of how many times this expression is hoistable | |
4897 | from a dominated block into BB. */ | |
e0082a72 | 4898 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) |
bb457bd9 JL |
4899 | hoistable++; |
4900 | } | |
4901 | ||
ff7cc307 | 4902 | /* If we found more than one hoistable occurrence of this |
bb457bd9 JL |
4903 | expression, then note it in the bitmap of expressions to |
4904 | hoist. It makes no sense to hoist things which are computed | |
4905 | in only one BB, and doing so tends to pessimize register | |
4906 | allocation. One could increase this value to try harder | |
4907 | to avoid any possible code expansion due to register | |
4908 | allocation issues; however experiments have shown that | |
4909 | the vast majority of hoistable expressions are only movable | |
e0bb17a8 | 4910 | from two successors, so raising this threshold is likely |
bb457bd9 JL |
4911 | to nullify any benefit we get from code hoisting. */ |
4912 | if (hoistable > 1) | |
4913 | { | |
e0082a72 | 4914 | SET_BIT (hoist_exprs[bb->index], i); |
bb457bd9 JL |
4915 | found = 1; |
4916 | } | |
4917 | } | |
4918 | } | |
bb457bd9 JL |
4919 | /* If we found nothing to hoist, then quit now. */ |
4920 | if (! found) | |
c635a1ec | 4921 | { |
66f97d31 ZD |
4922 | VEC_free (basic_block, heap, domby); |
4923 | continue; | |
c635a1ec | 4924 | } |
bb457bd9 JL |
4925 | |
4926 | /* Loop over all the hoistable expressions. */ | |
e0082a72 | 4927 | for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++) |
bb457bd9 JL |
4928 | { |
4929 | /* We want to insert the expression into BB only once, so | |
4930 | note when we've inserted it. */ | |
4931 | insn_inserted_p = 0; | |
4932 | ||
4933 | /* These tests should be the same as the tests above. */ | |
cb83c2ec | 4934 | if (TEST_BIT (hoist_exprs[bb->index], i)) |
bb457bd9 JL |
4935 | { |
4936 | /* We've found a potentially hoistable expression, now | |
4937 | we look at every block BB dominates to see if it | |
4938 | computes the expression. */ | |
66f97d31 | 4939 | for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++) |
bb457bd9 JL |
4940 | { |
4941 | /* Ignore self dominance. */ | |
c635a1ec | 4942 | if (bb == dominated) |
bb457bd9 JL |
4943 | continue; |
4944 | ||
4945 | /* We've found a dominated block, now see if it computes | |
4946 | the busy expression and whether or not moving that | |
4947 | expression to the "beginning" of that block is safe. */ | |
e0082a72 | 4948 | if (!TEST_BIT (antloc[dominated->index], i)) |
bb457bd9 JL |
4949 | continue; |
4950 | ||
4951 | /* The expression is computed in the dominated block and | |
4952 | it would be safe to compute it at the start of the | |
4953 | dominated block. Now we have to determine if the | |
ff7cc307 | 4954 | expression would reach the dominated block if it was |
bb457bd9 | 4955 | placed at the end of BB. */ |
e0082a72 | 4956 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) |
bb457bd9 JL |
4957 | { |
4958 | struct expr *expr = index_map[i]; | |
4959 | struct occr *occr = expr->antic_occr; | |
4960 | rtx insn; | |
4961 | rtx set; | |
4962 | ||
ff7cc307 | 4963 | /* Find the right occurrence of this expression. */ |
e0082a72 | 4964 | while (occr && BLOCK_FOR_INSN (occr->insn) != dominated) | |
bb457bd9 JL |
4965 | occr = occr->next; |
4966 | ||
282899df | 4967 | gcc_assert (occr); |
bb457bd9 | 4968 | insn = occr->insn; |
bb457bd9 | 4969 | set = single_set (insn); |
282899df | 4970 | gcc_assert (set); |
bb457bd9 JL |
4971 | |
4972 | /* Create a pseudo-reg to store the result of reaching | |
4973 | expressions into. Get the mode for the new pseudo | |
4974 | from the mode of the original destination pseudo. */ | |
4975 | if (expr->reaching_reg == NULL) | |
4976 | expr->reaching_reg | |
4977 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
4978 | ||
10d1bb36 JH |
4979 | gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn); |
4980 | delete_insn (insn); | |
4981 | occr->deleted_p = 1; | |
4982 | if (!insn_inserted_p) | |
bb457bd9 | 4983 | { |
6fb5fa3c | 4984 | insert_insn_end_basic_block (index_map[i], bb, 0); |
10d1bb36 | 4985 | insn_inserted_p = 1; |
bb457bd9 JL |
4986 | } |
4987 | } | |
4988 | } | |
4989 | } | |
4990 | } | |
66f97d31 | 4991 | VEC_free (basic_block, heap, domby); |
bb457bd9 | 4992 | } |
c4c81601 | 4993 | |
8e42ace1 | 4994 | free (index_map); |
bb457bd9 JL |
4995 | } |
4996 | ||
4997 | /* Top level routine to perform one code hoisting (aka unification) pass | |
4998 | ||
cc2902df | 4999 | Return nonzero if a change was made. */ |
bb457bd9 JL |
5000 | |
5001 | static int | |
1d088dee | 5002 | one_code_hoisting_pass (void) |
bb457bd9 JL |
5003 | { |
5004 | int changed = 0; | |
5005 | ||
02280659 ZD |
5006 | alloc_hash_table (max_cuid, &expr_hash_table, 0); |
5007 | compute_hash_table (&expr_hash_table); | |
10d22567 ZD |
5008 | if (dump_file) |
5009 | dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table); | |
c4c81601 | 5010 | |
02280659 | 5011 | if (expr_hash_table.n_elems > 0) |
bb457bd9 | 5012 | { |
02280659 | 5013 | alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems); |
bb457bd9 JL |
5014 | compute_code_hoist_data (); |
5015 | hoist_code (); | |
5016 | free_code_hoist_mem (); | |
5017 | } | |
c4c81601 | 5018 | |
02280659 | 5019 | free_hash_table (&expr_hash_table); |
bb457bd9 JL |
5020 | |
5021 | return changed; | |
5022 | } | |
a13d4ebf AM |
5023 | \f |
5024 | /* Here we provide the things required to do store motion towards | |
5025 | the exit. In order for this to be effective, gcse also needed to | |
5026 | be taught how to move a load when it is killed only by a store to itself. | |
5027 | ||
5028 | int i; | |
5029 | float a[10]; | |
5030 | ||
5031 | void foo(float scale) | |
5032 | { | |
5033 | for (i=0; i<10; i++) | |
5034 | a[i] *= scale; | |
5035 | } | |
5036 | ||
5037 | 'i' is both loaded and stored to in the loop. Normally, gcse cannot move | |
589005ff KH |
5038 | the load out since it's live around the loop, and stored at the bottom | |
5039 | of the loop. | |
a13d4ebf | 5040 | |
589005ff | 5041 | The 'Load Motion' referred to and implemented in this file is |
a13d4ebf AM |
5042 | an enhancement to gcse which when using edge based lcm, recognizes |
5043 | this situation and allows gcse to move the load out of the loop. | |
5044 | ||
5045 | Once gcse has hoisted the load, store motion can then push this | |
5046 | load towards the exit, and we end up with no loads or stores of 'i' | |
5047 | in the loop. */ | |
5048 | ||
9727e468 RG |
5049 | static hashval_t |
5050 | pre_ldst_expr_hash (const void *p) | |
5051 | { | |
5052 | int do_not_record_p = 0; | |
5053 | const struct ls_expr *x = p; | |
5054 | return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false); | |
5055 | } | |
5056 | ||
5057 | static int | |
5058 | pre_ldst_expr_eq (const void *p1, const void *p2) | |
5059 | { | |
5060 | const struct ls_expr *ptr1 = p1, *ptr2 = p2; | |
5061 | return expr_equiv_p (ptr1->pattern, ptr2->pattern); | |
5062 | } | |
5063 | ||
ff7cc307 | 5064 | /* This will search the ldst list for a matching expression. If it |
a13d4ebf AM |
5065 | doesn't find one, we create one and initialize it. */ |
5066 | ||
5067 | static struct ls_expr * | |
1d088dee | 5068 | ldst_entry (rtx x) |
a13d4ebf | 5069 | { |
b58b21d5 | 5070 | int do_not_record_p = 0; |
a13d4ebf | 5071 | struct ls_expr * ptr; |
b58b21d5 | 5072 | unsigned int hash; |
9727e468 RG |
5073 | void **slot; |
5074 | struct ls_expr e; | |
a13d4ebf | 5075 | |
0516f6fe SB |
5076 | hash = hash_rtx (x, GET_MODE (x), &do_not_record_p, |
5077 | NULL, /*have_reg_qty=*/false); | |
a13d4ebf | 5078 | |
9727e468 RG |
5079 | e.pattern = x; |
5080 | slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT); | |
5081 | if (*slot) | |
5082 | return (struct ls_expr *)*slot; | |
b58b21d5 | 5083 | |
5ed6ace5 | 5084 | ptr = XNEW (struct ls_expr); |
b58b21d5 RS |
5085 | |
5086 | ptr->next = pre_ldst_mems; | |
5087 | ptr->expr = NULL; | |
5088 | ptr->pattern = x; | |
5089 | ptr->pattern_regs = NULL_RTX; | |
5090 | ptr->loads = NULL_RTX; | |
5091 | ptr->stores = NULL_RTX; | |
5092 | ptr->reaching_reg = NULL_RTX; | |
5093 | ptr->invalid = 0; | |
5094 | ptr->index = 0; | |
5095 | ptr->hash_index = hash; | |
5096 | pre_ldst_mems = ptr; | |
9727e468 | 5097 | *slot = ptr; |
589005ff | 5098 | |
a13d4ebf AM |
5099 | return ptr; |
5100 | } | |
5101 | ||
5102 | /* Free up an individual ldst entry. */ | |
5103 | ||
589005ff | 5104 | static void |
1d088dee | 5105 | free_ldst_entry (struct ls_expr * ptr) |
a13d4ebf | 5106 | { |
aaa4ca30 AJ |
5107 | free_INSN_LIST_list (& ptr->loads); |
5108 | free_INSN_LIST_list (& ptr->stores); | |
a13d4ebf AM |
5109 | |
5110 | free (ptr); | |
5111 | } | |
5112 | ||
5113 | /* Free up all memory associated with the ldst list. */ | |
5114 | ||
5115 | static void | |
1d088dee | 5116 | free_ldst_mems (void) |
a13d4ebf | 5117 | { |
35b5442a RG |
5118 | if (pre_ldst_table) |
5119 | htab_delete (pre_ldst_table); | |
9727e468 RG |
5120 | pre_ldst_table = NULL; |
5121 | ||
589005ff | 5122 | while (pre_ldst_mems) |
a13d4ebf AM |
5123 | { |
5124 | struct ls_expr * tmp = pre_ldst_mems; | |
5125 | ||
5126 | pre_ldst_mems = pre_ldst_mems->next; | |
5127 | ||
5128 | free_ldst_entry (tmp); | |
5129 | } | |
5130 | ||
5131 | pre_ldst_mems = NULL; | |
5132 | } | |
5133 | ||
5134 | /* Dump debugging info about the ldst list. */ | |
5135 | ||
5136 | static void | |
1d088dee | 5137 | print_ldst_list (FILE * file) |
a13d4ebf AM |
5138 | { |
5139 | struct ls_expr * ptr; | |
5140 | ||
5141 | fprintf (file, "LDST list: \n"); | |
5142 | ||
62e5bf5d | 5143 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) |
a13d4ebf AM |
5144 | { |
5145 | fprintf (file, " Pattern (%3d): ", ptr->index); | |
5146 | ||
5147 | print_rtl (file, ptr->pattern); | |
5148 | ||
5149 | fprintf (file, "\n Loads : "); | |
5150 | ||
5151 | if (ptr->loads) | |
5152 | print_rtl (file, ptr->loads); | |
5153 | else | |
5154 | fprintf (file, "(nil)"); | |
5155 | ||
5156 | fprintf (file, "\n Stores : "); | |
5157 | ||
5158 | if (ptr->stores) | |
5159 | print_rtl (file, ptr->stores); | |
5160 | else | |
5161 | fprintf (file, "(nil)"); | |
5162 | ||
5163 | fprintf (file, "\n\n"); | |
5164 | } | |
5165 | ||
5166 | fprintf (file, "\n"); | |
5167 | } | |
5168 | ||
5169 | /* Return X's entry in the list of ldst only expressions, or NULL. */ | |
5170 | ||
5171 | static struct ls_expr * | |
1d088dee | 5172 | find_rtx_in_ldst (rtx x) |
a13d4ebf | 5173 | { |
9727e468 RG |
5174 | struct ls_expr e; |
5175 | void **slot; | |
6375779a RG |
5176 | if (!pre_ldst_table) |
5177 | return NULL; | |
9727e468 RG |
5178 | e.pattern = x; |
5179 | slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT); | |
5180 | if (!slot || ((struct ls_expr *)*slot)->invalid) | |
5181 | return NULL; | |
5182 | return *slot; | |
a13d4ebf AM |
5183 | } |
5184 | ||
5185 | /* Assign each element of the list of mems a monotonically increasing value. */ | |
5186 | ||
5187 | static int | |
1d088dee | 5188 | enumerate_ldsts (void) |
a13d4ebf AM |
5189 | { |
5190 | struct ls_expr * ptr; | |
5191 | int n = 0; | |
5192 | ||
5193 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) | |
5194 | ptr->index = n++; | |
5195 | ||
5196 | return n; | |
5197 | } | |
5198 | ||
5199 | /* Return first item in the list. */ | |
5200 | ||
5201 | static inline struct ls_expr * | |
1d088dee | 5202 | first_ls_expr (void) |
a13d4ebf AM |
5203 | { |
5204 | return pre_ldst_mems; | |
5205 | } | |
5206 | ||
0e8a66de | 5207 | /* Return the next item in the list after the specified one. */ |
a13d4ebf AM |
5208 | |
5209 | static inline struct ls_expr * | |
1d088dee | 5210 | next_ls_expr (struct ls_expr * ptr) |
a13d4ebf AM |
5211 | { |
5212 | return ptr->next; | |
5213 | } | |
5214 | \f | |
5215 | /* Load Motion for loads which only kill themselves. */ | |
5216 | ||
5217 | /* Return true if x is a simple MEM operation, with no registers or | |
5218 | side effects. These are the types of loads we consider for the | |
5219 | ld_motion list, otherwise we let the usual aliasing take care of it. */ | |
5220 | ||
589005ff | 5221 | static int |
ed7a4b4b | 5222 | simple_mem (const_rtx x) |
a13d4ebf | 5223 | { |
7b1b4aed | 5224 | if (! MEM_P (x)) |
a13d4ebf | 5225 | return 0; |
589005ff | 5226 | |
a13d4ebf AM |
5227 | if (MEM_VOLATILE_P (x)) |
5228 | return 0; | |
589005ff | 5229 | |
a13d4ebf AM |
5230 | if (GET_MODE (x) == BLKmode) |
5231 | return 0; | |
aaa4ca30 | 5232 | |
47a3dae1 ZD |
5233 | /* If we are handling exceptions, we must be careful with memory references |
5234 | that may trap. If we are not, the behavior is undefined, so we may just | |
5235 | continue. */ | |
5236 | if (flag_non_call_exceptions && may_trap_p (x)) | |
98d3d336 RS |
5237 | return 0; |
5238 | ||
47a3dae1 ZD |
5239 | if (side_effects_p (x)) |
5240 | return 0; | |
589005ff | 5241 | |
47a3dae1 ZD |
5242 | /* Do not consider function arguments passed on stack. */ |
5243 | if (reg_mentioned_p (stack_pointer_rtx, x)) | |
5244 | return 0; | |
5245 | ||
5246 | if (flag_float_store && FLOAT_MODE_P (GET_MODE (x))) | |
5247 | return 0; | |
5248 | ||
5249 | return 1; | |
a13d4ebf AM |
5250 | } |
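/* Examples (illustrative): (mem:SI (symbol_ref "a")) passes the tests
   above, while a volatile MEM, a BLKmode MEM, a MEM that may trap when
   -fnon-call-exceptions is in effect, or a MEM mentioning the stack
   pointer (e.g. an argument slot) is rejected.  */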
5251 | ||
589005ff KH |
5252 | /* Make sure there isn't a buried reference in this pattern anywhere. |
5253 | If there is, invalidate the entry for it since we're not capable | |
5254 | of fixing it up just yet.  We have to be sure we know about ALL | |
a13d4ebf AM |
5255 | loads since the aliasing code will allow all entries in the | |
5256 | ld_motion list to not alias themselves.  If we miss a load, we will get | |
589005ff | 5257 | the wrong value since gcse might common it and we won't know to |
a13d4ebf AM |
5258 | fix it up. */ |
5259 | ||
5260 | static void | |
1d088dee | 5261 | invalidate_any_buried_refs (rtx x) |
a13d4ebf AM |
5262 | { |
5263 | const char * fmt; | |
8e42ace1 | 5264 | int i, j; |
a13d4ebf AM |
5265 | struct ls_expr * ptr; |
5266 | ||
5267 | /* Invalidate it in the list. */ | |
7b1b4aed | 5268 | if (MEM_P (x) && simple_mem (x)) |
a13d4ebf AM |
5269 | { |
5270 | ptr = ldst_entry (x); | |
5271 | ptr->invalid = 1; | |
5272 | } | |
5273 | ||
5274 | /* Recursively process the insn. */ | |
5275 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
589005ff | 5276 | |
a13d4ebf AM |
5277 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) |
5278 | { | |
5279 | if (fmt[i] == 'e') | |
5280 | invalidate_any_buried_refs (XEXP (x, i)); | |
5281 | else if (fmt[i] == 'E') | |
5282 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
5283 | invalidate_any_buried_refs (XVECEXP (x, i, j)); | |
5284 | } | |
5285 | } | |
5286 | ||
4d3eb89a HPN |
5287 | /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple |
5288 | being defined as MEM loads and stores to symbols, with no side effects | |
5289 | and no registers in the expression. For a MEM destination, we also | |
5290 | check that the insn is still valid if we replace the destination with a | |
5291 | REG, as is done in update_ld_motion_stores. If there are any uses/defs | |
5292 | which don't match this criteria, they are invalidated and trimmed out | |
5293 | later. */ | |
a13d4ebf | 5294 | |
589005ff | 5295 | static void |
1d088dee | 5296 | compute_ld_motion_mems (void) |
a13d4ebf AM |
5297 | { |
5298 | struct ls_expr * ptr; | |
e0082a72 | 5299 | basic_block bb; |
a13d4ebf | 5300 | rtx insn; |
589005ff | 5301 | |
a13d4ebf | 5302 | pre_ldst_mems = NULL; |
9727e468 RG |
5303 | pre_ldst_table = htab_create (13, pre_ldst_expr_hash, |
5304 | pre_ldst_expr_eq, NULL); | |
a13d4ebf | 5305 | |
e0082a72 | 5306 | FOR_EACH_BB (bb) |
a13d4ebf | 5307 | { |
eb232f4e | 5308 | FOR_BB_INSNS (bb, insn) |
a13d4ebf | 5309 | { |
735e8085 | 5310 | if (INSN_P (insn)) |
a13d4ebf AM |
5311 | { |
5312 | if (GET_CODE (PATTERN (insn)) == SET) | |
5313 | { | |
5314 | rtx src = SET_SRC (PATTERN (insn)); | |
5315 | rtx dest = SET_DEST (PATTERN (insn)); | |
5316 | ||
5317 | /* Check for a simple LOAD... */ | |
7b1b4aed | 5318 | if (MEM_P (src) && simple_mem (src)) |
a13d4ebf AM |
5319 | { |
5320 | ptr = ldst_entry (src); | |
7b1b4aed | 5321 | if (REG_P (dest)) |
a13d4ebf AM |
5322 | ptr->loads = alloc_INSN_LIST (insn, ptr->loads); |
5323 | else | |
5324 | ptr->invalid = 1; | |
5325 | } | |
5326 | else | |
5327 | { | |
5328 | /* Make sure there isn't a buried load somewhere. */ | |
5329 | invalidate_any_buried_refs (src); | |
5330 | } | |
589005ff | 5331 | |
a13d4ebf AM |
5332 | /* Check for stores. Don't worry about aliased ones, they |
5333 | will block any movement we might do later. We only care | |
5334 | about this exact pattern since those are the only | |
5335 | circumstance that we will ignore the aliasing info. */ | |
7b1b4aed | 5336 | if (MEM_P (dest) && simple_mem (dest)) |
a13d4ebf AM |
5337 | { |
5338 | ptr = ldst_entry (dest); | |
589005ff | 5339 | |
7b1b4aed | 5340 | if (! MEM_P (src) |
4d3eb89a HPN |
5341 | && GET_CODE (src) != ASM_OPERANDS |
5342 | /* Check for REG manually since want_to_gcse_p | |
5343 | returns 0 for all REGs. */ | |
1707bafa | 5344 | && can_assign_to_reg_p (src)) |
a13d4ebf AM |
5345 | ptr->stores = alloc_INSN_LIST (insn, ptr->stores); |
5346 | else | |
5347 | ptr->invalid = 1; | |
5348 | } | |
5349 | } | |
5350 | else | |
5351 | invalidate_any_buried_refs (PATTERN (insn)); | |
5352 | } | |
5353 | } | |
5354 | } | |
5355 | } | |
5356 | ||
589005ff | 5357 | /* Remove any references that have been either invalidated or are not in the |
a13d4ebf AM |
5358 | expression list for pre gcse. */ |
5359 | ||
5360 | static void | |
1d088dee | 5361 | trim_ld_motion_mems (void) |
a13d4ebf | 5362 | { |
b58b21d5 RS |
5363 | struct ls_expr * * last = & pre_ldst_mems; |
5364 | struct ls_expr * ptr = pre_ldst_mems; | |
a13d4ebf AM |
5365 | |
5366 | while (ptr != NULL) | |
5367 | { | |
b58b21d5 | 5368 | struct expr * expr; |
589005ff | 5369 | |
a13d4ebf | 5370 | /* Delete if entry has been made invalid. */ |
b58b21d5 | 5371 | if (! ptr->invalid) |
a13d4ebf | 5372 | { |
a13d4ebf | 5373 | /* Delete if we cannot find this mem in the expression list. */ |
b58b21d5 | 5374 | unsigned int hash = ptr->hash_index % expr_hash_table.size; |
589005ff | 5375 | |
b58b21d5 RS |
5376 | for (expr = expr_hash_table.table[hash]; |
5377 | expr != NULL; | |
5378 | expr = expr->next_same_hash) | |
5379 | if (expr_equiv_p (expr->expr, ptr->pattern)) | |
5380 | break; | |
a13d4ebf AM |
5381 | } |
5382 | else | |
b58b21d5 RS |
5383 | expr = (struct expr *) 0; |
5384 | ||
5385 | if (expr) | |
a13d4ebf AM |
5386 | { |
5387 | /* Set the expression field if we are keeping it. */ | |
a13d4ebf | 5388 | ptr->expr = expr; |
b58b21d5 | 5389 | last = & ptr->next; |
a13d4ebf AM |
5390 | ptr = ptr->next; |
5391 | } | |
b58b21d5 RS |
5392 | else |
5393 | { | |
5394 | *last = ptr->next; | |
9727e468 | 5395 | htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index); |
b58b21d5 RS |
5396 | free_ldst_entry (ptr); |
5397 | ptr = * last; | |
5398 | } | |
a13d4ebf AM |
5399 | } |
5400 | ||
5401 | /* Show the world what we've found. */ | |
10d22567 ZD |
5402 | if (dump_file && pre_ldst_mems != NULL) |
5403 | print_ldst_list (dump_file); | |
a13d4ebf AM |
5404 | } |
5405 | ||
5406 | /* This routine will take an expression which we are replacing with | |
5407 | a reaching register, and update any stores that are needed if | |
5408 | that expression is in the ld_motion list. Stores are updated by | |
a98ebe2e | 5409 | copying their SRC to the reaching register, and then storing |
a13d4ebf AM |
5410 | the reaching register into the store location.  This keeps the | |
5411 | correct value in the reaching register for the loads. */ | |
5412 | ||
5413 | static void | |
1d088dee | 5414 | update_ld_motion_stores (struct expr * expr) |
a13d4ebf AM |
5415 | { |
5416 | struct ls_expr * mem_ptr; | |
5417 | ||
5418 | if ((mem_ptr = find_rtx_in_ldst (expr->expr))) | |
5419 | { | |
589005ff KH |
5420 | /* We can try to find just the REACHED stores, but it shouldn't | |
5421 | matter to set the reaching reg everywhere... some might be | |
a13d4ebf AM |
5422 | dead and should be eliminated later. */ |
5423 | ||
4d3eb89a HPN |
5424 | /* We replace (set mem expr) with (set reg expr) (set mem reg) |
5425 | where reg is the reaching reg used in the load. We checked in | |
5426 | compute_ld_motion_mems that we can replace (set mem expr) with | |
5427 | (set reg expr) in that insn. */ | |
a13d4ebf | 5428 | rtx list = mem_ptr->stores; |
589005ff | 5429 | |
a13d4ebf AM |
5430 | for ( ; list != NULL_RTX; list = XEXP (list, 1)) |
5431 | { | |
5432 | rtx insn = XEXP (list, 0); | |
5433 | rtx pat = PATTERN (insn); | |
5434 | rtx src = SET_SRC (pat); | |
5435 | rtx reg = expr->reaching_reg; | |
c57718d3 | 5436 | rtx copy, new; |
a13d4ebf AM |
5437 | |
5438 | /* If we've already copied it, continue. */ | |
5439 | if (expr->reaching_reg == src) | |
5440 | continue; | |
589005ff | 5441 | |
10d22567 | 5442 | if (dump_file) |
a13d4ebf | 5443 | { |
10d22567 ZD |
5444 | fprintf (dump_file, "PRE: store updated with reaching reg "); |
5445 | print_rtl (dump_file, expr->reaching_reg); | |
5446 | fprintf (dump_file, ":\n "); | |
5447 | print_inline_rtx (dump_file, insn, 8); | |
5448 | fprintf (dump_file, "\n"); | |
a13d4ebf | 5449 | } |
589005ff | 5450 | |
47a3dae1 | 5451 | copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat))); |
c57718d3 RK |
5452 | new = emit_insn_before (copy, insn); |
5453 | record_one_set (REGNO (reg), new); | |
a13d4ebf | 5454 | SET_SRC (pat) = reg; |
6fb5fa3c | 5455 | df_insn_rescan (insn); |
a13d4ebf AM |
5456 | |
5457 | /* Un-recognize this pattern since it's probably different now. */
5458 | INSN_CODE (insn) = -1; | |
5459 | gcse_create_count++; | |
5460 | } | |
5461 | } | |
5462 | } | |
5463 | \f | |
5464 | /* Store motion code. */ | |
5465 | ||
47a3dae1 ZD |
5466 | #define ANTIC_STORE_LIST(x) ((x)->loads) |
5467 | #define AVAIL_STORE_LIST(x) ((x)->stores) | |
5468 | #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg) | |
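/* Note that for store motion these macros deliberately reuse fields of
   struct ls_expr that load motion uses differently: "loads" holds the
   list of anticipatable stores, "stores" the list of available stores,
   and "reaching_reg" temporarily records the insn at which an
   availability check last failed.  */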
5469 | ||
589005ff | 5470 | /* This is used to communicate the target array we want to use in the
aaa4ca30 | 5471 | reg_set_info routine when called via the note_stores mechanism. */ |
47a3dae1 ZD |
5472 | static int * regvec; |
5473 | ||
5474 | /* And current insn, for the same routine. */ | |
5475 | static rtx compute_store_table_current_insn; | |
aaa4ca30 | 5476 | |
a13d4ebf AM |
5477 | /* Used in computing the reverse edge graph bit vectors. */ |
5478 | static sbitmap * st_antloc; | |
5479 | ||
5480 | /* Global holding the number of store expressions we are dealing with. */ | |
5481 | static int num_stores; | |
5482 | ||
01c43039 RE |
5483 | /* Check to see if we need to mark a register set. Called from
5484 | note_stores. */ | |
a13d4ebf | 5485 | |
aaa4ca30 | 5486 | static void |
7bc980e1 | 5487 | reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, |
01c43039 | 5488 | void *data) |
a13d4ebf | 5489 | { |
01c43039 RE |
5490 | sbitmap bb_reg = data; |
5491 | ||
aaa4ca30 AJ |
5492 | if (GET_CODE (dest) == SUBREG) |
5493 | dest = SUBREG_REG (dest); | |
adfcce61 | 5494 | |
7b1b4aed | 5495 | if (REG_P (dest)) |
01c43039 RE |
5496 | { |
5497 | regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn); | |
5498 | if (bb_reg) | |
5499 | SET_BIT (bb_reg, REGNO (dest)); | |
5500 | } | |
5501 | } | |
5502 | ||
5503 | /* Clear any mark that says that this insn sets dest. Called from | |
5504 | note_stores. */ | |
5505 | ||
5506 | static void | |
7bc980e1 | 5507 | reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, |
01c43039 RE |
5508 | void *data) |
5509 | { | |
5510 | int *dead_vec = data; | |
5511 | ||
5512 | if (GET_CODE (dest) == SUBREG) | |
5513 | dest = SUBREG_REG (dest); | |
5514 | ||
7b1b4aed | 5515 | if (REG_P (dest) && |
01c43039 RE |
5516 | dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn)) |
5517 | dead_vec[REGNO (dest)] = 0; | |
a13d4ebf AM |
5518 | } |
5519 | ||
47a3dae1 ZD |
5520 | /* Return false if any of the registers in list X are killed
5521 | because they are marked as set in REGS_SET. */
1d088dee | 5522 | |
47a3dae1 | 5523 | static bool |
ed7a4b4b | 5524 | store_ops_ok (const_rtx x, int *regs_set) |
47a3dae1 | 5525 | { |
ed7a4b4b | 5526 | const_rtx reg; |
47a3dae1 ZD |
5527 | |
5528 | for (; x; x = XEXP (x, 1)) | |
5529 | { | |
5530 | reg = XEXP (x, 0); | |
5531 | if (regs_set[REGNO(reg)]) | |
1d088dee | 5532 | return false; |
47a3dae1 | 5533 | } |
a13d4ebf | 5534 | |
47a3dae1 ZD |
5535 | return true; |
5536 | } | |
5537 | ||
5538 | /* Returns a list of registers mentioned in X. */ | |
5539 | static rtx | |
1d088dee | 5540 | extract_mentioned_regs (rtx x) |
47a3dae1 ZD |
5541 | { |
5542 | return extract_mentioned_regs_helper (x, NULL_RTX); | |
5543 | } | |
5544 | ||
5545 | /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used | |
5546 | registers. */ | |
5547 | static rtx | |
1d088dee | 5548 | extract_mentioned_regs_helper (rtx x, rtx accum) |
a13d4ebf AM |
5549 | { |
5550 | int i; | |
5551 | enum rtx_code code; | |
5552 | const char * fmt; | |
5553 | ||
5554 | /* Repeat is used to turn tail-recursion into iteration. */ | |
5555 | repeat: | |
5556 | ||
5557 | if (x == 0) | |
47a3dae1 | 5558 | return accum; |
a13d4ebf AM |
5559 | |
5560 | code = GET_CODE (x); | |
5561 | switch (code) | |
5562 | { | |
5563 | case REG: | |
47a3dae1 | 5564 | return alloc_EXPR_LIST (0, x, accum); |
a13d4ebf AM |
5565 | |
5566 | case MEM: | |
5567 | x = XEXP (x, 0); | |
5568 | goto repeat; | |
5569 | ||
5570 | case PRE_DEC: | |
5571 | case PRE_INC: | |
6fb5fa3c | 5572 | case PRE_MODIFY: |
a13d4ebf AM |
5573 | case POST_DEC: |
5574 | case POST_INC: | |
6fb5fa3c | 5575 | case POST_MODIFY: |
47a3dae1 | 5576 | /* We do not run this function with arguments having side effects. */ |
282899df | 5577 | gcc_unreachable (); |
a13d4ebf AM |
5578 | |
5579 | case PC: | |
5580 | case CC0: /*FIXME*/ | |
5581 | case CONST: | |
5582 | case CONST_INT: | |
5583 | case CONST_DOUBLE: | |
091a3ac7 | 5584 | case CONST_FIXED: |
69ef87e2 | 5585 | case CONST_VECTOR: |
a13d4ebf AM |
5586 | case SYMBOL_REF: |
5587 | case LABEL_REF: | |
5588 | case ADDR_VEC: | |
5589 | case ADDR_DIFF_VEC: | |
47a3dae1 | 5590 | return accum; |
a13d4ebf AM |
5591 | |
5592 | default: | |
5593 | break; | |
5594 | } | |
5595 | ||
5596 | i = GET_RTX_LENGTH (code) - 1; | |
5597 | fmt = GET_RTX_FORMAT (code); | |
589005ff | 5598 | |
a13d4ebf AM |
5599 | for (; i >= 0; i--) |
5600 | { | |
5601 | if (fmt[i] == 'e') | |
5602 | { | |
5603 | rtx tem = XEXP (x, i); | |
5604 | ||
5605 | /* If we are about to do the last recursive call | |
47a3dae1 | 5606 | needed at this level, change it into iteration. */ |
a13d4ebf AM |
5607 | if (i == 0) |
5608 | { | |
5609 | x = tem; | |
5610 | goto repeat; | |
5611 | } | |
589005ff | 5612 | |
47a3dae1 | 5613 | accum = extract_mentioned_regs_helper (tem, accum); |
a13d4ebf AM |
5614 | } |
5615 | else if (fmt[i] == 'E') | |
5616 | { | |
5617 | int j; | |
589005ff | 5618 | |
a13d4ebf | 5619 | for (j = 0; j < XVECLEN (x, i); j++) |
47a3dae1 | 5620 | accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum); |
a13d4ebf AM |
5621 | } |
5622 | } | |
5623 | ||
47a3dae1 | 5624 | return accum; |
a13d4ebf AM |
5625 | } |
5626 | ||
47a3dae1 ZD |
5627 | /* Determine whether INSN is a MEM store pattern that we will consider moving.
5628 | REGS_SET_BEFORE is bitmap of registers set before (and including) the | |
5629 | current insn, REGS_SET_AFTER is bitmap of registers set after (and | |
5630 | including) the insn in this basic block. We must be passing through BB from | |
5631 | head to end, as we are using this fact to speed things up. | |
1d088dee | 5632 | |
47a3dae1 ZD |
5633 | The results are stored this way: |
5634 | ||
5635 | -- the first anticipatable expression is added into ANTIC_STORE_LIST | |
5636 | -- if the processed expression is not anticipatable, NULL_RTX is added | |
5637 | there instead, so that we can use it as indicator that no further | |
5638 | expression of this type may be anticipatable | |
5639 | -- if the expression is available, it is added as head of AVAIL_STORE_LIST; | |
5640 | consequently, all of them but this head are dead and may be deleted. | |
5641 | -- if the expression is not available, the insn due to that it fails to be | |
5642 | available is stored in reaching_reg. | |
5643 | ||
5644 | Things are complicated a bit by the fact that there may already be stores
5645 | to the same MEM from other blocks; also, the caller must take care of the
e0bb17a8 | 5646 | necessary cleanup of the temporary markers after the end of the basic block.
47a3dae1 | 5647 | */ |
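/* As a rough illustration, in a block of the form

     (set (mem) (reg A))	;; nothing kills the mem before it:
				;; heads ANTIC_STORE_LIST
     ... an insn reading (mem) ...
     (set (mem) (reg B))	;; reaches the block end unkilled:
				;; heads AVAIL_STORE_LIST

   the same ls_expr records the first store as anticipatable and the
   second as available.  */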
a13d4ebf AM |
5648 | |
5649 | static void | |
1d088dee | 5650 | find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after) |
a13d4ebf AM |
5651 | { |
5652 | struct ls_expr * ptr; | |
47a3dae1 ZD |
5653 | rtx dest, set, tmp; |
5654 | int check_anticipatable, check_available; | |
5655 | basic_block bb = BLOCK_FOR_INSN (insn); | |
a13d4ebf | 5656 | |
47a3dae1 ZD |
5657 | set = single_set (insn); |
5658 | if (!set) | |
a13d4ebf AM |
5659 | return; |
5660 | ||
47a3dae1 | 5661 | dest = SET_DEST (set); |
589005ff | 5662 | |
7b1b4aed | 5663 | if (! MEM_P (dest) || MEM_VOLATILE_P (dest) |
a13d4ebf | 5664 | || GET_MODE (dest) == BLKmode) |
aaa4ca30 AJ |
5665 | return; |
5666 | ||
47a3dae1 ZD |
5667 | if (side_effects_p (dest)) |
5668 | return; | |
aaa4ca30 | 5669 | |
47a3dae1 ZD |
5670 | /* If we are handling exceptions, we must be careful with memory references |
5671 | that may trap. If we are not, the behavior is undefined, so we may just | |
5672 | continue. */ | |
94f24ddc | 5673 | if (flag_non_call_exceptions && may_trap_p (dest)) |
47a3dae1 | 5674 | return; |
1d088dee | 5675 | |
c2e2375e UW |
5676 | /* Even if the destination cannot trap, the source may. In this case we'd |
5677 | need to handle updating the REG_EH_REGION note. */ | |
5678 | if (find_reg_note (insn, REG_EH_REGION, NULL_RTX)) | |
5679 | return; | |
5680 | ||
26fb114d SB |
5681 | /* Make sure that the SET_SRC of this store insn can be assigned to
5682 | a register, or we will fail later on in replace_store_insn, which
5683 | assumes that we can do this. But sometimes the target machine has
5684 | oddities like a MEM read-modify-write instruction. See for example
5685 | PR24257. */ | |
5686 | if (!can_assign_to_reg_p (SET_SRC (set))) | |
5687 | return; | |
5688 | ||
a13d4ebf | 5689 | ptr = ldst_entry (dest); |
47a3dae1 ZD |
5690 | if (!ptr->pattern_regs) |
5691 | ptr->pattern_regs = extract_mentioned_regs (dest); | |
5692 | ||
5693 | /* Do not check for anticipatability if we either found one anticipatable | |
5694 | store already, or tested for one and found out that it was killed. */ | |
5695 | check_anticipatable = 0; | |
5696 | if (!ANTIC_STORE_LIST (ptr)) | |
5697 | check_anticipatable = 1; | |
5698 | else | |
5699 | { | |
5700 | tmp = XEXP (ANTIC_STORE_LIST (ptr), 0); | |
5701 | if (tmp != NULL_RTX | |
5702 | && BLOCK_FOR_INSN (tmp) != bb) | |
5703 | check_anticipatable = 1; | |
5704 | } | |
5705 | if (check_anticipatable) | |
5706 | { | |
5707 | if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before)) | |
5708 | tmp = NULL_RTX; | |
5709 | else | |
5710 | tmp = insn; | |
5711 | ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp, | |
5712 | ANTIC_STORE_LIST (ptr)); | |
5713 | } | |
a13d4ebf | 5714 | |
e0bb17a8 | 5715 | /* It is not necessary to check whether the store is available if we did
47a3dae1 ZD |
5716 | it successfully before; if we failed before, do not bother to check |
5717 | until we reach the insn that caused us to fail. */ | |
5718 | check_available = 0; | |
5719 | if (!AVAIL_STORE_LIST (ptr)) | |
5720 | check_available = 1; | |
5721 | else | |
5722 | { | |
5723 | tmp = XEXP (AVAIL_STORE_LIST (ptr), 0); | |
5724 | if (BLOCK_FOR_INSN (tmp) != bb) | |
5725 | check_available = 1; | |
5726 | } | |
5727 | if (check_available) | |
5728 | { | |
5729 | /* Check whether we have already reached the insn at which the check
5730 | failed last time. */ | |
5731 | if (LAST_AVAIL_CHECK_FAILURE (ptr)) | |
5732 | { | |
a813c111 | 5733 | for (tmp = BB_END (bb); |
47a3dae1 ZD |
5734 | tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr); |
5735 | tmp = PREV_INSN (tmp)) | |
5736 | continue; | |
5737 | if (tmp == insn) | |
5738 | check_available = 0; | |
5739 | } | |
5740 | else | |
5741 | check_available = store_killed_after (dest, ptr->pattern_regs, insn, | |
5742 | bb, regs_set_after, | |
5743 | &LAST_AVAIL_CHECK_FAILURE (ptr)); | |
5744 | } | |
5745 | if (!check_available) | |
5746 | AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr)); | |
5747 | } | |
1d088dee | 5748 | |
47a3dae1 | 5749 | /* Find available and anticipatable stores. */ |
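/* Sketch of the loop below: each block is walked twice.  The first pass
   records in last_set_in the UID of the last insn setting each register;
   the second pass walks the insns again, growing already_set (registers
   set so far) and handing both arrays to find_moveable_store, then
   unmarking last_set_in entries once their setting insn has been passed.  */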
a13d4ebf AM |
5750 | |
5751 | static int | |
1d088dee | 5752 | compute_store_table (void) |
a13d4ebf | 5753 | { |
e0082a72 ZD |
5754 | int ret; |
5755 | basic_block bb; | |
aaa4ca30 | 5756 | unsigned regno; |
47a3dae1 ZD |
5757 | rtx insn, pat, tmp; |
5758 | int *last_set_in, *already_set; | |
5759 | struct ls_expr * ptr, **prev_next_ptr_ptr; | |
aaa4ca30 | 5760 | |
a13d4ebf AM |
5761 | max_gcse_regno = max_reg_num (); |
5762 | ||
703ad42b | 5763 | reg_set_in_block = sbitmap_vector_alloc (last_basic_block, |
aaa4ca30 | 5764 | max_gcse_regno); |
d55bc081 | 5765 | sbitmap_vector_zero (reg_set_in_block, last_basic_block); |
a13d4ebf | 5766 | pre_ldst_mems = 0; |
9727e468 RG |
5767 | pre_ldst_table = htab_create (13, pre_ldst_expr_hash, |
5768 | pre_ldst_expr_eq, NULL); | |
5ed6ace5 MD |
5769 | last_set_in = XCNEWVEC (int, max_gcse_regno); |
5770 | already_set = XNEWVEC (int, max_gcse_regno); | |
aaa4ca30 | 5771 | |
a13d4ebf | 5772 | /* Find all the stores we care about. */ |
e0082a72 | 5773 | FOR_EACH_BB (bb) |
a13d4ebf | 5774 | { |
47a3dae1 | 5775 | /* First compute the registers set in this block. */ |
47a3dae1 ZD |
5776 | regvec = last_set_in; |
5777 | ||
eb232f4e | 5778 | FOR_BB_INSNS (bb, insn) |
47a3dae1 ZD |
5779 | { |
5780 | if (! INSN_P (insn)) | |
5781 | continue; | |
5782 | ||
7b1b4aed | 5783 | if (CALL_P (insn)) |
47a3dae1 | 5784 | { |
47a3dae1 | 5785 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
6e14af16 | 5786 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) |
01c43039 RE |
5787 | { |
5788 | last_set_in[regno] = INSN_UID (insn); | |
5789 | SET_BIT (reg_set_in_block[bb->index], regno); | |
5790 | } | |
47a3dae1 ZD |
5791 | } |
5792 | ||
5793 | pat = PATTERN (insn); | |
5794 | compute_store_table_current_insn = insn; | |
01c43039 | 5795 | note_stores (pat, reg_set_info, reg_set_in_block[bb->index]); |
47a3dae1 ZD |
5796 | } |
5797 | ||
47a3dae1 ZD |
5798 | /* Now find the stores. */ |
5799 | memset (already_set, 0, sizeof (int) * max_gcse_regno); | |
5800 | regvec = already_set; | |
eb232f4e | 5801 | FOR_BB_INSNS (bb, insn) |
a13d4ebf | 5802 | { |
19652adf | 5803 | if (! INSN_P (insn)) |
a13d4ebf AM |
5804 | continue; |
5805 | ||
7b1b4aed | 5806 | if (CALL_P (insn)) |
aaa4ca30 AJ |
5807 | { |
5808 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
6e14af16 | 5809 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) |
47a3dae1 | 5810 | already_set[regno] = 1; |
aaa4ca30 | 5811 | } |
589005ff | 5812 | |
a13d4ebf | 5813 | pat = PATTERN (insn); |
aaa4ca30 | 5814 | note_stores (pat, reg_set_info, NULL); |
589005ff | 5815 | |
a13d4ebf | 5816 | /* Now that we've marked regs, look for stores. */ |
47a3dae1 ZD |
5817 | find_moveable_store (insn, already_set, last_set_in); |
5818 | ||
5819 | /* Unmark regs that are no longer set. */ | |
01c43039 RE |
5820 | compute_store_table_current_insn = insn; |
5821 | note_stores (pat, reg_clear_last_set, last_set_in); | |
7b1b4aed | 5822 | if (CALL_P (insn)) |
01c43039 | 5823 | { |
01c43039 | 5824 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
6e14af16 | 5825 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno) |
01c43039 RE |
5826 | && last_set_in[regno] == INSN_UID (insn)) |
5827 | last_set_in[regno] = 0; | |
5828 | } | |
47a3dae1 ZD |
5829 | } |
5830 | ||
01c43039 RE |
5831 | #ifdef ENABLE_CHECKING |
5832 | /* last_set_in should now be all-zero. */ | |
5833 | for (regno = 0; regno < max_gcse_regno; regno++) | |
282899df | 5834 | gcc_assert (!last_set_in[regno]); |
01c43039 RE |
5835 | #endif |
5836 | ||
47a3dae1 ZD |
5837 | /* Clear temporary marks. */ |
5838 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
5839 | { | |
5840 | LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX; | |
5841 | if (ANTIC_STORE_LIST (ptr) | |
5842 | && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX) | |
5843 | ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1); | |
5844 | } | |
5845 | } | |
5846 | ||
5847 | /* Remove the stores that are not available anywhere, as there will | |
5848 | be no opportunity to optimize them. */ | |
5849 | for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems; | |
5850 | ptr != NULL; | |
5851 | ptr = *prev_next_ptr_ptr) | |
5852 | { | |
5853 | if (!AVAIL_STORE_LIST (ptr)) | |
5854 | { | |
5855 | *prev_next_ptr_ptr = ptr->next; | |
9727e468 | 5856 | htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index); |
47a3dae1 | 5857 | free_ldst_entry (ptr); |
a13d4ebf | 5858 | } |
47a3dae1 ZD |
5859 | else |
5860 | prev_next_ptr_ptr = &ptr->next; | |
a13d4ebf AM |
5861 | } |
5862 | ||
5863 | ret = enumerate_ldsts (); | |
589005ff | 5864 | |
10d22567 | 5865 | if (dump_file) |
a13d4ebf | 5866 | { |
10d22567 ZD |
5867 | fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n"); |
5868 | print_ldst_list (dump_file); | |
a13d4ebf | 5869 | } |
589005ff | 5870 | |
47a3dae1 ZD |
5871 | free (last_set_in); |
5872 | free (already_set); | |
a13d4ebf AM |
5873 | return ret; |
5874 | } | |
5875 | ||
3b14e3af ZD |
5876 | /* Check to see if the load X is aliased with STORE_PATTERN. |
5877 | AFTER is true if we are checking the case when STORE_PATTERN occurs | |
5878 | after X. */
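/* In dependence terms: when the store comes after the load, only a
   write-after-read (anti) dependence lets the load kill it; when the
   store comes first, a read-after-write (true) dependence does.  Hence
   the choice between anti_dependence and true_dependence below.  */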
a13d4ebf | 5879 | |
47a3dae1 | 5880 | static bool |
ed7a4b4b | 5881 | load_kills_store (const_rtx x, const_rtx store_pattern, int after) |
a13d4ebf | 5882 | { |
3b14e3af ZD |
5883 | if (after) |
5884 | return anti_dependence (x, store_pattern); | |
5885 | else | |
5886 | return true_dependence (store_pattern, GET_MODE (store_pattern), x, | |
5887 | rtx_addr_varies_p); | |
a13d4ebf AM |
5888 | } |
5889 | ||
589005ff | 5890 | /* Go through the entire insn X, looking for any loads which might alias |
3b14e3af ZD |
5891 | STORE_PATTERN. Return true if found. |
5892 | AFTER is true if we are checking the case when STORE_PATTERN occurs | |
5893 | after the insn X. */ | |
a13d4ebf | 5894 | |
47a3dae1 | 5895 | static bool |
ed7a4b4b | 5896 | find_loads (const_rtx x, const_rtx store_pattern, int after) |
a13d4ebf AM |
5897 | { |
5898 | const char * fmt; | |
8e42ace1 | 5899 | int i, j; |
47a3dae1 | 5900 | int ret = false; |
a13d4ebf | 5901 | |
24a28584 | 5902 | if (!x) |
47a3dae1 | 5903 | return false; |
24a28584 | 5904 | |
589005ff | 5905 | if (GET_CODE (x) == SET) |
a13d4ebf AM |
5906 | x = SET_SRC (x); |
5907 | ||
7b1b4aed | 5908 | if (MEM_P (x)) |
a13d4ebf | 5909 | { |
3b14e3af | 5910 | if (load_kills_store (x, store_pattern, after)) |
47a3dae1 | 5911 | return true; |
a13d4ebf AM |
5912 | } |
5913 | ||
5914 | /* Recursively process the insn. */ | |
5915 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
589005ff | 5916 | |
a13d4ebf AM |
5917 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--) |
5918 | { | |
5919 | if (fmt[i] == 'e') | |
3b14e3af | 5920 | ret |= find_loads (XEXP (x, i), store_pattern, after); |
a13d4ebf AM |
5921 | else if (fmt[i] == 'E') |
5922 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3b14e3af | 5923 | ret |= find_loads (XVECEXP (x, i, j), store_pattern, after); |
a13d4ebf AM |
5924 | } |
5925 | return ret; | |
5926 | } | |
5927 | ||
1071bcbd | 5928 | static inline bool |
ed7a4b4b | 5929 | store_killed_in_pat (const_rtx x, const_rtx pat, int after) |
1071bcbd AO |
5930 | { |
5931 | if (GET_CODE (pat) == SET) | |
5932 | { | |
5933 | rtx dest = SET_DEST (pat); | |
5934 | ||
5935 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
5936 | dest = XEXP (dest, 0); | |
5937 | ||
5938 | /* Check for memory stores to aliased objects. */ | |
5939 | if (MEM_P (dest) | |
5940 | && !expr_equiv_p (dest, x)) | |
5941 | { | |
5942 | if (after) | |
5943 | { | |
5944 | if (output_dependence (dest, x)) | |
5945 | return true; | |
5946 | } | |
5947 | else | |
5948 | { | |
5949 | if (output_dependence (x, dest)) | |
5950 | return true; | |
5951 | } | |
5952 | } | |
5953 | } | |
5954 | ||
5955 | if (find_loads (pat, x, after)) | |
5956 | return true; | |
5957 | ||
5958 | return false; | |
5959 | } | |
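/* A note on the argument order above: roughly, output_dependence
   (FIRST, SECOND) tests a write-after-write dependence in program
   order, so with AFTER nonzero the insn's DEST precedes X, and
   otherwise X precedes DEST.  */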
5960 | ||
589005ff | 5961 | /* Check if INSN kills the store pattern X (is aliased with it). |
3b14e3af | 5962 | AFTER is true if we are checking the case when store X occurs |
3f117656 | 5963 | after the insn. Return true if it does. */ |
a13d4ebf | 5964 | |
47a3dae1 | 5965 | static bool |
ed7a4b4b | 5966 | store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after) |
a13d4ebf | 5967 | { |
ed7a4b4b | 5968 | const_rtx reg, base, note, pat; |
94f24ddc | 5969 | |
735e8085 | 5970 | if (!INSN_P (insn)) |
47a3dae1 | 5971 | return false; |
589005ff | 5972 | |
7b1b4aed | 5973 | if (CALL_P (insn)) |
a13d4ebf | 5974 | { |
1218665b JJ |
5975 | /* A normal or pure call might read from the pattern,
5976 | but a const call will not. */ | |
47a3dae1 ZD |
5977 | if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn)) |
5978 | return true; | |
5979 | ||
94f24ddc ZD |
5980 | /* But even a const call reads its parameters. Check whether the |
5981 | base of any of the registers used in the mem is the stack pointer. */
5982 | for (reg = x_regs; reg; reg = XEXP (reg, 1)) | |
5983 | { | |
bc083e18 | 5984 | base = find_base_term (XEXP (reg, 0)); |
94f24ddc ZD |
5985 | if (!base |
5986 | || (GET_CODE (base) == ADDRESS | |
5987 | && GET_MODE (base) == Pmode | |
5988 | && XEXP (base, 0) == stack_pointer_rtx)) | |
5989 | return true; | |
5990 | } | |
47a3dae1 ZD |
5991 | |
5992 | return false; | |
a13d4ebf | 5993 | } |
589005ff | 5994 | |
1071bcbd AO |
5995 | pat = PATTERN (insn); |
5996 | if (GET_CODE (pat) == SET) | |
a13d4ebf | 5997 | { |
1071bcbd | 5998 | if (store_killed_in_pat (x, pat, after)) |
d088acea | 5999 | return true; |
a13d4ebf | 6000 | } |
1071bcbd AO |
6001 | else if (GET_CODE (pat) == PARALLEL) |
6002 | { | |
6003 | int i; | |
6004 | ||
6005 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
6006 | if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after)) | |
6007 | return true; | |
6008 | } | |
d088acea ZD |
6009 | else if (find_loads (PATTERN (insn), x, after)) |
6010 | return true; | |
6011 | ||
6012 | /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory | |
6013 | location aliased with X, then this insn kills X. */ | |
6014 | note = find_reg_equal_equiv_note (insn); | |
6015 | if (! note) | |
6016 | return false; | |
6017 | note = XEXP (note, 0); | |
6018 | ||
6019 | /* However, if the note represents a must alias rather than a may | |
6020 | alias relationship, then it does not kill X. */ | |
6021 | if (expr_equiv_p (note, x)) | |
6022 | return false; | |
6023 | ||
6024 | /* See if there are any aliased loads in the note. */ | |
6025 | return find_loads (note, x, after); | |
a13d4ebf AM |
6026 | } |
6027 | ||
47a3dae1 ZD |
6028 | /* Returns true if the expression X is loaded or clobbered on or after INSN |
6029 | within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
6030 | or after the insn. X_REGS is the list of registers mentioned in X. If the
6031 | store is killed, return in FAIL_INSN the last insn in which that happens. */
a13d4ebf | 6032 | |
47a3dae1 | 6033 | static bool |
ed7a4b4b | 6034 | store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb, |
1d088dee | 6035 | int *regs_set_after, rtx *fail_insn) |
a13d4ebf | 6036 | { |
a813c111 | 6037 | rtx last = BB_END (bb), act; |
aaa4ca30 | 6038 | |
47a3dae1 | 6039 | if (!store_ops_ok (x_regs, regs_set_after)) |
1d088dee | 6040 | { |
47a3dae1 ZD |
6041 | /* We do not know where it will happen. */ |
6042 | if (fail_insn) | |
6043 | *fail_insn = NULL_RTX; | |
6044 | return true; | |
6045 | } | |
a13d4ebf | 6046 | |
47a3dae1 ZD |
6047 | /* Scan from the end, so that fail_insn is determined correctly. */ |
6048 | for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act)) | |
3b14e3af | 6049 | if (store_killed_in_insn (x, x_regs, act, false)) |
47a3dae1 ZD |
6050 | { |
6051 | if (fail_insn) | |
6052 | *fail_insn = act; | |
6053 | return true; | |
6054 | } | |
589005ff | 6055 | |
47a3dae1 | 6056 | return false; |
a13d4ebf | 6057 | } |
1d088dee | 6058 | |
47a3dae1 ZD |
6059 | /* Returns true if the expression X is loaded or clobbered on or before INSN |
6060 | within basic block BB. X_REGS is the list of registers mentioned in X.
6061 | REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
6062 | static bool | |
ed7a4b4b | 6063 | store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb, |
1d088dee | 6064 | int *regs_set_before) |
a13d4ebf | 6065 | { |
a813c111 | 6066 | rtx first = BB_HEAD (bb); |
a13d4ebf | 6067 | |
47a3dae1 ZD |
6068 | if (!store_ops_ok (x_regs, regs_set_before)) |
6069 | return true; | |
a13d4ebf | 6070 | |
47a3dae1 | 6071 | for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn)) |
3b14e3af | 6072 | if (store_killed_in_insn (x, x_regs, insn, true)) |
47a3dae1 | 6073 | return true; |
589005ff | 6074 | |
47a3dae1 | 6075 | return false; |
a13d4ebf | 6076 | } |
1d088dee | 6077 | |
47a3dae1 ZD |
6078 | /* Fill in available, anticipatable, transparent and kill vectors in |
6079 | STORE_DATA, based on lists of available and anticipatable stores. */ | |
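/* Roughly: st_antloc marks blocks in which a store occurs before
   anything kills it (locally anticipatable), ae_gen marks blocks some
   store reaches the end of (locally available), transp marks blocks
   that neither read nor clobber the memory (transparent), and ae_kill
   marks blocks that do.  These local properties feed the reverse LCM
   problem solved in store_motion.  */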
a13d4ebf | 6080 | static void |
1d088dee | 6081 | build_store_vectors (void) |
a13d4ebf | 6082 | { |
47a3dae1 ZD |
6083 | basic_block bb; |
6084 | int *regs_set_in_block; | |
a13d4ebf AM |
6085 | rtx insn, st; |
6086 | struct ls_expr * ptr; | |
47a3dae1 | 6087 | unsigned regno; |
a13d4ebf AM |
6088 | |
6089 | /* Build the gen_vector. This is any store in the table which is not killed | |
6090 | by aliasing later in its block. */ | |
703ad42b | 6091 | ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 6092 | sbitmap_vector_zero (ae_gen, last_basic_block); |
a13d4ebf | 6093 | |
703ad42b | 6094 | st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 6095 | sbitmap_vector_zero (st_antloc, last_basic_block); |
aaa4ca30 | 6096 | |
a13d4ebf | 6097 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) |
589005ff | 6098 | { |
47a3dae1 | 6099 | for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1)) |
a13d4ebf AM |
6100 | { |
6101 | insn = XEXP (st, 0); | |
e2d2ed72 | 6102 | bb = BLOCK_FOR_INSN (insn); |
589005ff | 6103 | |
47a3dae1 ZD |
6104 | /* If we've already seen an available expression in this block, |
6105 | we can delete this one (it occurs earlier in the block). We'll
6106 | copy the SRC expression to an unused register in case there | |
6107 | are any side effects. */ | |
6108 | if (TEST_BIT (ae_gen[bb->index], ptr->index)) | |
a13d4ebf | 6109 | { |
47a3dae1 | 6110 | rtx r = gen_reg_rtx (GET_MODE (ptr->pattern)); |
10d22567 ZD |
6111 | if (dump_file) |
6112 | fprintf (dump_file, "Removing redundant store:\n"); | |
d088acea | 6113 | replace_store_insn (r, XEXP (st, 0), bb, ptr); |
47a3dae1 | 6114 | continue; |
a13d4ebf | 6115 | } |
47a3dae1 | 6116 | SET_BIT (ae_gen[bb->index], ptr->index); |
a13d4ebf | 6117 | } |
589005ff | 6118 | |
47a3dae1 ZD |
6119 | for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1)) |
6120 | { | |
6121 | insn = XEXP (st, 0); | |
6122 | bb = BLOCK_FOR_INSN (insn); | |
6123 | SET_BIT (st_antloc[bb->index], ptr->index); | |
6124 | } | |
a13d4ebf | 6125 | } |
589005ff | 6126 | |
703ad42b | 6127 | ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 6128 | sbitmap_vector_zero (ae_kill, last_basic_block); |
a13d4ebf | 6129 | |
703ad42b | 6130 | transp = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 6131 | sbitmap_vector_zero (transp, last_basic_block); |
5ed6ace5 | 6132 | regs_set_in_block = XNEWVEC (int, max_gcse_regno); |
a13d4ebf | 6133 | |
47a3dae1 ZD |
6134 | FOR_EACH_BB (bb) |
6135 | { | |
6136 | for (regno = 0; regno < max_gcse_regno; regno++) | |
6137 | regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno); | |
6138 | ||
6139 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6140 | { | |
a813c111 | 6141 | if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb), |
47a3dae1 ZD |
6142 | bb, regs_set_in_block, NULL)) |
6143 | { | |
e0bb17a8 | 6144 | /* It should not be necessary to consider the expression |
47a3dae1 ZD |
6145 | killed if it is both anticipatable and available. */ |
6146 | if (!TEST_BIT (st_antloc[bb->index], ptr->index) | |
6147 | || !TEST_BIT (ae_gen[bb->index], ptr->index)) | |
6148 | SET_BIT (ae_kill[bb->index], ptr->index); | |
1d088dee AJ |
6149 | } |
6150 | else | |
6151 | SET_BIT (transp[bb->index], ptr->index); | |
6152 | } | |
47a3dae1 ZD |
6153 | } |
6154 | ||
6155 | free (regs_set_in_block); | |
aaa4ca30 | 6156 | |
10d22567 | 6157 | if (dump_file) |
aaa4ca30 | 6158 | { |
10d22567 ZD |
6159 | dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block); |
6160 | dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block); | |
6161 | dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block); | |
6162 | dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block); | |
a13d4ebf AM |
6163 | } |
6164 | } | |
6165 | ||
fbe5a4a6 | 6166 | /* Insert an instruction at the beginning of a basic block, and update |
a813c111 | 6167 | the BB_HEAD if needed. */ |
a13d4ebf | 6168 | |
589005ff | 6169 | static void |
6fb5fa3c | 6170 | insert_insn_start_basic_block (rtx insn, basic_block bb) |
a13d4ebf AM |
6171 | { |
6172 | /* Insert at start of successor block. */ | |
a813c111 SB |
6173 | rtx prev = PREV_INSN (BB_HEAD (bb)); |
6174 | rtx before = BB_HEAD (bb); | |
a13d4ebf AM |
6175 | while (before != 0) |
6176 | { | |
7b1b4aed | 6177 | if (! LABEL_P (before) |
a38e7aa5 | 6178 | && !NOTE_INSN_BASIC_BLOCK_P (before)) |
a13d4ebf AM |
6179 | break; |
6180 | prev = before; | |
a813c111 | 6181 | if (prev == BB_END (bb)) |
a13d4ebf AM |
6182 | break; |
6183 | before = NEXT_INSN (before); | |
6184 | } | |
6185 | ||
6fb5fa3c | 6186 | insn = emit_insn_after_noloc (insn, prev, bb); |
a13d4ebf | 6187 | |
10d22567 | 6188 | if (dump_file) |
a13d4ebf | 6189 | { |
10d22567 | 6190 | fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n", |
0b17ab2f | 6191 | bb->index); |
10d22567 ZD |
6192 | print_inline_rtx (dump_file, insn, 6); |
6193 | fprintf (dump_file, "\n"); | |
a13d4ebf AM |
6194 | } |
6195 | } | |
6196 | ||
6197 | /* This routine will insert a store on an edge. EXPR is the ldst entry for | |
cc2902df | 6198 | the memory reference, and E is the edge to insert it on. Returns nonzero |
a13d4ebf AM |
6199 | if an edge insertion was performed. */ |
6200 | ||
6201 | static int | |
1d088dee | 6202 | insert_store (struct ls_expr * expr, edge e) |
a13d4ebf AM |
6203 | { |
6204 | rtx reg, insn; | |
e2d2ed72 | 6205 | basic_block bb; |
a13d4ebf | 6206 | edge tmp; |
628f6a4e | 6207 | edge_iterator ei; |
a13d4ebf AM |
6208 | |
6209 | /* We did all the deletes before this insert, so if we didn't delete a
6210 | store, then we haven't set the reaching reg yet either. */ | |
6211 | if (expr->reaching_reg == NULL_RTX) | |
6212 | return 0; | |
6213 | ||
a0c8285b JH |
6214 | if (e->flags & EDGE_FAKE) |
6215 | return 0; | |
6216 | ||
a13d4ebf | 6217 | reg = expr->reaching_reg; |
47a3dae1 | 6218 | insn = gen_move_insn (copy_rtx (expr->pattern), reg); |
589005ff | 6219 | |
a13d4ebf AM |
6220 | /* If we are inserting this expression on ALL predecessor edges of a BB, |
6221 | insert it at the start of the BB, and reset the insert bits on the other | |
ff7cc307 | 6222 | edges so we don't try to insert it on them again. */
e2d2ed72 | 6223 | bb = e->dest; |
628f6a4e | 6224 | FOR_EACH_EDGE (tmp, ei, e->dest->preds) |
3f2eae23 | 6225 | if (!(tmp->flags & EDGE_FAKE)) |
a0c8285b JH |
6226 | { |
6227 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
282899df NS |
6228 | |
6229 | gcc_assert (index != EDGE_INDEX_NO_EDGE); | |
a0c8285b JH |
6230 | if (! TEST_BIT (pre_insert_map[index], expr->index)) |
6231 | break; | |
6232 | } | |
a13d4ebf AM |
6233 | |
6234 | /* If tmp is NULL, we found an insertion on every edge, blank the | |
6235 | insertion vector for these edges, and insert at the start of the BB. */ | |
e2d2ed72 | 6236 | if (!tmp && bb != EXIT_BLOCK_PTR) |
a13d4ebf | 6237 | { |
628f6a4e | 6238 | FOR_EACH_EDGE (tmp, ei, e->dest->preds) |
a13d4ebf AM |
6239 | { |
6240 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
6241 | RESET_BIT (pre_insert_map[index], expr->index); | |
6242 | } | |
6fb5fa3c | 6243 | insert_insn_start_basic_block (insn, bb); |
a13d4ebf AM |
6244 | return 0; |
6245 | } | |
589005ff | 6246 | |
b16aa8a5 RK |
6247 | /* We can't put stores in the front of blocks pointed to by abnormal |
6248 | edges since that may put a store where one didn't use to be. */
6249 | gcc_assert (!(e->flags & EDGE_ABNORMAL)); | |
a13d4ebf AM |
6250 | |
6251 | insert_insn_on_edge (insn, e); | |
589005ff | 6252 | |
10d22567 | 6253 | if (dump_file) |
a13d4ebf | 6254 | { |
10d22567 | 6255 | fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n", |
0b17ab2f | 6256 | e->src->index, e->dest->index); |
10d22567 ZD |
6257 | print_inline_rtx (dump_file, insn, 6); |
6258 | fprintf (dump_file, "\n"); | |
a13d4ebf | 6259 | } |
589005ff | 6260 | |
a13d4ebf AM |
6261 | return 1; |
6262 | } | |
6263 | ||
d088acea ZD |
6264 | /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the |
6265 | memory location in SMEXPR set in basic block BB. | |
6266 | ||
6267 | This could be rather expensive. */ | |
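/* The walk below is an iterative depth-first search over the blocks
   reachable from BB, using an explicit stack of edge iterators.  In a
   block where the store is anticipated, only the insns up to that store
   are scanned for stale notes.  */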
6268 | ||
6269 | static void | |
6270 | remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr) | |
6271 | { | |
628f6a4e BE |
6272 | edge_iterator *stack, ei; |
6273 | int sp; | |
6274 | edge act; | |
d088acea | 6275 | sbitmap visited = sbitmap_alloc (last_basic_block); |
d088acea ZD |
6276 | rtx last, insn, note; |
6277 | rtx mem = smexpr->pattern; | |
6278 | ||
5ed6ace5 | 6279 | stack = XNEWVEC (edge_iterator, n_basic_blocks); |
628f6a4e BE |
6280 | sp = 0; |
6281 | ei = ei_start (bb->succs); | |
6282 | ||
d088acea | 6283 | sbitmap_zero (visited); |
d088acea | 6284 | |
f76ccf60 | 6285 | act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL); |
d088acea ZD |
6286 | while (1) |
6287 | { | |
6288 | if (!act) | |
6289 | { | |
628f6a4e | 6290 | if (!sp) |
d088acea ZD |
6291 | { |
6292 | free (stack); | |
6293 | sbitmap_free (visited); | |
6294 | return; | |
6295 | } | |
628f6a4e | 6296 | act = ei_edge (stack[--sp]); |
d088acea ZD |
6297 | } |
6298 | bb = act->dest; | |
7b1b4aed | 6299 | |
d088acea | 6300 | if (bb == EXIT_BLOCK_PTR |
d1c6a401 | 6301 | || TEST_BIT (visited, bb->index)) |
d088acea | 6302 | { |
628f6a4e BE |
6303 | if (!ei_end_p (ei)) |
6304 | ei_next (&ei); | |
6305 | act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL; | |
d088acea ZD |
6306 | continue; |
6307 | } | |
6308 | SET_BIT (visited, bb->index); | |
6309 | ||
6310 | if (TEST_BIT (st_antloc[bb->index], smexpr->index)) | |
6311 | { | |
6312 | for (last = ANTIC_STORE_LIST (smexpr); | |
6313 | BLOCK_FOR_INSN (XEXP (last, 0)) != bb; | |
6314 | last = XEXP (last, 1)) | |
6315 | continue; | |
6316 | last = XEXP (last, 0); | |
6317 | } | |
6318 | else | |
a813c111 | 6319 | last = NEXT_INSN (BB_END (bb)); |
7b1b4aed | 6320 | |
a813c111 | 6321 | for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn)) |
d088acea ZD |
6322 | if (INSN_P (insn)) |
6323 | { | |
6324 | note = find_reg_equal_equiv_note (insn); | |
6325 | if (!note || !expr_equiv_p (XEXP (note, 0), mem)) | |
6326 | continue; | |
6327 | ||
10d22567 ZD |
6328 | if (dump_file) |
6329 | fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n", | |
d088acea ZD |
6330 | INSN_UID (insn)); |
6331 | remove_note (insn, note); | |
6332 | } | |
628f6a4e BE |
6333 | |
6334 | if (!ei_end_p (ei)) | |
6335 | ei_next (&ei); | |
6336 | act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL; | |
6337 | ||
6338 | if (EDGE_COUNT (bb->succs) > 0) | |
d088acea ZD |
6339 | { |
6340 | if (act) | |
628f6a4e BE |
6341 | stack[sp++] = ei; |
6342 | ei = ei_start (bb->succs); | |
f76ccf60 | 6343 | act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL); |
d088acea ZD |
6344 | } |
6345 | } | |
6346 | } | |
6347 | ||
a13d4ebf AM |
6348 | /* This routine will replace a store with a SET to a specified register. */ |
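/* That is, a deleted store (set (mem) SRC) becomes (set REG SRC), so
   REG carries the stored value; the memory store itself is re-created
   later on the edges chosen by LCM (see insert_store).  */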
6349 | ||
6350 | static void | |
d088acea | 6351 | replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr) |
a13d4ebf | 6352 | { |
d7fe1183 | 6353 | rtx insn, mem, note, set, ptr, pair; |
589005ff | 6354 | |
d088acea | 6355 | mem = smexpr->pattern; |
9a318d30 | 6356 | insn = gen_move_insn (reg, SET_SRC (single_set (del))); |
589005ff | 6357 | |
d088acea ZD |
6358 | for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1)) |
6359 | if (XEXP (ptr, 0) == del) | |
6360 | { | |
6361 | XEXP (ptr, 0) = insn; | |
6362 | break; | |
6363 | } | |
d7fe1183 ZD |
6364 | |
6365 | /* Move the notes from the deleted insn to its replacement, and patch | |
6366 | up the LIBCALL notes. */ | |
6367 | REG_NOTES (insn) = REG_NOTES (del); | |
6368 | ||
6369 | note = find_reg_note (insn, REG_RETVAL, NULL_RTX); | |
6370 | if (note) | |
6371 | { | |
6372 | pair = XEXP (note, 0); | |
6373 | note = find_reg_note (pair, REG_LIBCALL, NULL_RTX); | |
6374 | XEXP (note, 0) = insn; | |
6375 | } | |
6376 | note = find_reg_note (insn, REG_LIBCALL, NULL_RTX); | |
6377 | if (note) | |
6378 | { | |
6379 | pair = XEXP (note, 0); | |
6380 | note = find_reg_note (pair, REG_RETVAL, NULL_RTX); | |
6381 | XEXP (note, 0) = insn; | |
6382 | } | |
6383 | ||
bfff9190 SP |
6384 | /* Emit the insn AFTER all the notes are transferred. |
6385 | This is cheaper since we avoid df rescanning for the note change. */ | |
6386 | insn = emit_insn_after (insn, del); | |
6387 | ||
6388 | if (dump_file) | |
6389 | { | |
6390 | fprintf (dump_file, | |
6391 | "STORE_MOTION delete insn in BB %d:\n ", bb->index); | |
6392 | print_inline_rtx (dump_file, del, 6); | |
6393 | fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n "); | |
6394 | print_inline_rtx (dump_file, insn, 6); | |
6395 | fprintf (dump_file, "\n"); | |
6396 | } | |
6397 | ||
49ce134f | 6398 | delete_insn (del); |
d088acea ZD |
6399 | |
6400 | /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6401 | they are no longer accurate if they are reached by this
6402 | definition, so drop them. */ | |
a813c111 | 6403 | for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn)) |
d088acea ZD |
6404 | if (INSN_P (insn)) |
6405 | { | |
6406 | set = single_set (insn); | |
6407 | if (!set) | |
6408 | continue; | |
6409 | if (expr_equiv_p (SET_DEST (set), mem)) | |
6410 | return; | |
6411 | note = find_reg_equal_equiv_note (insn); | |
6412 | if (!note || !expr_equiv_p (XEXP (note, 0), mem)) | |
6413 | continue; | |
6414 | ||
10d22567 ZD |
6415 | if (dump_file) |
6416 | fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n", | |
d088acea ZD |
6417 | INSN_UID (insn)); |
6418 | remove_note (insn, note); | |
6419 | } | |
6420 | remove_reachable_equiv_notes (bb, smexpr); | |
a13d4ebf AM |
6421 | } |
6422 | ||
6423 | ||
6424 | /* Delete a store, but copy the value that would have been stored into | |
6425 | the reaching_reg for later storing. */ | |
6426 | ||
6427 | static void | |
1d088dee | 6428 | delete_store (struct ls_expr * expr, basic_block bb) |
a13d4ebf AM |
6429 | { |
6430 | rtx reg, i, del; | |
6431 | ||
6432 | if (expr->reaching_reg == NULL_RTX) | |
6433 | expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern)); | |
a13d4ebf | 6434 | |
a13d4ebf | 6435 | reg = expr->reaching_reg; |
589005ff | 6436 | |
a13d4ebf AM |
6437 | for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1)) |
6438 | { | |
6439 | del = XEXP (i, 0); | |
e2d2ed72 | 6440 | if (BLOCK_FOR_INSN (del) == bb) |
a13d4ebf | 6441 | { |
589005ff | 6442 | /* We know there is only one since we deleted redundant |
a13d4ebf | 6443 | ones during the available computation. */ |
d088acea | 6444 | replace_store_insn (reg, del, bb, expr); |
a13d4ebf AM |
6445 | break; |
6446 | } | |
6447 | } | |
6448 | } | |
6449 | ||
6450 | /* Free memory used by store motion. */ | |
6451 | ||
589005ff | 6452 | static void |
1d088dee | 6453 | free_store_memory (void) |
a13d4ebf AM |
6454 | { |
6455 | free_ldst_mems (); | |
589005ff | 6456 | |
a13d4ebf | 6457 | if (ae_gen) |
5a660bff | 6458 | sbitmap_vector_free (ae_gen); |
a13d4ebf | 6459 | if (ae_kill) |
5a660bff | 6460 | sbitmap_vector_free (ae_kill); |
a13d4ebf | 6461 | if (transp) |
5a660bff | 6462 | sbitmap_vector_free (transp); |
a13d4ebf | 6463 | if (st_antloc) |
5a660bff | 6464 | sbitmap_vector_free (st_antloc); |
a13d4ebf | 6465 | if (pre_insert_map) |
5a660bff | 6466 | sbitmap_vector_free (pre_insert_map); |
a13d4ebf | 6467 | if (pre_delete_map) |
5a660bff | 6468 | sbitmap_vector_free (pre_delete_map); |
aaa4ca30 AJ |
6469 | if (reg_set_in_block) |
6470 | sbitmap_vector_free (reg_set_in_block); | |
589005ff | 6471 | |
a13d4ebf AM |
6472 | ae_gen = ae_kill = transp = st_antloc = NULL; |
6473 | pre_insert_map = pre_delete_map = reg_set_in_block = NULL; | |
6474 | } | |
6475 | ||
6476 | /* Perform store motion. Much like gcse, except we move expressions the | |
6477 | other way by looking at the flowgraph in reverse. */ | |
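/* In outline: (1) find the anticipatable and available stores
   (compute_store_table); (2) build the local dataflow vectors
   (build_store_vectors); (3) solve the edge-based LCM problem on the
   reverse graph (pre_edge_rev_lcm); (4) delete the stores marked for
   deletion and insert compensating stores on the marked edges.  */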
6478 | ||
6479 | static void | |
1d088dee | 6480 | store_motion (void) |
a13d4ebf | 6481 | { |
e0082a72 | 6482 | basic_block bb; |
0b17ab2f | 6483 | int x; |
a13d4ebf | 6484 | struct ls_expr * ptr; |
adfcce61 | 6485 | int update_flow = 0; |
aaa4ca30 | 6486 | |
10d22567 | 6487 | if (dump_file) |
a13d4ebf | 6488 | { |
10d22567 ZD |
6489 | fprintf (dump_file, "before store motion\n"); |
6490 | print_rtl (dump_file, get_insns ()); | |
a13d4ebf AM |
6491 | } |
6492 | ||
a13d4ebf | 6493 | init_alias_analysis (); |
aaa4ca30 | 6494 | |
47a3dae1 | 6495 | /* Find all the available and anticipatable stores. */ |
a13d4ebf AM |
6496 | num_stores = compute_store_table (); |
6497 | if (num_stores == 0) | |
6498 | { | |
9727e468 RG |
6499 | htab_delete (pre_ldst_table); |
6500 | pre_ldst_table = NULL; | |
aaa4ca30 | 6501 | sbitmap_vector_free (reg_set_in_block); |
a13d4ebf AM |
6502 | end_alias_analysis (); |
6503 | return; | |
6504 | } | |
6505 | ||
47a3dae1 | 6506 | /* Now compute kill & transp vectors. */ |
a13d4ebf | 6507 | build_store_vectors (); |
47a3dae1 | 6508 | add_noreturn_fake_exit_edges (); |
2a868ea4 | 6509 | connect_infinite_loops_to_exit (); |
a13d4ebf | 6510 | |
10d22567 | 6511 | edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen, |
589005ff | 6512 | st_antloc, ae_kill, &pre_insert_map, |
a13d4ebf AM |
6513 | &pre_delete_map); |
6514 | ||
6515 | /* Now we want to insert the new stores which are going to be needed. */ | |
6516 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6517 | { | |
b16aa8a5 RK |
6518 | /* If any of the edges we have above are abnormal, we can't move this |
6519 | store. */ | |
6520 | for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--) | |
6521 | if (TEST_BIT (pre_insert_map[x], ptr->index) | |
6522 | && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL)) | |
6523 | break; | |
6524 | ||
6525 | if (x >= 0) | |
6526 | { | |
10d22567 ZD |
6527 | if (dump_file != NULL) |
6528 | fprintf (dump_file, | |
b16aa8a5 RK |
6529 | "Can't replace store %d: abnormal edge from %d to %d\n", |
6530 | ptr->index, INDEX_EDGE (edge_list, x)->src->index, | |
6531 | INDEX_EDGE (edge_list, x)->dest->index); | |
6532 | continue; | |
6533 | } | |
6534 | ||
6535 | /* Delete the now-redundant stores, then insert the needed new ones. */
6536 | ||
e0082a72 ZD |
6537 | FOR_EACH_BB (bb) |
6538 | if (TEST_BIT (pre_delete_map[bb->index], ptr->index)) | |
6539 | delete_store (ptr, bb); | |
a13d4ebf | 6540 | |
0b17ab2f RH |
6541 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
6542 | if (TEST_BIT (pre_insert_map[x], ptr->index)) | |
6543 | update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x)); | |
a13d4ebf AM |
6544 | } |
6545 | ||
6546 | if (update_flow) | |
6547 | commit_edge_insertions (); | |
aaa4ca30 | 6548 | |
a13d4ebf AM |
6549 | free_store_memory (); |
6550 | free_edge_list (edge_list); | |
6809cbf9 | 6551 | remove_fake_exit_edges (); |
a13d4ebf AM |
6552 | end_alias_analysis (); |
6553 | } | |
e2500fed | 6554 | |
a0134312 RS |
6555 | \f |
6556 | /* Entry point for the jump bypassing optimization pass. */
6557 | ||
65727068 | 6558 | static int |
10d22567 | 6559 | bypass_jumps (void) |
a0134312 RS |
6560 | { |
6561 | int changed; | |
6562 | ||
6563 | /* We do not construct an accurate cfg in functions which call | |
6564 | setjmp, so just punt to be safe. */ | |
6565 | if (current_function_calls_setjmp) | |
6566 | return 0; | |
6567 | ||
a0134312 RS |
6568 | /* Identify the basic block information for this function, including |
6569 | successors and predecessors. */ | |
6570 | max_gcse_regno = max_reg_num (); | |
6571 | ||
10d22567 | 6572 | if (dump_file) |
5b4fdb20 | 6573 | dump_flow_info (dump_file, dump_flags); |
a0134312 | 6574 | |
6614fd40 | 6575 | /* Return if there's nothing to do, or it is too expensive. */ |
ab9a1ff8 SB |
6576 | if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1 |
6577 | || is_too_expensive (_ ("jump bypassing disabled"))) | |
a0134312 RS |
6578 | return 0; |
6579 | ||
a0134312 RS |
6580 | gcc_obstack_init (&gcse_obstack); |
6581 | bytes_used = 0; | |
6582 | ||
6583 | /* We need alias. */ | |
6584 | init_alias_analysis (); | |
6585 | ||
6586 | /* Record where pseudo-registers are set. This data is kept accurate | |
6587 | during each pass. ??? We could also record hard-reg information here | |
6588 | [since it's unchanging], however it is currently done during hash table | |
6589 | computation. | |
6590 | ||
6591 | It may be tempting to compute MEM set information here too, but MEM sets | |
6592 | will be subject to code motion one day and thus we need to compute | |
6593 | information about memory sets when we build the hash tables. */ | |
6594 | ||
6595 | alloc_reg_set_mem (max_gcse_regno); | |
eb232f4e | 6596 | compute_sets (); |
a0134312 RS |
6597 | |
6598 | max_gcse_regno = max_reg_num (); | |
eb232f4e SB |
6599 | alloc_gcse_mem (); |
6600 | changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true); | |
a0134312 RS |
6601 | free_gcse_mem (); |
6602 | ||
10d22567 | 6603 | if (dump_file) |
a0134312 | 6604 | { |
10d22567 | 6605 | fprintf (dump_file, "BYPASS of %s: %d basic blocks, ", |
faed5cc3 | 6606 | current_function_name (), n_basic_blocks); |
10d22567 | 6607 | fprintf (dump_file, "%d bytes\n\n", bytes_used); |
a0134312 RS |
6608 | } |
6609 | ||
6610 | obstack_free (&gcse_obstack, NULL); | |
6611 | free_reg_set_mem (); | |
6612 | ||
6613 | /* We are finished with alias. */ | |
6614 | end_alias_analysis (); | |
a0134312 RS |
6615 | |
6616 | return changed; | |
6617 | } | |
6618 | ||
d128effb NS |
6619 | /* Return true if the graph is too expensive to optimize. PASS is the |
6620 | optimization about to be performed. */ | |
6621 | ||
6622 | static bool | |
6623 | is_too_expensive (const char *pass) | |
6624 | { | |
6625 | /* Trying to perform global optimizations on flow graphs which have | |
6626 | a high connectivity will take a long time and is unlikely to be | |
6627 | particularly useful. | |
7b1b4aed | 6628 | |
d128effb NS |
6629 | In normal circumstances a cfg should have about twice as many |
6630 | edges as blocks. But we do not want to punish small functions | |
6631 | which have a couple of switch statements. Rather than simply
6632 | thresholding the number of blocks, use something with more
6633 | graceful degradation. */
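/* For example, a function with 1000 basic blocks may have up to
   20000 + 4 * 1000 = 24000 edges before this check fires.  */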
6634 | if (n_edges > 20000 + n_basic_blocks * 4) | |
6635 | { | |
44c21c7f DD |
6636 | warning (OPT_Wdisabled_optimization, |
6637 | "%s: %d basic blocks and %d edges/basic block", | |
6638 | pass, n_basic_blocks, n_edges / n_basic_blocks); | |
7b1b4aed | 6639 | |
d128effb NS |
6640 | return true; |
6641 | } | |
6642 | ||
6643 | /* If allocating memory for the cprop bitmap would take up too much | |
6644 | storage it's better just to disable the optimization. */ | |
6645 | if ((n_basic_blocks | |
6646 | * SBITMAP_SET_SIZE (max_reg_num ()) | |
6647 | * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY) | |
6648 | { | |
44c21c7f DD |
6649 | warning (OPT_Wdisabled_optimization, |
6650 | "%s: %d basic blocks and %d registers", | |
6651 | pass, n_basic_blocks, max_reg_num ()); | |
d128effb NS |
6652 | |
6653 | return true; | |
6654 | } | |
6655 | ||
6656 | return false; | |
6657 | } | |
ef330312 PB |
6658 | \f |
6659 | static bool | |
6660 | gate_handle_jump_bypass (void) | |
6661 | { | |
6662 | return optimize > 0 && flag_gcse; | |
6663 | } | |
6664 | ||
6665 | /* Perform jump bypassing and control flow optimizations. */ | |
c2924966 | 6666 | static unsigned int |
ef330312 PB |
6667 | rest_of_handle_jump_bypass (void) |
6668 | { | |
6fb5fa3c | 6669 | delete_unreachable_blocks (); |
10d22567 | 6670 | if (bypass_jumps ()) |
ef330312 | 6671 | { |
ef330312 | 6672 | delete_trivially_dead_insns (get_insns (), max_reg_num ()); |
6fb5fa3c DB |
6673 | rebuild_jump_labels (get_insns ()); |
6674 | cleanup_cfg (0); | |
ef330312 | 6675 | } |
c2924966 | 6676 | return 0; |
ef330312 PB |
6677 | } |
6678 | ||
6679 | struct tree_opt_pass pass_jump_bypass = | |
6680 | { | |
6681 | "bypass", /* name */ | |
6682 | gate_handle_jump_bypass, /* gate */ | |
6683 | rest_of_handle_jump_bypass, /* execute */ | |
6684 | NULL, /* sub */ | |
6685 | NULL, /* next */ | |
6686 | 0, /* static_pass_number */ | |
6687 | TV_BYPASS, /* tv_id */ | |
6688 | 0, /* properties_required */ | |
6689 | 0, /* properties_provided */ | |
6690 | 0, /* properties_destroyed */ | |
6691 | 0, /* todo_flags_start */ | |
6692 | TODO_dump_func | | |
6693 | TODO_ggc_collect | TODO_verify_flow, /* todo_flags_finish */ | |
6694 | 'G' /* letter */ | |
6695 | }; | |
6696 | ||
6697 | ||
6698 | static bool | |
6699 | gate_handle_gcse (void) | |
6700 | { | |
6701 | return optimize > 0 && flag_gcse; | |
6702 | } | |
6703 | ||
6704 | ||
c2924966 | 6705 | static unsigned int |
ef330312 PB |
6706 | rest_of_handle_gcse (void) |
6707 | { | |
6708 | int save_csb, save_cfj; | |
6709 | int tem2 = 0, tem; | |
10d22567 | 6710 | tem = gcse_main (get_insns ()); |
ef330312 | 6711 | delete_trivially_dead_insns (get_insns (), max_reg_num ()); |
6fb5fa3c | 6712 | rebuild_jump_labels (get_insns ()); |
ef330312 PB |
6713 | save_csb = flag_cse_skip_blocks; |
6714 | save_cfj = flag_cse_follow_jumps; | |
6715 | flag_cse_skip_blocks = flag_cse_follow_jumps = 0; | |
6716 | ||
6717 | /* If -fexpensive-optimizations, re-run CSE to clean up things done | |
6718 | by gcse. */ | |
6719 | if (flag_expensive_optimizations) | |
6720 | { | |
6721 | timevar_push (TV_CSE); | |
10d22567 | 6722 | tem2 = cse_main (get_insns (), max_reg_num ()); |
0d475361 | 6723 | df_finish_pass (false); |
ef330312 PB |
6724 | purge_all_dead_edges (); |
6725 | delete_trivially_dead_insns (get_insns (), max_reg_num ()); | |
6726 | timevar_pop (TV_CSE); | |
6727 | cse_not_expected = !flag_rerun_cse_after_loop; | |
6728 | } | |
6729 | ||
6730 | /* If gcse or cse altered any jumps, rerun jump optimizations to clean | |
6731 | things up. */ | |
6732 | if (tem || tem2) | |
6733 | { | |
6734 | timevar_push (TV_JUMP); | |
6735 | rebuild_jump_labels (get_insns ()); | |
6fb5fa3c | 6736 | cleanup_cfg (0); |
ef330312 PB |
6737 | timevar_pop (TV_JUMP); |
6738 | } | |
6739 | ||
6740 | flag_cse_skip_blocks = save_csb; | |
6741 | flag_cse_follow_jumps = save_cfj; | |
c2924966 | 6742 | return 0; |
ef330312 PB |
6743 | } |
6744 | ||
6745 | struct tree_opt_pass pass_gcse = | |
6746 | { | |
6747 | "gcse1", /* name */ | |
6748 | gate_handle_gcse, /* gate */ | |
6749 | rest_of_handle_gcse, /* execute */ | |
6750 | NULL, /* sub */ | |
6751 | NULL, /* next */ | |
6752 | 0, /* static_pass_number */ | |
6753 | TV_GCSE, /* tv_id */ | |
6754 | 0, /* properties_required */ | |
6755 | 0, /* properties_provided */ | |
6756 | 0, /* properties_destroyed */ | |
6757 | 0, /* todo_flags_start */ | |
a36b8a1e | 6758 | TODO_df_finish | TODO_verify_rtl_sharing | |
ef330312 PB |
6759 | TODO_dump_func | |
6760 | TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */ | |
6761 | 'G' /* letter */ | |
6762 | }; | |
6763 | ||
d128effb | 6764 | |
e2500fed | 6765 | #include "gt-gcse.h" |