/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free
/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
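
/* A purely illustrative, source-level sketch of the transformation PRE
   performs; this hypothetical helper is not part of the pass.  Before PRE,
   `b + c' below is partially redundant: it is recomputed on the path where
   COND was true.  PRE inserts the computation on the other path and rewrites
   both computations to use a new pseudo, modeled here by T.  */

static int
pre_example (cond, b, c)
     int cond, b, c;
{
  int a = 0, t, d;

  if (cond)
    a = t = b + c;   /* First occurrence, copied to the new pseudo.  */
  else
    t = b + c;       /* Insertion that makes the use below fully redundant.  */

  d = t;             /* Was `d = b + c'; now uses the reaching reg.  */
  return a + d;
}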
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
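
/* Illustrative sketch only; this helper is hypothetical and not part of
   the pass.  The occurrence lists above are simple singly-linked chains,
   walked like this wherever the pass visits every occurrence of an
   expression.  */

static int
count_occrs (occr)
     struct occr *occr;
{
  int n = 0;

  for (; occr != NULL; occr = occr->next)
    n++;

  return n;
}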

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static unsigned int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;
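
/* Minimal lookup sketch, for illustration only: each bucket is a chain
   linked through `next_same_hash'.  This hypothetical helper assumes HASH
   is already reduced modulo the table size; the pass's real lookup
   routines (lookup_expr, lookup_set) also use expr_equiv_p, declared
   further down, to compare patterns.  */

static struct expr *
bucket_search_example (table, hash, pat)
     struct expr **table;
     unsigned int hash;
     rtx pat;
{
  struct expr *expr;

  for (expr = table[hash]; expr != NULL; expr = expr->next_same_hash)
    if (expr_equiv_p (expr->expr, pat))
      return expr;

  return NULL;
}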

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
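
/* Sketch of the uid<->cuid round trip, assuming alloc_gcse_mem below has
   filled in both mapping tables.  This helper is hypothetical and shown
   only to illustrate the two macros; it returns the next real insn in
   cuid order, or NULL_RTX at the end.  */

static rtx
next_insn_by_cuid_example (insn)
     rtx insn;
{
  int cuid = INSN_CUID (insn);

  return cuid + 1 < max_cuid ? CUID_INSN (cuid + 1) : NULL_RTX;
}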

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
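
/* Illustrative sketch (hypothetical helper): consumers such as
   compute_transp walk the per-register chain recorded above in exactly
   this fashion, visiting one node per set of the pseudo.  */

static int
count_sets_of_reg_example (regno)
     int regno;
{
  reg_set *r;
  int n = 0;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    n++;

  return n;
}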

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must
   have no side effects so we can re-issue the setter value.
   Store motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;      /* Gcse expression reference for LM.  */
  rtx pattern;             /* Pattern of this mem.  */
  rtx loads;               /* INSN list of loads seen.  */
  rtx stores;              /* INSN list of stores seen.  */
  struct ls_expr * next;   /* Next in the list.  */
  int invalid;             /* Invalid for some reason.  */
  int index;               /* If it maps to a bitmap index.  */
  int hash_index;          /* Index when in a hash table.  */
  rtx reaching_reg;        /* Register to use when re-writing.  */
};

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;
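
/* Illustrative usage sketch: the list headed by `pre_ldst_mems' is
   singly linked through `next'; the accessors first_ls_expr and
   next_ls_expr declared below wrap this walk.  The helper here is
   hypothetical and shown for clarity only.  */

static void
print_ldst_count_example (file)
     FILE *file;
{
  struct ls_expr *ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    n++;

  fprintf (file, "%d load/store expressions tracked\n", n);
}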

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
				     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((rtx, rtx, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
#endif
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
					    basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
					      char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((varray_type *,
						  unsigned int *,
						  sbitmap *, sbitmap *,
						  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
						 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* Insertion of instructions on edges can create new basic blocks; we
     need the original basic block count so that we can properly deallocate
     arrays sized on the number of basic blocks originally in the cfg.  */
  int orig_bb_count;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  orig_bb_count = n_basic_blocks;

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias analysis.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);

	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      int i;

	      for (i = 0; i < orig_bb_count; i++)
		{
		  if (modify_mem_list[i])
		    free_INSN_LIST_list (modify_mem_list + i);
		  if (canon_modify_mem_list[i])
		    free_INSN_LIST_list (canon_modify_mem_list + i);
		}
	      modify_mem_list
		= (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
	      canon_modify_mem_list
		= (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
	      memset ((char *) modify_mem_list, 0,
		      n_basic_blocks * sizeof (rtx *));
	      memset ((char *) canon_modify_mem_list, 0,
		      n_basic_blocks * sizeof (rtx *));
	      orig_bb_count = n_basic_blocks;
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias analysis.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
  memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  {
    int i;

    for (i = 0; i < n_basic_blocks; i++)
      {
	if (modify_mem_list[i])
	  free_INSN_LIST_list (modify_mem_list + i);
	if (canon_modify_mem_list[i])
	  free_INSN_LIST_list (canon_modify_mem_list + i);
      }

    free (modify_mem_list);
    free (canon_modify_mem_list);
    modify_mem_list = 0;
    canon_modify_mem_list = 0;
  }
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
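
/* Usage sketch, illustrative only: a caller needing Y parallel equation
   sets over N blocks of X elements would solve them in windows of the
   returned width, allocating bitmaps of WIDTH columns per window instead
   of Y.  This skeleton is hypothetical, not a quote from any caller.  */

static void
solve_in_chunks_example (n, x, y)
     int n, x, y;
{
  int width = get_bitmap_width (n, x, y);
  int start;

  for (start = 0; start < y; start += width)
    {
      /* Solve equation sets [start, MIN (start + width, y)) here,
	 with bitmaps sized N x WIDTH.  */
    }
}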
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
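
/* Illustrative call shapes (the argument names below are descriptive
   placeholders, not quotes from the real callers): PRE and code hoisting
   pass SETP == 0 to read the expr hash table; cprop passes SETP == 1 so
   that TRANSP comes back inverted, serving as its "absaltered" set.

     compute_local_properties (transp, comp, antloc, 0);
     compute_local_properties (absaltered, pset, NULL, 1);
*/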
1224 | \f | |
1225 | /* Register set information. | |
1226 | ||
1227 | `reg_set_table' records where each register is set or otherwise | |
1228 | modified. */ | |
1229 | ||
1230 | static struct obstack reg_set_obstack; | |
1231 | ||
1232 | static void | |
1233 | alloc_reg_set_mem (n_regs) | |
1234 | int n_regs; | |
1235 | { | |
c4c81601 | 1236 | unsigned int n; |
7506f491 DE |
1237 | |
1238 | reg_set_table_size = n_regs + REG_SET_TABLE_SLOP; | |
1239 | n = reg_set_table_size * sizeof (struct reg_set *); | |
1240 | reg_set_table = (struct reg_set **) gmalloc (n); | |
961192e1 | 1241 | memset ((char *) reg_set_table, 0, n); |
7506f491 DE |
1242 | |
1243 | gcc_obstack_init (®_set_obstack); | |
1244 | } | |
1245 | ||
1246 | static void | |
1247 | free_reg_set_mem () | |
1248 | { | |
1249 | free (reg_set_table); | |
6496a589 | 1250 | obstack_free (®_set_obstack, NULL); |
7506f491 DE |
1251 | } |
1252 | ||
1253 | /* Record REGNO in the reg_set table. */ | |
1254 | ||
1255 | static void | |
1256 | record_one_set (regno, insn) | |
1257 | int regno; | |
1258 | rtx insn; | |
1259 | { | |
172890a2 | 1260 | /* Allocate a new reg_set element and link it onto the list. */ |
63bc1d05 | 1261 | struct reg_set *new_reg_info; |
7506f491 DE |
1262 | |
1263 | /* If the table isn't big enough, enlarge it. */ | |
1264 | if (regno >= reg_set_table_size) | |
1265 | { | |
1266 | int new_size = regno + REG_SET_TABLE_SLOP; | |
c4c81601 RK |
1267 | |
1268 | reg_set_table | |
1269 | = (struct reg_set **) grealloc ((char *) reg_set_table, | |
1270 | new_size * sizeof (struct reg_set *)); | |
961192e1 | 1271 | memset ((char *) (reg_set_table + reg_set_table_size), 0, |
7506f491 DE |
1272 | (new_size - reg_set_table_size) * sizeof (struct reg_set *)); |
1273 | reg_set_table_size = new_size; | |
1274 | } | |
1275 | ||
1276 | new_reg_info = (struct reg_set *) obstack_alloc (®_set_obstack, | |
1277 | sizeof (struct reg_set)); | |
1278 | bytes_used += sizeof (struct reg_set); | |
1279 | new_reg_info->insn = insn; | |
274969ea MM |
1280 | new_reg_info->next = reg_set_table[regno]; |
1281 | reg_set_table[regno] = new_reg_info; | |
7506f491 DE |
1282 | } |
1283 | ||
c4c81601 RK |
1284 | /* Called from compute_sets via note_stores to handle one SET or CLOBBER in |
1285 | an insn. The DATA is really the instruction in which the SET is | |
1286 | occurring. */ | |
7506f491 DE |
1287 | |
1288 | static void | |
84832317 | 1289 | record_set_info (dest, setter, data) |
50b2596f | 1290 | rtx dest, setter ATTRIBUTE_UNUSED; |
84832317 | 1291 | void *data; |
7506f491 | 1292 | { |
84832317 MM |
1293 | rtx record_set_insn = (rtx) data; |
1294 | ||
c4c81601 RK |
1295 | if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER) |
1296 | record_one_set (REGNO (dest), record_set_insn); | |
7506f491 DE |
1297 | } |
1298 | ||
1299 | /* Scan the function and record each set of each pseudo-register. | |
1300 | ||
c4c81601 RK |
1301 | This is called once, at the start of the gcse pass. See the comments for |
1302 | `reg_set_table' for further documentation. */ | |
7506f491 DE |
1303 | |
1304 | static void | |
1305 | compute_sets (f) | |
1306 | rtx f; | |
1307 | { | |
c4c81601 | 1308 | rtx insn; |
7506f491 | 1309 | |
c4c81601 | 1310 | for (insn = f; insn != 0; insn = NEXT_INSN (insn)) |
2c3c49de | 1311 | if (INSN_P (insn)) |
c4c81601 | 1312 | note_stores (PATTERN (insn), record_set_info, insn); |
7506f491 DE |
1313 | } |
1314 | \f | |
1315 | /* Hash table support. */ | |
1316 | ||
1317 | /* For each register, the CUID of the first/last insn in the block to set it,
e7d99f1e | 1318 | or -1 if not set. */ |
c4c81601 | 1319 | #define NEVER_SET -1 |
7506f491 DE |
1320 | static int *reg_first_set; |
1321 | static int *reg_last_set; | |
1322 | ||
1323 | /* While computing "first/last set" info, this is the CUID of first/last insn | |
e7d99f1e | 1324 | to set memory or -1 if not set. `mem_last_set' is also used when |
7506f491 DE |
1325 | performing GCSE to record whether memory has been set since the beginning |
1326 | of the block. | |
c4c81601 | 1327 | |
7506f491 DE |
1328 | Note that handling of memory is very simple; we don't make any attempt
1329 | to optimize things (later). */ | |
1330 | static int mem_first_set; | |
1331 | static int mem_last_set; | |
1332 | ||
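/* A sketch of how the first/last set arrays answer the two questions
   asked about a REG (oprs_unchanged_p below performs exactly these
   tests): with AVAIL_P != 0, ask whether the register is unchanged
   from INSN to the end of its block; with AVAIL_P == 0, ask whether
   it is unchanged from the start of the block up to INSN.  This
   helper is hypothetical and exists only for illustration.  */

static int
reg_unchanged_sketch (regno, insn, avail_p)
     int regno;
     rtx insn;
     int avail_p;
{
  if (avail_p)
    return (reg_last_set[regno] == NEVER_SET
            || reg_last_set[regno] < INSN_CUID (insn));

  return (reg_first_set[regno] == NEVER_SET
          || reg_first_set[regno] >= INSN_CUID (insn));
}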
fb0c0a12 RK |
1333 | /* See whether X, the source of a set, is something we want to consider for |
1334 | GCSE. */ | |
7506f491 DE |
1335 | |
1336 | static int | |
1337 | want_to_gcse_p (x) | |
1338 | rtx x; | |
1339 | { | |
fb0c0a12 RK |
1340 | static rtx test_insn = 0; |
1341 | int num_clobbers = 0; | |
1342 | int icode; | |
1343 | ||
c4c81601 | 1344 | switch (GET_CODE (x)) |
7506f491 DE |
1345 | { |
1346 | case REG: | |
1347 | case SUBREG: | |
1348 | case CONST_INT: | |
1349 | case CONST_DOUBLE: | |
1350 | case CALL: | |
1351 | return 0; | |
1352 | ||
1353 | default: | |
1354 | break; | |
1355 | } | |
1356 | ||
fb0c0a12 RK |
1357 | /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */ |
1358 | if (general_operand (x, GET_MODE (x))) | |
1359 | return 1; | |
1360 | else if (GET_MODE (x) == VOIDmode) | |
1361 | return 0; | |
1362 | ||
1363 | /* Otherwise, check if we can make a valid insn from it. First initialize | |
1364 | our test insn if we haven't already. */ | |
1365 | if (test_insn == 0) | |
1366 | { | |
1367 | test_insn | |
1368 | = make_insn_raw (gen_rtx_SET (VOIDmode, | |
1369 | gen_rtx_REG (word_mode, | |
1370 | FIRST_PSEUDO_REGISTER * 2), | |
1371 | const0_rtx)); | |
1372 | NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0; | |
1373 | ggc_add_rtx_root (&test_insn, 1); | |
1374 | } | |
1375 | ||
1376 | /* Now make an insn like the one we would make when GCSE'ing and see if | |
1377 | it is valid. */
1378 | PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x)); | |
1379 | SET_SRC (PATTERN (test_insn)) = x; | |
1380 | return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0 | |
1381 | && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode))); | |
7506f491 DE |
1382 | } |
1383 | ||
1384 | /* Return non-zero if the operands of expression X are unchanged from the | |
1385 | start of INSN's basic block up to but not including INSN (if AVAIL_P == 0), | |
1386 | or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */ | |
1387 | ||
1388 | static int | |
1389 | oprs_unchanged_p (x, insn, avail_p) | |
1390 | rtx x, insn; | |
1391 | int avail_p; | |
1392 | { | |
c4c81601 | 1393 | int i, j; |
7506f491 | 1394 | enum rtx_code code; |
6f7d635c | 1395 | const char *fmt; |
7506f491 | 1396 | |
7506f491 DE |
1397 | if (x == 0) |
1398 | return 1; | |
1399 | ||
1400 | code = GET_CODE (x); | |
1401 | switch (code) | |
1402 | { | |
1403 | case REG: | |
1404 | if (avail_p) | |
b86ba9c8 | 1405 | return (reg_last_set[REGNO (x)] == NEVER_SET |
7506f491 DE |
1406 | || reg_last_set[REGNO (x)] < INSN_CUID (insn)); |
1407 | else | |
b86ba9c8 | 1408 | return (reg_first_set[REGNO (x)] == NEVER_SET |
7506f491 DE |
1409 | || reg_first_set[REGNO (x)] >= INSN_CUID (insn)); |
1410 | ||
1411 | case MEM: | |
e2d2ed72 | 1412 | if (load_killed_in_block_p (BLOCK_FOR_INSN (insn), INSN_CUID (insn), |
a13d4ebf AM |
1413 | x, avail_p)) |
1414 | return 0; | |
c4c81601 RK |
1415 | if (avail_p && mem_last_set != NEVER_SET |
1416 | && mem_last_set >= INSN_CUID (insn)) | |
1417 | return 0; | |
1418 | else if (! avail_p && mem_first_set != NEVER_SET | |
1419 | && mem_first_set < INSN_CUID (insn)) | |
1420 | return 0; | |
7506f491 | 1421 | else |
c4c81601 | 1422 | return oprs_unchanged_p (XEXP (x, 0), insn, avail_p); |
7506f491 DE |
1423 | |
1424 | case PRE_DEC: | |
1425 | case PRE_INC: | |
1426 | case POST_DEC: | |
1427 | case POST_INC: | |
4b983fdc RH |
1428 | case PRE_MODIFY: |
1429 | case POST_MODIFY: | |
7506f491 DE |
1430 | return 0; |
1431 | ||
1432 | case PC: | |
1433 | case CC0: /*FIXME*/ | |
1434 | case CONST: | |
1435 | case CONST_INT: | |
1436 | case CONST_DOUBLE: | |
1437 | case SYMBOL_REF: | |
1438 | case LABEL_REF: | |
1439 | case ADDR_VEC: | |
1440 | case ADDR_DIFF_VEC: | |
1441 | return 1; | |
1442 | ||
1443 | default: | |
1444 | break; | |
1445 | } | |
1446 | ||
c4c81601 | 1447 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
1448 | { |
1449 | if (fmt[i] == 'e') | |
1450 | { | |
c4c81601 RK |
1451 | /* If we are about to do the last recursive call needed at this |
1452 | level, change it into iteration. This function is called enough | |
1453 | to be worth it. */ | |
7506f491 | 1454 | if (i == 0) |
c4c81601 RK |
1455 | return oprs_unchanged_p (XEXP (x, i), insn, avail_p); |
1456 | ||
1457 | else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p)) | |
7506f491 DE |
1458 | return 0; |
1459 | } | |
1460 | else if (fmt[i] == 'E') | |
c4c81601 RK |
1461 | for (j = 0; j < XVECLEN (x, i); j++) |
1462 | if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p)) | |
1463 | return 0; | |
7506f491 DE |
1464 | } |
1465 | ||
1466 | return 1; | |
1467 | } | |
1468 | ||
a13d4ebf AM |
1469 | /* Used for communication between mems_conflict_for_gcse_p and |
1470 | load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a | |
1471 | conflict between two memory references. */ | |
1472 | static int gcse_mems_conflict_p; | |
1473 | ||
1474 | /* Used for communication between mems_conflict_for_gcse_p and | |
1475 | load_killed_in_block_p. A memory reference for a load instruction;
1476 | mems_conflict_for_gcse_p will see if a memory store conflicts with | |
1477 | this memory load. */ | |
1478 | static rtx gcse_mem_operand; | |
1479 | ||
1480 | /* DEST is the output of an instruction. If it is a memory reference, and | |
1481 | possibly conflicts with the load found in gcse_mem_operand, then set | |
1482 | gcse_mems_conflict_p to a nonzero value. */ | |
1483 | ||
1484 | static void | |
1485 | mems_conflict_for_gcse_p (dest, setter, data) | |
1486 | rtx dest, setter ATTRIBUTE_UNUSED; | |
1487 | void *data ATTRIBUTE_UNUSED; | |
1488 | { | |
1489 | while (GET_CODE (dest) == SUBREG | |
1490 | || GET_CODE (dest) == ZERO_EXTRACT | |
1491 | || GET_CODE (dest) == SIGN_EXTRACT | |
1492 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1493 | dest = XEXP (dest, 0); | |
1494 | ||
1495 | /* If DEST is not a MEM, then it will not conflict with the load. Note | |
1496 | that function calls are assumed to clobber memory, but are handled | |
1497 | elsewhere. */ | |
1498 | if (GET_CODE (dest) != MEM) | |
1499 | return; | |
1500 | ||
1501 | /* If we are setting a MEM in our list of specially recognized MEMs, | |
1502 | don't mark it as killed this time. */
1503 | ||
1504 | if (dest == gcse_mem_operand && pre_ldst_mems != NULL) | |
1505 | { | |
1506 | if (!find_rtx_in_ldst (dest)) | |
1507 | gcse_mems_conflict_p = 1; | |
1508 | return; | |
1509 | } | |
1510 | ||
1511 | if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand, | |
1512 | rtx_addr_varies_p)) | |
1513 | gcse_mems_conflict_p = 1; | |
1514 | } | |
1515 | ||
1516 | /* Return nonzero if the expression in X (a memory reference) is killed | |
1517 | in block BB before or after the insn with the CUID in UID_LIMIT. | |
1518 | AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills | |
1519 | before UID_LIMIT. | |
1520 | ||
1521 | To check the entire block, set UID_LIMIT to max_uid + 1 and | |
1522 | AVAIL_P to 0. */ | |
1523 | ||
1524 | static int | |
1525 | load_killed_in_block_p (bb, uid_limit, x, avail_p) | |
e2d2ed72 | 1526 | basic_block bb; |
a13d4ebf AM |
1527 | int uid_limit; |
1528 | rtx x; | |
1529 | int avail_p; | |
1530 | { | |
e2d2ed72 | 1531 | rtx list_entry = modify_mem_list[bb->index]; |
a13d4ebf AM |
1532 | while (list_entry) |
1533 | { | |
1534 | rtx setter; | |
1535 | /* Ignore entries in the list that do not apply. */ | |
1536 | if ((avail_p | |
1537 | && INSN_CUID (XEXP (list_entry, 0)) < uid_limit) | |
1538 | || (! avail_p | |
1539 | && INSN_CUID (XEXP (list_entry, 0)) > uid_limit)) | |
1540 | { | |
1541 | list_entry = XEXP (list_entry, 1); | |
1542 | continue; | |
1543 | } | |
1544 | ||
1545 | setter = XEXP (list_entry, 0); | |
1546 | ||
1547 | /* If SETTER is a call, everything is clobbered. Note that calls
1548 | to pure functions are never put on the list, so we need not | |
1549 | worry about them. */ | |
1550 | if (GET_CODE (setter) == CALL_INSN) | |
1551 | return 1; | |
1552 | ||
1553 | /* SETTER must be an INSN of some kind that sets memory. Call | |
1554 | note_stores to examine each hunk of memory that is modified. | |
1555 | ||
1556 | The note_stores interface is pretty limited, so we have to | |
1557 | communicate via global variables. Yuk. */ | |
1558 | gcse_mem_operand = x; | |
1559 | gcse_mems_conflict_p = 0; | |
1560 | note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL); | |
1561 | if (gcse_mems_conflict_p) | |
1562 | return 1; | |
1563 | list_entry = XEXP (list_entry, 1); | |
1564 | } | |
1565 | return 0; | |
1566 | } | |
1567 | ||
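/* Example use, following the comment above load_killed_in_block_p:
   to ask whether load X is killed anywhere in block BB, pass a CUID
   bound past every insn and AVAIL_P == 0.  A hypothetical wrapper,
   assuming the caller tracks MAX_UID as elsewhere in this pass.  */

static int
load_killed_anywhere_in_block_sketch (bb, x, max_uid)
     basic_block bb;
     rtx x;
     int max_uid;
{
  return load_killed_in_block_p (bb, max_uid + 1, x, 0);
}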
7506f491 DE |
1568 | /* Return non-zero if the operands of expression X are unchanged from |
1569 | the start of INSN's basic block up to but not including INSN. */ | |
1570 | ||
1571 | static int | |
1572 | oprs_anticipatable_p (x, insn) | |
1573 | rtx x, insn; | |
1574 | { | |
1575 | return oprs_unchanged_p (x, insn, 0); | |
1576 | } | |
1577 | ||
1578 | /* Return non-zero if the operands of expression X are unchanged from | |
1579 | INSN to the end of INSN's basic block. */ | |
1580 | ||
1581 | static int | |
1582 | oprs_available_p (x, insn) | |
1583 | rtx x, insn; | |
1584 | { | |
1585 | return oprs_unchanged_p (x, insn, 1); | |
1586 | } | |
1587 | ||
1588 | /* Hash expression X. | |
c4c81601 RK |
1589 | |
1590 | MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean | |
1591 | indicating if a volatile operand is found or if the expression contains | |
1592 | something we don't want to insert in the table. | |
7506f491 DE |
1593 | |
1594 | ??? One might want to merge this with canon_hash. Later. */ | |
1595 | ||
1596 | static unsigned int | |
1597 | hash_expr (x, mode, do_not_record_p, hash_table_size) | |
1598 | rtx x; | |
1599 | enum machine_mode mode; | |
1600 | int *do_not_record_p; | |
1601 | int hash_table_size; | |
1602 | { | |
1603 | unsigned int hash; | |
1604 | ||
1605 | *do_not_record_p = 0; | |
1606 | ||
1607 | hash = hash_expr_1 (x, mode, do_not_record_p); | |
1608 | return hash % hash_table_size; | |
1609 | } | |
172890a2 | 1610 | |
6462bb43 | 1611 | /* Hash a string. Just add its bytes up. */ |
172890a2 | 1612 | |
6462bb43 AO |
1613 | static inline unsigned |
1614 | hash_string_1 (ps) | |
1615 | const char *ps; | |
1616 | { | |
1617 | unsigned hash = 0; | |
1618 | const unsigned char *p = (const unsigned char *) ps;
1619 | ||
1620 | if (p) | |
1621 | while (*p) | |
1622 | hash += *p++; | |
1623 | ||
1624 | return hash; | |
1625 | } | |
7506f491 DE |
1626 | |
1627 | /* Subroutine of hash_expr to do the actual work. */ | |
1628 | ||
1629 | static unsigned int | |
1630 | hash_expr_1 (x, mode, do_not_record_p) | |
1631 | rtx x; | |
1632 | enum machine_mode mode; | |
1633 | int *do_not_record_p; | |
1634 | { | |
1635 | int i, j; | |
1636 | unsigned hash = 0; | |
1637 | enum rtx_code code; | |
6f7d635c | 1638 | const char *fmt; |
7506f491 | 1639 | |
c4c81601 RK |
1640 | /* Used to turn recursion into iteration. We can't rely on GCC's |
1641 | tail-recursion elimination since we need to keep accumulating values
1642 | in HASH. */ | |
7506f491 DE |
1643 | |
1644 | if (x == 0) | |
1645 | return hash; | |
1646 | ||
c4c81601 | 1647 | repeat: |
7506f491 DE |
1648 | code = GET_CODE (x); |
1649 | switch (code) | |
1650 | { | |
1651 | case REG: | |
c4c81601 RK |
1652 | hash += ((unsigned int) REG << 7) + REGNO (x); |
1653 | return hash; | |
7506f491 DE |
1654 | |
1655 | case CONST_INT: | |
c4c81601 RK |
1656 | hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode |
1657 | + (unsigned int) INTVAL (x)); | |
1658 | return hash; | |
7506f491 DE |
1659 | |
1660 | case CONST_DOUBLE: | |
1661 | /* This is like the general case, except that it only counts | |
1662 | the integers representing the constant. */ | |
c4c81601 | 1663 | hash += (unsigned int) code + (unsigned int) GET_MODE (x); |
7506f491 DE |
1664 | if (GET_MODE (x) != VOIDmode) |
1665 | for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++) | |
c4c81601 | 1666 | hash += (unsigned int) XWINT (x, i); |
7506f491 | 1667 | else |
c4c81601 RK |
1668 | hash += ((unsigned int) CONST_DOUBLE_LOW (x) |
1669 | + (unsigned int) CONST_DOUBLE_HIGH (x)); | |
7506f491 DE |
1670 | return hash; |
1671 | ||
1672 | /* Assume there is only one rtx object for any given label. */ | |
1673 | case LABEL_REF: | |
1674 | /* We don't hash on the address of the CODE_LABEL to avoid bootstrap | |
1675 | differences and differences between each stage's debugging dumps. */ | |
c4c81601 RK |
1676 | hash += (((unsigned int) LABEL_REF << 7) |
1677 | + CODE_LABEL_NUMBER (XEXP (x, 0))); | |
7506f491 DE |
1678 | return hash; |
1679 | ||
1680 | case SYMBOL_REF: | |
1681 | { | |
1682 | /* Don't hash on the symbol's address to avoid bootstrap differences. | |
1683 | Different hash values may cause expressions to be recorded in | |
1684 | different orders and thus different registers to be used in the | |
1685 | final assembler. This also avoids differences in the dump files | |
1686 | between various stages. */ | |
1687 | unsigned int h = 0; | |
3cce094d | 1688 | const unsigned char *p = (const unsigned char *) XSTR (x, 0); |
c4c81601 | 1689 | |
7506f491 DE |
1690 | while (*p) |
1691 | h += (h << 7) + *p++; /* ??? revisit */ | |
c4c81601 RK |
1692 | |
1693 | hash += ((unsigned int) SYMBOL_REF << 7) + h; | |
7506f491 DE |
1694 | return hash; |
1695 | } | |
1696 | ||
1697 | case MEM: | |
1698 | if (MEM_VOLATILE_P (x)) | |
1699 | { | |
1700 | *do_not_record_p = 1; | |
1701 | return 0; | |
1702 | } | |
c4c81601 RK |
1703 | |
1704 | hash += (unsigned int) MEM; | |
297c3335 | 1705 | hash += MEM_ALIAS_SET (x); |
7506f491 DE |
1706 | x = XEXP (x, 0); |
1707 | goto repeat; | |
1708 | ||
1709 | case PRE_DEC: | |
1710 | case PRE_INC: | |
1711 | case POST_DEC: | |
1712 | case POST_INC: | |
1713 | case PC: | |
1714 | case CC0: | |
1715 | case CALL: | |
1716 | case UNSPEC_VOLATILE: | |
1717 | *do_not_record_p = 1; | |
1718 | return 0; | |
1719 | ||
1720 | case ASM_OPERANDS: | |
1721 | if (MEM_VOLATILE_P (x)) | |
1722 | { | |
1723 | *do_not_record_p = 1; | |
1724 | return 0; | |
1725 | } | |
6462bb43 AO |
1726 | else |
1727 | { | |
1728 | /* We don't want to take the filename and line into account. */ | |
1729 | hash += (unsigned) code + (unsigned) GET_MODE (x) | |
1730 | + hash_string_1 (ASM_OPERANDS_TEMPLATE (x)) | |
1731 | + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x)) | |
1732 | + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x); | |
1733 | ||
1734 | if (ASM_OPERANDS_INPUT_LENGTH (x)) | |
1735 | { | |
1736 | for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) | |
1737 | { | |
1738 | hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i), | |
1739 | GET_MODE (ASM_OPERANDS_INPUT (x, i)), | |
1740 | do_not_record_p) | |
1741 | + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT | |
1742 | (x, i))); | |
1743 | } | |
1744 | ||
1745 | hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0)); | |
1746 | x = ASM_OPERANDS_INPUT (x, 0); | |
1747 | mode = GET_MODE (x); | |
1748 | goto repeat; | |
1749 | } | |
1750 | return hash; | |
1751 | } | |
7506f491 DE |
1752 | |
1753 | default: | |
1754 | break; | |
1755 | } | |
1756 | ||
7506f491 | 1757 | hash += (unsigned) code + (unsigned) GET_MODE (x); |
c4c81601 | 1758 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
1759 | { |
1760 | if (fmt[i] == 'e') | |
1761 | { | |
7506f491 DE |
1762 | /* If we are about to do the last recursive call |
1763 | needed at this level, change it into iteration. | |
1764 | This function is called enough to be worth it. */ | |
1765 | if (i == 0) | |
1766 | { | |
c4c81601 | 1767 | x = XEXP (x, i); |
7506f491 DE |
1768 | goto repeat; |
1769 | } | |
c4c81601 RK |
1770 | |
1771 | hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p); | |
7506f491 DE |
1772 | if (*do_not_record_p) |
1773 | return 0; | |
1774 | } | |
c4c81601 | 1775 | |
7506f491 DE |
1776 | else if (fmt[i] == 'E') |
1777 | for (j = 0; j < XVECLEN (x, i); j++) | |
1778 | { | |
1779 | hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p); | |
1780 | if (*do_not_record_p) | |
1781 | return 0; | |
1782 | } | |
c4c81601 | 1783 | |
7506f491 | 1784 | else if (fmt[i] == 's') |
6462bb43 | 1785 | hash += hash_string_1 (XSTR (x, i)); |
7506f491 | 1786 | else if (fmt[i] == 'i') |
c4c81601 | 1787 | hash += (unsigned int) XINT (x, i); |
7506f491 DE |
1788 | else |
1789 | abort (); | |
1790 | } | |
1791 | ||
1792 | return hash; | |
1793 | } | |
1794 | ||
1795 | /* Hash a set of register REGNO. | |
1796 | ||
c4c81601 RK |
1797 | Sets are hashed on the register that is set. This simplifies the PRE copy |
1798 | propagation code. | |
7506f491 DE |
1799 | |
1800 | ??? May need to make things more elaborate. Later, as necessary. */ | |
1801 | ||
1802 | static unsigned int | |
1803 | hash_set (regno, hash_table_size) | |
1804 | int regno; | |
1805 | int hash_table_size; | |
1806 | { | |
1807 | unsigned int hash; | |
1808 | ||
1809 | hash = regno; | |
1810 | return hash % hash_table_size; | |
1811 | } | |
1812 | ||
1813 | /* Return non-zero if X is equivalent to Y.
1814 | ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */ | |
1815 | ||
1816 | static int | |
1817 | expr_equiv_p (x, y) | |
1818 | rtx x, y; | |
1819 | { | |
1820 | register int i, j; | |
1821 | register enum rtx_code code; | |
6f7d635c | 1822 | register const char *fmt; |
7506f491 DE |
1823 | |
1824 | if (x == y) | |
1825 | return 1; | |
c4c81601 | 1826 | |
7506f491 DE |
1827 | if (x == 0 || y == 0) |
1828 | return x == y; | |
1829 | ||
1830 | code = GET_CODE (x); | |
1831 | if (code != GET_CODE (y)) | |
1832 | return 0; | |
1833 | ||
1834 | /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ | |
1835 | if (GET_MODE (x) != GET_MODE (y)) | |
1836 | return 0; | |
1837 | ||
1838 | switch (code) | |
1839 | { | |
1840 | case PC: | |
1841 | case CC0: | |
1842 | return x == y; | |
1843 | ||
1844 | case CONST_INT: | |
1845 | return INTVAL (x) == INTVAL (y); | |
1846 | ||
1847 | case LABEL_REF: | |
1848 | return XEXP (x, 0) == XEXP (y, 0); | |
1849 | ||
1850 | case SYMBOL_REF: | |
1851 | return XSTR (x, 0) == XSTR (y, 0); | |
1852 | ||
1853 | case REG: | |
1854 | return REGNO (x) == REGNO (y); | |
1855 | ||
297c3335 RH |
1856 | case MEM: |
1857 | /* Can't merge two expressions in different alias sets, since we can | |
1858 | decide that the expression is transparent in a block when it isn't, | |
1859 | due to it being set with the different alias set. */ | |
1860 | if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y)) | |
1861 | return 0; | |
1862 | break; | |
1863 | ||
7506f491 DE |
1864 | /* For commutative operations, check both orders. */ |
1865 | case PLUS: | |
1866 | case MULT: | |
1867 | case AND: | |
1868 | case IOR: | |
1869 | case XOR: | |
1870 | case NE: | |
1871 | case EQ: | |
1872 | return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0)) | |
1873 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 1))) | |
1874 | || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1)) | |
1875 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 0)))); | |
1876 | ||
6462bb43 AO |
1877 | case ASM_OPERANDS: |
1878 | /* We don't use the generic code below because we want to | |
1879 | disregard filename and line numbers. */ | |
1880 | ||
1881 | /* A volatile asm isn't equivalent to any other. */ | |
1882 | if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) | |
1883 | return 0; | |
1884 | ||
1885 | if (GET_MODE (x) != GET_MODE (y) | |
1886 | || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y)) | |
1887 | || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x), | |
1888 | ASM_OPERANDS_OUTPUT_CONSTRAINT (y)) | |
1889 | || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y) | |
1890 | || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y)) | |
1891 | return 0; | |
1892 | ||
1893 | if (ASM_OPERANDS_INPUT_LENGTH (x)) | |
1894 | { | |
1895 | for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) | |
1896 | if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i), | |
1897 | ASM_OPERANDS_INPUT (y, i)) | |
1898 | || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i), | |
1899 | ASM_OPERANDS_INPUT_CONSTRAINT (y, i))) | |
1900 | return 0; | |
1901 | } | |
1902 | ||
1903 | return 1; | |
1904 | ||
7506f491 DE |
1905 | default: |
1906 | break; | |
1907 | } | |
1908 | ||
1909 | /* Compare the elements. If any pair of corresponding elements | |
1910 | fail to match, return 0 for the whole thing. */ | |
1911 | ||
1912 | fmt = GET_RTX_FORMAT (code); | |
1913 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1914 | { | |
1915 | switch (fmt[i]) | |
1916 | { | |
1917 | case 'e': | |
1918 | if (! expr_equiv_p (XEXP (x, i), XEXP (y, i))) | |
1919 | return 0; | |
1920 | break; | |
1921 | ||
1922 | case 'E': | |
1923 | if (XVECLEN (x, i) != XVECLEN (y, i)) | |
1924 | return 0; | |
1925 | for (j = 0; j < XVECLEN (x, i); j++) | |
1926 | if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j))) | |
1927 | return 0; | |
1928 | break; | |
1929 | ||
1930 | case 's': | |
1931 | if (strcmp (XSTR (x, i), XSTR (y, i))) | |
1932 | return 0; | |
1933 | break; | |
1934 | ||
1935 | case 'i': | |
1936 | if (XINT (x, i) != XINT (y, i)) | |
1937 | return 0; | |
1938 | break; | |
1939 | ||
1940 | case 'w': | |
1941 | if (XWINT (x, i) != XWINT (y, i)) | |
1942 | return 0; | |
1943 | break; | |
1944 | ||
1945 | case '0': | |
1946 | break; | |
1947 | ||
1948 | default: | |
1949 | abort (); | |
1950 | } | |
1951 | } | |
1952 | ||
1953 | return 1; | |
1954 | } | |
1955 | ||
1956 | /* Insert expression X in INSN in the hash table. | |
1957 | If it is already present, record it as the last occurrence in INSN's | |
1958 | basic block. | |
1959 | ||
1960 | MODE is the mode of the value X is being stored into. | |
1961 | It is only used if X is a CONST_INT. | |
1962 | ||
1963 | ANTIC_P is non-zero if X is an anticipatable expression. | |
1964 | AVAIL_P is non-zero if X is an available expression. */ | |
1965 | ||
1966 | static void | |
1967 | insert_expr_in_table (x, mode, insn, antic_p, avail_p) | |
1968 | rtx x; | |
1969 | enum machine_mode mode; | |
1970 | rtx insn; | |
1971 | int antic_p, avail_p; | |
1972 | { | |
1973 | int found, do_not_record_p; | |
1974 | unsigned int hash; | |
1975 | struct expr *cur_expr, *last_expr = NULL; | |
1976 | struct occr *antic_occr, *avail_occr; | |
1977 | struct occr *last_occr = NULL; | |
1978 | ||
1979 | hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size); | |
1980 | ||
1981 | /* Do not insert expression in table if it contains volatile operands, | |
1982 | or if hash_expr determines the expression is something we don't want | |
1983 | to or can't handle. */ | |
1984 | if (do_not_record_p) | |
1985 | return; | |
1986 | ||
1987 | cur_expr = expr_hash_table[hash]; | |
1988 | found = 0; | |
1989 | ||
c4c81601 | 1990 | while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x))) |
7506f491 DE |
1991 | { |
1992 | /* If the expression isn't found, save a pointer to the end of | |
1993 | the list. */ | |
1994 | last_expr = cur_expr; | |
1995 | cur_expr = cur_expr->next_same_hash; | |
1996 | } | |
1997 | ||
1998 | if (! found) | |
1999 | { | |
2000 | cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr)); | |
2001 | bytes_used += sizeof (struct expr); | |
2002 | if (expr_hash_table[hash] == NULL) | |
c4c81601 RK |
2003 | /* This is the first pattern that hashed to this index. */ |
2004 | expr_hash_table[hash] = cur_expr; | |
7506f491 | 2005 | else |
c4c81601 RK |
2006 | /* Add EXPR to end of this hash chain. */ |
2007 | last_expr->next_same_hash = cur_expr; | |
2008 | ||
7506f491 DE |
2009 | /* Set the fields of the expr element. */ |
2010 | cur_expr->expr = x; | |
2011 | cur_expr->bitmap_index = n_exprs++; | |
2012 | cur_expr->next_same_hash = NULL; | |
2013 | cur_expr->antic_occr = NULL; | |
2014 | cur_expr->avail_occr = NULL; | |
2015 | } | |
2016 | ||
2017 | /* Now record the occurrence(s). */ | |
7506f491 DE |
2018 | if (antic_p) |
2019 | { | |
2020 | antic_occr = cur_expr->antic_occr; | |
2021 | ||
2022 | /* Search for another occurrence in the same basic block. */ | |
2023 | while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn)) | |
2024 | { | |
2025 | /* If an occurrence isn't found, save a pointer to the end of | |
2026 | the list. */ | |
2027 | last_occr = antic_occr; | |
2028 | antic_occr = antic_occr->next; | |
2029 | } | |
2030 | ||
2031 | if (antic_occr) | |
c4c81601 RK |
2032 | /* Found another instance of the expression in the same basic block. |
2033 | Prefer the currently recorded one. We want the first one in the | |
2034 | block and the block is scanned from start to end. */ | |
2035 | ; /* nothing to do */ | |
7506f491 DE |
2036 | else |
2037 | { | |
2038 | /* First occurrence of this expression in this basic block. */ | |
2039 | antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
2040 | bytes_used += sizeof (struct occr); | |
2041 | /* First occurrence of this expression in any block? */ | |
2042 | if (cur_expr->antic_occr == NULL) | |
2043 | cur_expr->antic_occr = antic_occr; | |
2044 | else | |
2045 | last_occr->next = antic_occr; | |
c4c81601 | 2046 | |
7506f491 DE |
2047 | antic_occr->insn = insn; |
2048 | antic_occr->next = NULL; | |
2049 | } | |
2050 | } | |
2051 | ||
2052 | if (avail_p) | |
2053 | { | |
2054 | avail_occr = cur_expr->avail_occr; | |
2055 | ||
2056 | /* Search for another occurrence in the same basic block. */ | |
2057 | while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn)) | |
2058 | { | |
2059 | /* If an occurrence isn't found, save a pointer to the end of | |
2060 | the list. */ | |
2061 | last_occr = avail_occr; | |
2062 | avail_occr = avail_occr->next; | |
2063 | } | |
2064 | ||
2065 | if (avail_occr) | |
c4c81601 RK |
2066 | /* Found another instance of the expression in the same basic block. |
2067 | Prefer this occurrence to the currently recorded one. We want | |
2068 | the last one in the block and the block is scanned from start | |
2069 | to end. */ | |
2070 | avail_occr->insn = insn; | |
7506f491 DE |
2071 | else |
2072 | { | |
2073 | /* First occurrence of this expression in this basic block. */ | |
2074 | avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
2075 | bytes_used += sizeof (struct occr); | |
c4c81601 | 2076 | |
7506f491 DE |
2077 | /* First occurrence of this expression in any block? */ |
2078 | if (cur_expr->avail_occr == NULL) | |
2079 | cur_expr->avail_occr = avail_occr; | |
2080 | else | |
2081 | last_occr->next = avail_occr; | |
c4c81601 | 2082 | |
7506f491 DE |
2083 | avail_occr->insn = insn; |
2084 | avail_occr->next = NULL; | |
2085 | } | |
2086 | } | |
2087 | } | |
2088 | ||
2089 | /* Insert pattern X in INSN in the hash table. | |
2090 | X is a SET of a reg to either another reg or a constant. | |
2091 | If it is already present, record it as the last occurrence in INSN's | |
2092 | basic block. */ | |
2093 | ||
2094 | static void | |
2095 | insert_set_in_table (x, insn) | |
2096 | rtx x; | |
2097 | rtx insn; | |
2098 | { | |
2099 | int found; | |
2100 | unsigned int hash; | |
2101 | struct expr *cur_expr, *last_expr = NULL; | |
2102 | struct occr *cur_occr, *last_occr = NULL; | |
2103 | ||
2104 | if (GET_CODE (x) != SET | |
2105 | || GET_CODE (SET_DEST (x)) != REG) | |
2106 | abort (); | |
2107 | ||
2108 | hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size); | |
2109 | ||
2110 | cur_expr = set_hash_table[hash]; | |
2111 | found = 0; | |
2112 | ||
c4c81601 | 2113 | while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x))) |
7506f491 DE |
2114 | { |
2115 | /* If the expression isn't found, save a pointer to the end of | |
2116 | the list. */ | |
2117 | last_expr = cur_expr; | |
2118 | cur_expr = cur_expr->next_same_hash; | |
2119 | } | |
2120 | ||
2121 | if (! found) | |
2122 | { | |
2123 | cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr)); | |
2124 | bytes_used += sizeof (struct expr); | |
2125 | if (set_hash_table[hash] == NULL) | |
c4c81601 RK |
2126 | /* This is the first pattern that hashed to this index. */ |
2127 | set_hash_table[hash] = cur_expr; | |
7506f491 | 2128 | else |
c4c81601 RK |
2129 | /* Add EXPR to end of this hash chain. */ |
2130 | last_expr->next_same_hash = cur_expr; | |
2131 | ||
7506f491 DE |
2132 | /* Set the fields of the expr element. |
2133 | We must copy X because it can be modified when copy propagation is | |
2134 | performed on its operands. */ | |
7506f491 DE |
2135 | cur_expr->expr = copy_rtx (x); |
2136 | cur_expr->bitmap_index = n_sets++; | |
2137 | cur_expr->next_same_hash = NULL; | |
2138 | cur_expr->antic_occr = NULL; | |
2139 | cur_expr->avail_occr = NULL; | |
2140 | } | |
2141 | ||
2142 | /* Now record the occurrence. */ | |
7506f491 DE |
2143 | cur_occr = cur_expr->avail_occr; |
2144 | ||
2145 | /* Search for another occurrence in the same basic block. */ | |
2146 | while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn)) | |
2147 | { | |
2148 | /* If an occurrence isn't found, save a pointer to the end of | |
2149 | the list. */ | |
2150 | last_occr = cur_occr; | |
2151 | cur_occr = cur_occr->next; | |
2152 | } | |
2153 | ||
2154 | if (cur_occr) | |
c4c81601 RK |
2155 | /* Found another instance of the expression in the same basic block. |
2156 | Prefer this occurrence to the currently recorded one. We want the | |
2157 | last one in the block and the block is scanned from start to end. */ | |
2158 | cur_occr->insn = insn; | |
7506f491 DE |
2159 | else |
2160 | { | |
2161 | /* First occurrence of this expression in this basic block. */ | |
2162 | cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
2163 | bytes_used += sizeof (struct occr); | |
c4c81601 | 2164 | |
7506f491 DE |
2165 | /* First occurrence of this expression in any block? */ |
2166 | if (cur_expr->avail_occr == NULL) | |
2167 | cur_expr->avail_occr = cur_occr; | |
2168 | else | |
2169 | last_occr->next = cur_occr; | |
c4c81601 | 2170 | |
7506f491 DE |
2171 | cur_occr->insn = insn; |
2172 | cur_occr->next = NULL; | |
2173 | } | |
2174 | } | |
2175 | ||
c4c81601 RK |
2176 | /* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is |
2177 | non-zero, this is for the assignment hash table, otherwise it is for the | |
2178 | expression hash table. */ | |
7506f491 DE |
2179 | |
2180 | static void | |
2181 | hash_scan_set (pat, insn, set_p) | |
2182 | rtx pat, insn; | |
2183 | int set_p; | |
2184 | { | |
2185 | rtx src = SET_SRC (pat); | |
2186 | rtx dest = SET_DEST (pat); | |
172890a2 | 2187 | rtx note; |
7506f491 DE |
2188 | |
2189 | if (GET_CODE (src) == CALL) | |
2190 | hash_scan_call (src, insn); | |
2191 | ||
172890a2 | 2192 | else if (GET_CODE (dest) == REG) |
7506f491 | 2193 | { |
172890a2 | 2194 | unsigned int regno = REGNO (dest); |
7506f491 DE |
2195 | rtx tmp; |
2196 | ||
172890a2 RK |
2197 | /* If this is a single set and we are doing constant propagation, |
2198 | see if a REG_NOTE shows it to be equivalent to a constant. */
2199 | if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0 | |
2200 | && CONSTANT_P (XEXP (note, 0))) | |
2201 | src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src); | |
2202 | ||
7506f491 DE |
2203 | /* Only record sets of pseudo-regs in the hash table. */ |
2204 | if (! set_p | |
2205 | && regno >= FIRST_PSEUDO_REGISTER | |
2206 | /* Don't GCSE something if we can't do a reg/reg copy. */ | |
2207 | && can_copy_p [GET_MODE (dest)] | |
2208 | /* Is SET_SRC something we want to gcse? */ | |
172890a2 RK |
2209 | && want_to_gcse_p (src) |
2210 | /* Don't CSE a nop. */ | |
43e72072 JJ |
2211 | && ! set_noop_p (pat) |
2212 | /* Don't GCSE if it has an attached REG_EQUIV note.
2213 | At this point only function parameters should have
2214 | REG_EQUIV notes, and if the argument slot is used somewhere
2215 | explicitly, it means the address of the parameter has been
2216 | taken, so we should not extend the lifetime of the pseudo. */
2217 | && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0 | |
2218 | || GET_CODE (XEXP (note, 0)) != MEM)) | |
7506f491 DE |
2219 | { |
2220 | /* An expression is not anticipatable if its operands are | |
52d76e11 RK |
2221 | modified before this insn or if this is not the only SET in |
2222 | this insn. */ | |
2223 | int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn); | |
7506f491 DE |
2224 | /* An expression is not available if its operands are |
2225 | subsequently modified, including this insn. */ | |
2226 | int avail_p = oprs_available_p (src, insn); | |
c4c81601 | 2227 | |
7506f491 DE |
2228 | insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p); |
2229 | } | |
c4c81601 | 2230 | |
7506f491 DE |
2231 | /* Record sets for constant/copy propagation. */ |
2232 | else if (set_p | |
2233 | && regno >= FIRST_PSEUDO_REGISTER | |
2234 | && ((GET_CODE (src) == REG | |
2235 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
172890a2 RK |
2236 | && can_copy_p [GET_MODE (dest)] |
2237 | && REGNO (src) != regno) | |
e78d9500 | 2238 | || GET_CODE (src) == CONST_INT |
05f6f07c | 2239 | || GET_CODE (src) == SYMBOL_REF |
e78d9500 | 2240 | || GET_CODE (src) == CONST_DOUBLE) |
7506f491 DE |
2241 | /* A copy is not available if its src or dest is subsequently |
2242 | modified. Here we want to search from INSN+1 on, but | |
2243 | oprs_available_p searches from INSN on. */ | |
2244 | && (insn == BLOCK_END (BLOCK_NUM (insn)) | |
2245 | || ((tmp = next_nonnote_insn (insn)) != NULL_RTX | |
2246 | && oprs_available_p (pat, tmp)))) | |
2247 | insert_set_in_table (pat, insn); | |
2248 | } | |
7506f491 DE |
2249 | } |
2250 | ||
2251 | static void | |
2252 | hash_scan_clobber (x, insn) | |
50b2596f | 2253 | rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED; |
7506f491 DE |
2254 | { |
2255 | /* Currently nothing to do. */ | |
2256 | } | |
2257 | ||
2258 | static void | |
2259 | hash_scan_call (x, insn) | |
50b2596f | 2260 | rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED; |
7506f491 DE |
2261 | { |
2262 | /* Currently nothing to do. */ | |
2263 | } | |
2264 | ||
2265 | /* Process INSN and add hash table entries as appropriate. | |
2266 | ||
2267 | Only available expressions that set a single pseudo-reg are recorded. | |
2268 | ||
2269 | Single sets in a PARALLEL could be handled, but it's an extra complication | |
2270 | that isn't dealt with right now. The trick is handling the CLOBBERs that | |
2271 | are also in the PARALLEL. Later. | |
2272 | ||
2273 | If SET_P is non-zero, this is for the assignment hash table, | |
ed79bb3d R |
2274 | otherwise it is for the expression hash table. |
2275 | If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2276 | not record any expressions. */ | |
7506f491 DE |
2277 | |
2278 | static void | |
ed79bb3d | 2279 | hash_scan_insn (insn, set_p, in_libcall_block) |
7506f491 DE |
2280 | rtx insn; |
2281 | int set_p; | |
48e87cef | 2282 | int in_libcall_block; |
7506f491 DE |
2283 | { |
2284 | rtx pat = PATTERN (insn); | |
c4c81601 | 2285 | int i; |
7506f491 | 2286 | |
172890a2 RK |
2287 | if (in_libcall_block) |
2288 | return; | |
2289 | ||
7506f491 DE |
2290 | /* Pick out the sets of INSN and for other forms of instructions record |
2291 | what's been modified. */ | |
2292 | ||
172890a2 RK |
2293 | if (GET_CODE (pat) == SET) |
2294 | hash_scan_set (pat, insn, set_p); | |
7506f491 | 2295 | else if (GET_CODE (pat) == PARALLEL) |
c4c81601 RK |
2296 | for (i = 0; i < XVECLEN (pat, 0); i++) |
2297 | { | |
2298 | rtx x = XVECEXP (pat, 0, i); | |
7506f491 | 2299 | |
c4c81601 | 2300 | if (GET_CODE (x) == SET) |
172890a2 | 2301 | hash_scan_set (x, insn, set_p); |
c4c81601 RK |
2302 | else if (GET_CODE (x) == CLOBBER) |
2303 | hash_scan_clobber (x, insn); | |
2304 | else if (GET_CODE (x) == CALL) | |
2305 | hash_scan_call (x, insn); | |
2306 | } | |
7506f491 | 2307 | |
7506f491 DE |
2308 | else if (GET_CODE (pat) == CLOBBER) |
2309 | hash_scan_clobber (pat, insn); | |
2310 | else if (GET_CODE (pat) == CALL) | |
2311 | hash_scan_call (pat, insn); | |
2312 | } | |
2313 | ||
2314 | static void | |
2315 | dump_hash_table (file, name, table, table_size, total_size) | |
2316 | FILE *file; | |
dff01034 | 2317 | const char *name; |
7506f491 DE |
2318 | struct expr **table; |
2319 | int table_size, total_size; | |
2320 | { | |
2321 | int i; | |
2322 | /* Flattened out table, so it's printed in proper order. */ | |
4da896b2 MM |
2323 | struct expr **flat_table; |
2324 | unsigned int *hash_val; | |
c4c81601 | 2325 | struct expr *expr; |
4da896b2 MM |
2326 | |
2327 | flat_table | |
2328 | = (struct expr **) xcalloc (total_size, sizeof (struct expr *)); | |
2329 | hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int)); | |
7506f491 | 2330 | |
7506f491 | 2331 | for (i = 0; i < table_size; i++) |
c4c81601 RK |
2332 | for (expr = table[i]; expr != NULL; expr = expr->next_same_hash) |
2333 | { | |
2334 | flat_table[expr->bitmap_index] = expr; | |
2335 | hash_val[expr->bitmap_index] = i; | |
2336 | } | |
7506f491 DE |
2337 | |
2338 | fprintf (file, "%s hash table (%d buckets, %d entries)\n", | |
2339 | name, table_size, total_size); | |
2340 | ||
2341 | for (i = 0; i < total_size; i++) | |
21318741 RK |
2342 | if (flat_table[i] != 0) |
2343 | { | |
a0ac9e5a | 2344 | expr = flat_table[i]; |
21318741 RK |
2345 | fprintf (file, "Index %d (hash value %d)\n ", |
2346 | expr->bitmap_index, hash_val[i]); | |
a0ac9e5a | 2347 | print_rtl (file, expr->expr); |
21318741 RK |
2348 | fprintf (file, "\n"); |
2349 | } | |
7506f491 DE |
2350 | |
2351 | fprintf (file, "\n"); | |
4da896b2 | 2352 | |
4da896b2 MM |
2353 | free (flat_table); |
2354 | free (hash_val); | |
7506f491 DE |
2355 | } |
2356 | ||
2357 | /* Record register first/last/block set information for REGNO in INSN. | |
c4c81601 | 2358 | |
7506f491 DE |
2359 | reg_first_set records the first place in the block where the register |
2360 | is set and is used to compute "anticipatability". | |
c4c81601 | 2361 | |
7506f491 DE |
2362 | reg_last_set records the last place in the block where the register |
2363 | is set and is used to compute "availability". | |
c4c81601 | 2364 | |
7506f491 DE |
2365 | reg_set_in_block records whether the register is set in the block |
2366 | and is used to compute "transparency". */ | |
2367 | ||
2368 | static void | |
2369 | record_last_reg_set_info (insn, regno) | |
2370 | rtx insn; | |
2371 | int regno; | |
2372 | { | |
b86ba9c8 | 2373 | if (reg_first_set[regno] == NEVER_SET) |
7506f491 | 2374 | reg_first_set[regno] = INSN_CUID (insn); |
c4c81601 | 2375 | |
7506f491 DE |
2376 | reg_last_set[regno] = INSN_CUID (insn); |
2377 | SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno); | |
2378 | } | |
2379 | ||
a13d4ebf AM |
2380 | |
2381 | /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn. | |
2382 | Note we store a pair of elements in the list, so they have to be | |
2383 | taken off pairwise. */ | |
2384 | ||
2385 | static void | |
2386 | canon_list_insert (dest, unused1, v_insn) | |
2387 | rtx dest ATTRIBUTE_UNUSED; | |
2388 | rtx unused1 ATTRIBUTE_UNUSED; | |
2389 | void *v_insn;
2390 | { | |
2391 | rtx dest_addr, insn; | |
2392 | ||
2393 | while (GET_CODE (dest) == SUBREG | |
2394 | || GET_CODE (dest) == ZERO_EXTRACT | |
2395 | || GET_CODE (dest) == SIGN_EXTRACT | |
2396 | || GET_CODE (dest) == STRICT_LOW_PART) | |
2397 | dest = XEXP (dest, 0); | |
2398 | ||
2399 | /* If DEST is not a MEM, then it will not conflict with a load. Note | |
2400 | that function calls are assumed to clobber memory, but are handled | |
2401 | elsewhere. */ | |
2402 | ||
2403 | if (GET_CODE (dest) != MEM) | |
2404 | return; | |
2405 | ||
2406 | dest_addr = get_addr (XEXP (dest, 0)); | |
2407 | dest_addr = canon_rtx (dest_addr); | |
2408 | insn = (rtx) v_insn; | |
2409 | ||
2410 | canon_modify_mem_list[BLOCK_NUM (insn)] = | |
2411 | alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]); | |
2412 | canon_modify_mem_list[BLOCK_NUM (insn)] = | |
2413 | alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]); | |
2414 | } | |
2415 | ||
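/* A sketch of the pairwise traversal implied by the comment above:
   once canon_list_insert has run, the head of each pair is the MEM
   itself and the second element is its canonical address, while a
   CALL_INSN entry (stored singly by record_last_mem_set_info below)
   ends any walk that isn't simply freeing the list.  This counting
   helper is hypothetical, for illustration only.  */

static int
count_canon_mems_sketch (list)
     rtx list;
{
  int n = 0;

  while (list)
    {
      /* A CALL_INSN is entered singly and terminates the walk.  */
      if (GET_CODE (XEXP (list, 0)) == CALL_INSN)
        break;

      n++;
      /* Step over the (MEM, canonical address) pair.  */
      list = XEXP (XEXP (list, 1), 1);
    }

  return n;
}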
7506f491 | 2416 | /* Record memory first/last/block set and modification information for
a13d4ebf AM |
2417 | INSN. We do not actually care about the memory location(s) that are
2418 | set, or even how they are set (consider a CALL_INSN); we merely need
2419 | to record which insns modify memory. */
7506f491 DE |
2420 | |
2421 | static void | |
2422 | record_last_mem_set_info (insn) | |
2423 | rtx insn; | |
2424 | { | |
b86ba9c8 | 2425 | if (mem_first_set == NEVER_SET) |
7506f491 | 2426 | mem_first_set = INSN_CUID (insn); |
c4c81601 | 2427 | |
7506f491 DE |
2428 | mem_last_set = INSN_CUID (insn); |
2429 | mem_set_in_block[BLOCK_NUM (insn)] = 1; | |
a13d4ebf AM |
2430 | modify_mem_list[BLOCK_NUM (insn)] = |
2431 | alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]); | |
2432 | ||
2433 | if (GET_CODE (insn) == CALL_INSN) | |
2434 | { | |
2435 | /* Note that traversals of this loop (other than for freeing)
2436 | will break after encountering a CALL_INSN. So, there's no | |
2437 | need to insert a pair of items, as canon_list_insert does. */ | |
2438 | canon_modify_mem_list[BLOCK_NUM (insn)] = | |
2439 | alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]); | |
2440 | } | |
2441 | else | |
2442 | note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
7506f491 DE |
2443 | } |
2444 | ||
7506f491 | 2445 | /* Called from compute_hash_table via note_stores to handle one |
84832317 MM |
2446 | SET or CLOBBER in an insn. DATA is really the instruction in which |
2447 | the SET is taking place. */ | |
7506f491 DE |
2448 | |
2449 | static void | |
84832317 | 2450 | record_last_set_info (dest, setter, data) |
50b2596f | 2451 | rtx dest, setter ATTRIBUTE_UNUSED; |
84832317 | 2452 | void *data; |
7506f491 | 2453 | { |
84832317 MM |
2454 | rtx last_set_insn = (rtx) data; |
2455 | ||
7506f491 DE |
2456 | if (GET_CODE (dest) == SUBREG) |
2457 | dest = SUBREG_REG (dest); | |
2458 | ||
2459 | if (GET_CODE (dest) == REG) | |
2460 | record_last_reg_set_info (last_set_insn, REGNO (dest)); | |
2461 | else if (GET_CODE (dest) == MEM | |
2462 | /* Ignore pushes, they clobber nothing. */ | |
2463 | && ! push_operand (dest, GET_MODE (dest))) | |
2464 | record_last_mem_set_info (last_set_insn); | |
2465 | } | |
2466 | ||
2467 | /* Top level function to create an expression or assignment hash table. | |
2468 | ||
2469 | Expression entries are placed in the hash table if | |
2470 | - they are of the form (set (pseudo-reg) src), | |
2471 | - src is something we want to perform GCSE on, | |
2472 | - none of the operands are subsequently modified in the block | |
2473 | ||
2474 | Assignment entries are placed in the hash table if | |
2475 | - they are of the form (set (pseudo-reg) src), | |
2476 | - src is something we want to perform const/copy propagation on, | |
2477 | - none of the operands or target are subsequently modified in the block | |
c4c81601 | 2478 | |
7506f491 DE |
2479 | Currently src must be a pseudo-reg or a constant (const_int,
2480 | ||
2481 | symbol_ref or const_double).
2482 | SET_P is non-zero for computing the assignment hash table. */
2483 | ||
2484 | static void | |
b5ce41ff | 2485 | compute_hash_table (set_p) |
7506f491 DE |
2486 | int set_p; |
2487 | { | |
2488 | int bb; | |
2489 | ||
2490 | /* While we compute the hash table we also compute a bit array of which | |
2491 | registers are set in which blocks. | |
2492 | We also compute which blocks set memory, in the absence of aliasing | |
2493 | support [which is TODO]. | |
2494 | ??? This isn't needed during const/copy propagation, but it's cheap to | |
2495 | compute. Later. */ | |
2496 | sbitmap_vector_zero (reg_set_in_block, n_basic_blocks); | |
961192e1 | 2497 | memset ((char *) mem_set_in_block, 0, n_basic_blocks); |
7506f491 | 2498 | |
a13d4ebf AM |
2499 | /* Re-cache any INSN_LIST nodes we have allocated. */
2500 | { | |
2501 | int i; | |
2502 | for (i = 0; i < n_basic_blocks; i++) | |
2503 | { | |
2504 | if (modify_mem_list[i]) | |
2505 | free_INSN_LIST_list (modify_mem_list + i); | |
2506 | if (canon_modify_mem_list[i]) | |
2507 | free_INSN_LIST_list (canon_modify_mem_list + i); | |
2508 | } | |
2509 | } | |
7506f491 DE |
2510 | /* Some working arrays used to track first and last set in each block. */ |
2511 | /* ??? One could use alloca here, but at some size a threshold is crossed | |
2512 | beyond which one should use malloc. Are we at that threshold here? */ | |
2513 | reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int)); | |
2514 | reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int)); | |
2515 | ||
2516 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2517 | { | |
2518 | rtx insn; | |
770ae6cc | 2519 | unsigned int regno; |
ed79bb3d | 2520 | int in_libcall_block; |
770ae6cc | 2521 | unsigned int i; |
7506f491 DE |
2522 | |
2523 | /* First pass over the instructions records information used to | |
2524 | determine when registers and memory are first and last set. | |
2525 | ??? The mem_set_in_block and hard-reg reg_set_in_block computation | |
2526 | could be moved to compute_sets since they currently don't change. */ | |
2527 | ||
b86ba9c8 GK |
2528 | for (i = 0; i < max_gcse_regno; i++) |
2529 | reg_first_set[i] = reg_last_set[i] = NEVER_SET; | |
770ae6cc | 2530 | |
b86ba9c8 GK |
2531 | mem_first_set = NEVER_SET; |
2532 | mem_last_set = NEVER_SET; | |
7506f491 | 2533 | |
3b413743 RH |
2534 | for (insn = BLOCK_HEAD (bb); |
2535 | insn && insn != NEXT_INSN (BLOCK_END (bb)); | |
7506f491 DE |
2536 | insn = NEXT_INSN (insn)) |
2537 | { | |
2538 | #ifdef NON_SAVING_SETJMP | |
2539 | if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE | |
2540 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) | |
2541 | { | |
2542 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
2543 | record_last_reg_set_info (insn, regno); | |
2544 | continue; | |
2545 | } | |
2546 | #endif | |
2547 | ||
2c3c49de | 2548 | if (! INSN_P (insn)) |
7506f491 DE |
2549 | continue; |
2550 | ||
2551 | if (GET_CODE (insn) == CALL_INSN) | |
2552 | { | |
2553 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
15f8470f JL |
2554 | if ((call_used_regs[regno] |
2555 | && regno != STACK_POINTER_REGNUM | |
2556 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2557 | && regno != HARD_FRAME_POINTER_REGNUM | |
2558 | #endif | |
2559 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2560 | && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
2561 | #endif | |
848e0190 | 2562 | #if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) |
15f8470f JL |
2563 | && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic) |
2564 | #endif | |
2565 | ||
2566 | && regno != FRAME_POINTER_REGNUM) | |
2567 | || global_regs[regno]) | |
7506f491 | 2568 | record_last_reg_set_info (insn, regno); |
c4c81601 | 2569 | |
7506f491 DE |
2570 | if (! CONST_CALL_P (insn)) |
2571 | record_last_mem_set_info (insn); | |
2572 | } | |
2573 | ||
84832317 | 2574 | note_stores (PATTERN (insn), record_last_set_info, insn); |
7506f491 DE |
2575 | } |
2576 | ||
2577 | /* The next pass builds the hash table. */ | |
2578 | ||
3b413743 RH |
2579 | for (insn = BLOCK_HEAD (bb), in_libcall_block = 0; |
2580 | insn && insn != NEXT_INSN (BLOCK_END (bb)); | |
7506f491 | 2581 | insn = NEXT_INSN (insn)) |
2c3c49de | 2582 | if (INSN_P (insn)) |
c4c81601 RK |
2583 | { |
2584 | if (find_reg_note (insn, REG_LIBCALL, NULL_RTX)) | |
2585 | in_libcall_block = 1; | |
2586 | else if (find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
2587 | in_libcall_block = 0; | |
2588 | hash_scan_insn (insn, set_p, in_libcall_block); | |
7506f491 DE |
2589 | } |
2590 | } | |
2591 | ||
2592 | free (reg_first_set); | |
2593 | free (reg_last_set); | |
c4c81601 | 2594 | |
7506f491 DE |
2595 | /* Catch bugs early. */ |
2596 | reg_first_set = reg_last_set = 0; | |
2597 | } | |
2598 | ||
2599 | /* Allocate space for the set hash table. | |
2600 | N_INSNS is the number of instructions in the function. | |
2601 | It is used to determine the number of buckets to use. */ | |
2602 | ||
2603 | static void | |
2604 | alloc_set_hash_table (n_insns) | |
2605 | int n_insns; | |
2606 | { | |
2607 | int n; | |
2608 | ||
2609 | set_hash_table_size = n_insns / 4; | |
2610 | if (set_hash_table_size < 11) | |
2611 | set_hash_table_size = 11; | |
c4c81601 | 2612 | |
7506f491 DE |
2613 | /* Attempt to maintain efficient use of hash table. |
2614 | Making it an odd number is simplest for now. | |
2615 | ??? Later take some measurements. */ | |
2616 | set_hash_table_size |= 1; | |
2617 | n = set_hash_table_size * sizeof (struct expr *); | |
2618 | set_hash_table = (struct expr **) gmalloc (n); | |
2619 | } | |
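/* A worked instance of the sizing rule above: a 1000-insn function
   gives 1000 / 4 = 250 buckets, which the `|= 1' step bumps to the
   odd count 251; anything under 44 insns gets the floor of 11.  */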
2620 | ||
2621 | /* Free things allocated by alloc_set_hash_table. */ | |
2622 | ||
2623 | static void | |
2624 | free_set_hash_table () | |
2625 | { | |
2626 | free (set_hash_table); | |
2627 | } | |
2628 | ||
2629 | /* Compute the hash table for doing copy/const propagation. */ | |
2630 | ||
2631 | static void | |
b5ce41ff | 2632 | compute_set_hash_table () |
7506f491 DE |
2633 | { |
2634 | /* Initialize count of number of entries in hash table. */ | |
2635 | n_sets = 0; | |
961192e1 | 2636 | memset ((char *) set_hash_table, 0, |
c4c81601 | 2637 | set_hash_table_size * sizeof (struct expr *)); |
7506f491 | 2638 | |
b5ce41ff | 2639 | compute_hash_table (1); |
7506f491 DE |
2640 | } |
2641 | ||
2642 | /* Allocate space for the expression hash table. | |
2643 | N_INSNS is the number of instructions in the function. | |
2644 | It is used to determine the number of buckets to use. */ | |
2645 | ||
2646 | static void | |
2647 | alloc_expr_hash_table (n_insns) | |
2e653e39 | 2648 | unsigned int n_insns; |
7506f491 DE |
2649 | { |
2650 | int n; | |
2651 | ||
2652 | expr_hash_table_size = n_insns / 2; | |
2653 | /* Make sure the amount is usable. */ | |
2654 | if (expr_hash_table_size < 11) | |
2655 | expr_hash_table_size = 11; | |
c4c81601 | 2656 | |
7506f491 DE |
2657 | /* Attempt to maintain efficient use of hash table. |
2658 | Making it an odd number is simplest for now. | |
2659 | ??? Later take some measurements. */ | |
2660 | expr_hash_table_size |= 1; | |
2661 | n = expr_hash_table_size * sizeof (struct expr *); | |
2662 | expr_hash_table = (struct expr **) gmalloc (n); | |
2663 | } | |
2664 | ||
2665 | /* Free things allocated by alloc_expr_hash_table. */ | |
2666 | ||
2667 | static void | |
2668 | free_expr_hash_table () | |
2669 | { | |
2670 | free (expr_hash_table); | |
2671 | } | |
2672 | ||
2673 | /* Compute the hash table for doing GCSE. */ | |
2674 | ||
2675 | static void | |
b5ce41ff | 2676 | compute_expr_hash_table () |
7506f491 DE |
2677 | { |
2678 | /* Initialize count of number of entries in hash table. */ | |
2679 | n_exprs = 0; | |
961192e1 | 2680 | memset ((char *) expr_hash_table, 0, |
c4c81601 | 2681 | expr_hash_table_size * sizeof (struct expr *)); |
7506f491 | 2682 | |
b5ce41ff | 2683 | compute_hash_table (0); |
7506f491 DE |
2684 | } |
2685 | \f | |
2686 | /* Expression tracking support. */ | |
2687 | ||
2688 | /* Lookup pattern PAT in the expression table. | |
2689 | The result is a pointer to the table entry, or NULL if not found. */ | |
2690 | ||
2691 | static struct expr * | |
2692 | lookup_expr (pat) | |
2693 | rtx pat; | |
2694 | { | |
2695 | int do_not_record_p; | |
2696 | unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p, | |
2697 | expr_hash_table_size); | |
2698 | struct expr *expr; | |
2699 | ||
2700 | if (do_not_record_p) | |
2701 | return NULL; | |
2702 | ||
2703 | expr = expr_hash_table[hash]; | |
2704 | ||
2705 | while (expr && ! expr_equiv_p (expr->expr, pat)) | |
2706 | expr = expr->next_same_hash; | |
2707 | ||
2708 | return expr; | |
2709 | } | |
2710 | ||
c4c81601 RK |
2711 | /* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that |
2712 | matches it, otherwise return the first entry for REGNO. The result is a | |
2713 | pointer to the table entry, or NULL if not found. */ | |
7506f491 DE |
2714 | |
2715 | static struct expr * | |
2716 | lookup_set (regno, pat) | |
770ae6cc | 2717 | unsigned int regno; |
7506f491 DE |
2718 | rtx pat; |
2719 | { | |
2720 | unsigned int hash = hash_set (regno, set_hash_table_size); | |
2721 | struct expr *expr; | |
2722 | ||
2723 | expr = set_hash_table[hash]; | |
2724 | ||
2725 | if (pat) | |
2726 | { | |
2727 | while (expr && ! expr_equiv_p (expr->expr, pat)) | |
2728 | expr = expr->next_same_hash; | |
2729 | } | |
2730 | else | |
2731 | { | |
2732 | while (expr && REGNO (SET_DEST (expr->expr)) != regno) | |
2733 | expr = expr->next_same_hash; | |
2734 | } | |
2735 | ||
2736 | return expr; | |
2737 | } | |
2738 | ||
2739 | /* Return the next entry for REGNO in list EXPR. */ | |
2740 | ||
2741 | static struct expr * | |
2742 | next_set (regno, expr) | |
770ae6cc | 2743 | unsigned int regno; |
7506f491 DE |
2744 | struct expr *expr; |
2745 | { | |
2746 | do | |
2747 | expr = expr->next_same_hash; | |
2748 | while (expr && REGNO (SET_DEST (expr->expr)) != regno); | |
c4c81601 | 2749 | |
7506f491 DE |
2750 | return expr; |
2751 | } | |
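/* A sketch of how lookup_set and next_set combine to visit every
   recorded set of a given register (a hypothetical helper, for
   illustration only).  */

static void
scan_sets_of_reg_sketch (regno)
     unsigned int regno;
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX); set != NULL;
       set = next_set (regno, set))
    {
      /* SET->expr is a (set (reg REGNO) ...) pattern recorded by
         insert_set_in_table.  */
    }
}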
2752 | ||
2753 | /* Reset tables used to keep track of what's still available [since the | |
2754 | start of the block]. */ | |
2755 | ||
2756 | static void | |
2757 | reset_opr_set_tables () | |
2758 | { | |
2759 | /* Maintain a bitmap of which regs have been set since beginning of | |
2760 | the block. */ | |
2761 | sbitmap_zero (reg_set_bitmap); | |
c4c81601 | 2762 | |
7506f491 DE |
2763 | /* Also keep a record of the last instruction to modify memory. |
2764 | For now this is very trivial; we only record whether any memory | |
2765 | location has been modified. */ | |
2766 | mem_last_set = 0; | |
a13d4ebf AM |
2767 | { |
2768 | int i; | |
2769 | ||
2770 | /* Return any INSN_LIST nodes we have allocated to the INSN_LIST cache. */ | |
2771 | for (i = 0; i < n_basic_blocks; i++) | |
2772 | { | |
2773 | if (modify_mem_list[i]) | |
2774 | free_INSN_LIST_list (modify_mem_list + i); | |
2775 | if (canon_modify_mem_list[i]) | |
2776 | free_INSN_LIST_list (canon_modify_mem_list + i); | |
2777 | } | |
2778 | } | |
7506f491 DE |
2779 | } |
2780 | ||
2781 | /* Return non-zero if the operands of X are not set before INSN in | |
2782 | INSN's basic block. */ | |
2783 | ||
2784 | static int | |
2785 | oprs_not_set_p (x, insn) | |
2786 | rtx x, insn; | |
2787 | { | |
c4c81601 | 2788 | int i, j; |
7506f491 | 2789 | enum rtx_code code; |
6f7d635c | 2790 | const char *fmt; |
7506f491 | 2791 | |
7506f491 DE |
2792 | if (x == 0) |
2793 | return 1; | |
2794 | ||
2795 | code = GET_CODE (x); | |
2796 | switch (code) | |
2797 | { | |
2798 | case PC: | |
2799 | case CC0: | |
2800 | case CONST: | |
2801 | case CONST_INT: | |
2802 | case CONST_DOUBLE: | |
2803 | case SYMBOL_REF: | |
2804 | case LABEL_REF: | |
2805 | case ADDR_VEC: | |
2806 | case ADDR_DIFF_VEC: | |
2807 | return 1; | |
2808 | ||
2809 | case MEM: | |
e2d2ed72 AM |
2810 | if (load_killed_in_block_p (BLOCK_FOR_INSN (insn), |
2811 | INSN_CUID (insn), x, 0)) | |
a13d4ebf | 2812 | return 0; |
7506f491 DE |
2813 | if (mem_last_set != 0) |
2814 | return 0; | |
c4c81601 RK |
2815 | else |
2816 | return oprs_not_set_p (XEXP (x, 0), insn); | |
7506f491 DE |
2817 | |
2818 | case REG: | |
2819 | return ! TEST_BIT (reg_set_bitmap, REGNO (x)); | |
2820 | ||
2821 | default: | |
2822 | break; | |
2823 | } | |
2824 | ||
c4c81601 | 2825 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
2826 | { |
2827 | if (fmt[i] == 'e') | |
2828 | { | |
7506f491 DE |
2829 | /* If we are about to do the last recursive call |
2830 | needed at this level, change it into iteration. | |
2831 | This function is called enough to be worth it. */ | |
2832 | if (i == 0) | |
c4c81601 RK |
2833 | return oprs_not_set_p (XEXP (x, i), insn); |
2834 | ||
2835 | if (! oprs_not_set_p (XEXP (x, i), insn)) | |
7506f491 DE |
2836 | return 0; |
2837 | } | |
2838 | else if (fmt[i] == 'E') | |
c4c81601 RK |
2839 | for (j = 0; j < XVECLEN (x, i); j++) |
2840 | if (! oprs_not_set_p (XVECEXP (x, i, j), insn)) | |
2841 | return 0; | |
7506f491 DE |
2842 | } |
2843 | ||
2844 | return 1; | |
2845 | } | |
2846 | ||
2847 | /* Mark things set by a CALL. */ | |
2848 | ||
2849 | static void | |
b5ce41ff JL |
2850 | mark_call (insn) |
2851 | rtx insn; | |
7506f491 DE |
2852 | { |
2853 | mem_last_set = INSN_CUID (insn); | |
a13d4ebf AM |
2854 | if (! CONST_CALL_P (insn)) |
2855 | record_last_mem_set_info (insn); | |
7506f491 DE |
2856 | } |
2857 | ||
2858 | /* Mark things set by a SET. */ | |
2859 | ||
2860 | static void | |
2861 | mark_set (pat, insn) | |
2862 | rtx pat, insn; | |
2863 | { | |
2864 | rtx dest = SET_DEST (pat); | |
2865 | ||
2866 | while (GET_CODE (dest) == SUBREG | |
2867 | || GET_CODE (dest) == ZERO_EXTRACT | |
2868 | || GET_CODE (dest) == SIGN_EXTRACT | |
2869 | || GET_CODE (dest) == STRICT_LOW_PART) | |
2870 | dest = XEXP (dest, 0); | |
2871 | ||
a13d4ebf AM |
2872 | if (GET_CODE (dest) == REG) | |
2873 | SET_BIT (reg_set_bitmap, REGNO (dest)); | |
2874 | else if (GET_CODE (dest) == MEM) | |
2875 | { | |
2876 | /* Record the store in both memory trackers: the per-block | |
2877 | modify lists and the older mem_last_set cuid. */ | |
2878 | record_last_mem_set_info (insn); | |
2879 | mem_last_set = INSN_CUID (insn); | |
2880 | } | |
2881 | ||
2882 | if (GET_CODE (SET_SRC (pat)) == CALL) | |
b5ce41ff | 2883 | mark_call (insn); |
7506f491 DE |
2884 | } |
2885 | ||
2886 | /* Record things set by a CLOBBER. */ | |
2887 | ||
2888 | static void | |
2889 | mark_clobber (pat, insn) | |
2890 | rtx pat, insn; | |
2891 | { | |
2892 | rtx clob = XEXP (pat, 0); | |
2893 | ||
2894 | while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART) | |
2895 | clob = XEXP (clob, 0); | |
2896 | ||
2897 | if (GET_CODE (clob) == REG) | |
2898 | SET_BIT (reg_set_bitmap, REGNO (clob)); | |
2899 | else | |
2900 | { | |
2901 | /* As in mark_set, update both memory-modification trackers. */ | |
2902 | mem_last_set = INSN_CUID (insn); | |
2903 | record_last_mem_set_info (insn); | |
2904 | } | |
7506f491 DE |
2905 | } |
2906 | ||
2907 | /* Record things set by INSN. | |
2908 | This data is used by oprs_not_set_p. */ | |
2909 | ||
2910 | static void | |
2911 | mark_oprs_set (insn) | |
2912 | rtx insn; | |
2913 | { | |
2914 | rtx pat = PATTERN (insn); | |
c4c81601 | 2915 | int i; |
7506f491 DE |
2916 | |
2917 | if (GET_CODE (pat) == SET) | |
2918 | mark_set (pat, insn); | |
2919 | else if (GET_CODE (pat) == PARALLEL) | |
c4c81601 RK |
2920 | for (i = 0; i < XVECLEN (pat, 0); i++) |
2921 | { | |
2922 | rtx x = XVECEXP (pat, 0, i); | |
2923 | ||
2924 | if (GET_CODE (x) == SET) | |
2925 | mark_set (x, insn); | |
2926 | else if (GET_CODE (x) == CLOBBER) | |
2927 | mark_clobber (x, insn); | |
2928 | else if (GET_CODE (x) == CALL) | |
2929 | mark_call (insn); | |
2930 | } | |
7506f491 | 2931 | |
7506f491 DE |
2932 | else if (GET_CODE (pat) == CLOBBER) |
2933 | mark_clobber (pat, insn); | |
2934 | else if (GET_CODE (pat) == CALL) | |
b5ce41ff | 2935 | mark_call (insn); |
7506f491 | 2936 | } |
b5ce41ff | 2937 | |
7506f491 DE |
2938 | \f |
2939 | /* Classic GCSE reaching definition support. */ | |
2940 | ||
2941 | /* Allocate reaching def variables. */ | |
2942 | ||
2943 | static void | |
2944 | alloc_rd_mem (n_blocks, n_insns) | |
2945 | int n_blocks, n_insns; | |
2946 | { | |
2947 | rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2948 | sbitmap_vector_zero (rd_kill, n_basic_blocks); | |
2949 | ||
2950 | rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2951 | sbitmap_vector_zero (rd_gen, n_basic_blocks); | |
2952 | ||
2953 | reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2954 | sbitmap_vector_zero (reaching_defs, n_basic_blocks); | |
2955 | ||
2956 | rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2957 | sbitmap_vector_zero (rd_out, n_basic_blocks); | |
2958 | } | |
2959 | ||
2960 | /* Free reaching def variables. */ | |
2961 | ||
2962 | static void | |
2963 | free_rd_mem () | |
2964 | { | |
2965 | free (rd_kill); | |
2966 | free (rd_gen); | |
2967 | free (reaching_defs); | |
2968 | free (rd_out); | |
2969 | } | |
2970 | ||
c4c81601 | 2971 | /* INSN sets REGNO in BB. Mark every setting of REGNO outside BB as killed in BB's rd_kill set. */ | |
7506f491 DE |
2972 | |
2973 | static void | |
2974 | handle_rd_kill_set (insn, regno, bb) | |
2975 | rtx insn; | |
e2d2ed72 AM |
2976 | int regno; |
2977 | basic_block bb; | |
7506f491 | 2978 | { |
c4c81601 | 2979 | struct reg_set *this_reg; |
7506f491 | 2980 | |
c4c81601 RK |
2981 | for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next) | |
2982 | if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn)) | |
e2d2ed72 | 2983 | SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn)); |
7506f491 DE |
2984 | } |
2985 | ||
7506f491 DE |
2986 | /* Compute the set of kill's for reaching definitions. */ |
2987 | ||
2988 | static void | |
2989 | compute_kill_rd () | |
2990 | { | |
c4c81601 | 2991 | int bb, cuid; |
172890a2 RK |
2992 | unsigned int regno; |
2993 | int i; | |
7506f491 DE |
2994 | |
2995 | /* For each block | |
2996 | For each set bit in `gen' of the block (i.e. each insn which | |
ac7c5af5 JL |
2997 | generates a definition in the block) |
2998 | Call the reg set by the insn corresponding to that bit regx | |
2999 | Look at the linked list starting at reg_set_table[regx] | |
3000 | For each setting of regx in the linked list, which is not in | |
3001 | this block | |
c4c81601 | 3002 | Set the bit in `kill' corresponding to that insn. */ |
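   /* A worked illustration (editor's sketch, with made-up cuids): if
      reg 70 is set by the insn with cuid 5 in block 1 and by the insn
      with cuid 22 in block 4, then processing the cuid-5 definition
      sets the cuid-22 bit in rd_kill[1], and processing the cuid-22
      definition sets the cuid-5 bit in rd_kill[4].  */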
7506f491 | 3003 | for (bb = 0; bb < n_basic_blocks; bb++) |
c4c81601 RK |
3004 | for (cuid = 0; cuid < max_cuid; cuid++) |
3005 | if (TEST_BIT (rd_gen[bb], cuid)) | |
7506f491 | 3006 | { |
c4c81601 RK |
3007 | rtx insn = CUID_INSN (cuid); |
3008 | rtx pat = PATTERN (insn); | |
7506f491 | 3009 | |
c4c81601 RK |
3010 | if (GET_CODE (insn) == CALL_INSN) |
3011 | { | |
3012 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
ac7c5af5 | 3013 | { |
c4c81601 RK |
3014 | if ((call_used_regs[regno] |
3015 | && regno != STACK_POINTER_REGNUM | |
15f8470f | 3016 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
c4c81601 | 3017 | && regno != HARD_FRAME_POINTER_REGNUM |
15f8470f JL |
3018 | #endif |
3019 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
c4c81601 RK |
3020 | && ! (regno == ARG_POINTER_REGNUM |
3021 | && fixed_regs[regno]) | |
15f8470f | 3022 | #endif |
848e0190 | 3023 | #if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) |
c4c81601 | 3024 | && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic) |
15f8470f | 3025 | #endif |
c4c81601 RK |
3026 | && regno != FRAME_POINTER_REGNUM) |
3027 | || global_regs[regno]) | |
e2d2ed72 | 3028 | handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb)); |
ac7c5af5 | 3029 | } |
c4c81601 | 3030 | } |
7506f491 | 3031 | |
c4c81601 RK |
3032 | if (GET_CODE (pat) == PARALLEL) |
3033 | { | |
3034 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) | |
7506f491 | 3035 | { |
c4c81601 | 3036 | enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i)); |
7506f491 | 3037 | |
c4c81601 RK |
3038 | if ((code == SET || code == CLOBBER) |
3039 | && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG) | |
3040 | handle_rd_kill_set (insn, | |
3041 | REGNO (XEXP (XVECEXP (pat, 0, i), 0)), | |
e2d2ed72 | 3042 | BASIC_BLOCK (bb)); |
ac7c5af5 | 3043 | } |
ac7c5af5 | 3044 | } |
c4c81601 RK |
3045 | else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG) |
3046 | /* Each setting of this register outside of this block | |
3047 | must be marked in the set of kills in this block. */ | |
e2d2ed72 | 3048 | handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb)); |
7506f491 | 3049 | } |
7506f491 DE |
3050 | } |
3051 | ||
3052 | /* Compute the reaching definitions as in | |
3053 | Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman, | |
3054 | Chapter 10. It is the same algorithm as used for computing available | |
3055 | expressions but applied to the gens and kills of reaching definitions. */ | |
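/* In the usual dataflow notation, the loop below computes (an editor's
   sketch of the code, not a quotation from the book):

     reaching_defs[bb] = union of rd_out[p] over all predecessors p
     rd_out[bb]        = rd_gen[bb] U (reaching_defs[bb] - rd_kill[bb])

   iterated until no rd_out[] changes.  */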
3056 | ||
3057 | static void | |
3058 | compute_rd () | |
3059 | { | |
3060 | int bb, changed, passes; | |
3061 | ||
3062 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3063 | sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/); | |
3064 | ||
3065 | passes = 0; | |
3066 | changed = 1; | |
3067 | while (changed) | |
3068 | { | |
3069 | changed = 0; | |
3070 | for (bb = 0; bb < n_basic_blocks; bb++) | |
ac7c5af5 | 3071 | { |
36349f8b | 3072 | sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb); |
7506f491 DE |
3073 | changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb], |
3074 | reaching_defs[bb], rd_kill[bb]); | |
ac7c5af5 | 3075 | } |
7506f491 DE |
3076 | passes++; |
3077 | } | |
3078 | ||
3079 | if (gcse_file) | |
3080 | fprintf (gcse_file, "reaching def computation: %d passes\n", passes); | |
3081 | } | |
3082 | \f | |
3083 | /* Classic GCSE available expression support. */ | |
3084 | ||
3085 | /* Allocate memory for available expression computation. */ | |
3086 | ||
3087 | static void | |
3088 | alloc_avail_expr_mem (n_blocks, n_exprs) | |
3089 | int n_blocks, n_exprs; | |
3090 | { | |
3091 | ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
3092 | sbitmap_vector_zero (ae_kill, n_basic_blocks); | |
3093 | ||
3094 | ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
3095 | sbitmap_vector_zero (ae_gen, n_basic_blocks); | |
3096 | ||
3097 | ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
3098 | sbitmap_vector_zero (ae_in, n_basic_blocks); | |
3099 | ||
3100 | ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
3101 | sbitmap_vector_zero (ae_out, n_basic_blocks); | |
7506f491 DE |
3102 | } |
3103 | ||
3104 | static void | |
3105 | free_avail_expr_mem () | |
3106 | { | |
3107 | free (ae_kill); | |
3108 | free (ae_gen); | |
3109 | free (ae_in); | |
3110 | free (ae_out); | |
7506f491 DE |
3111 | } |
3112 | ||
3113 | /* Compute the set of available expressions generated in each basic block. */ | |
3114 | ||
3115 | static void | |
3116 | compute_ae_gen () | |
3117 | { | |
2e653e39 | 3118 | unsigned int i; |
c4c81601 RK |
3119 | struct expr *expr; |
3120 | struct occr *occr; | |
7506f491 DE |
3121 | |
3122 | /* For each recorded occurrence of each expression, set ae_gen[bb][expr]. | |
3123 | This is all we have to do because an expression is not recorded if it | |
3124 | is not available, and the only expressions we want to work with are the | |
3125 | ones that are recorded. */ | |
7506f491 | 3126 | for (i = 0; i < expr_hash_table_size; i++) |
c4c81601 RK |
3127 | for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash) |
3128 | for (occr = expr->avail_occr; occr != 0; occr = occr->next) | |
3129 | SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index); | |
7506f491 DE |
3130 | } |
3131 | ||
3132 | /* Return non-zero if expression X is killed in BB. */ | |
3133 | ||
3134 | static int | |
3135 | expr_killed_p (x, bb) | |
3136 | rtx x; | |
e2d2ed72 | 3137 | basic_block bb; |
7506f491 | 3138 | { |
c4c81601 | 3139 | int i, j; |
7506f491 | 3140 | enum rtx_code code; |
6f7d635c | 3141 | const char *fmt; |
7506f491 | 3142 | |
7506f491 DE |
3143 | if (x == 0) |
3144 | return 1; | |
3145 | ||
3146 | code = GET_CODE (x); | |
3147 | switch (code) | |
3148 | { | |
3149 | case REG: | |
e2d2ed72 | 3150 | return TEST_BIT (reg_set_in_block[bb->index], REGNO (x)); |
7506f491 DE |
3151 | |
3152 | case MEM: | |
a13d4ebf AM |
3153 | if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0)) |
3154 | return 1; | |
e2d2ed72 | 3155 | if (mem_set_in_block[bb->index]) |
7506f491 | 3156 | return 1; |
c4c81601 RK |
3157 | else |
3158 | return expr_killed_p (XEXP (x, 0), bb); | |
7506f491 DE |
3159 | |
3160 | case PC: | |
3161 | case CC0: /*FIXME*/ | |
3162 | case CONST: | |
3163 | case CONST_INT: | |
3164 | case CONST_DOUBLE: | |
3165 | case SYMBOL_REF: | |
3166 | case LABEL_REF: | |
3167 | case ADDR_VEC: | |
3168 | case ADDR_DIFF_VEC: | |
3169 | return 0; | |
3170 | ||
3171 | default: | |
3172 | break; | |
3173 | } | |
3174 | ||
c4c81601 | 3175 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3176 | { |
3177 | if (fmt[i] == 'e') | |
3178 | { | |
7506f491 DE |
3179 | /* If we are about to do the last recursive call |
3180 | needed at this level, change it into iteration. | |
3181 | This function is called enough to be worth it. */ | |
3182 | if (i == 0) | |
c4c81601 RK |
3183 | return expr_killed_p (XEXP (x, i), bb); |
3184 | else if (expr_killed_p (XEXP (x, i), bb)) | |
7506f491 DE |
3185 | return 1; |
3186 | } | |
3187 | else if (fmt[i] == 'E') | |
c4c81601 RK |
3188 | for (j = 0; j < XVECLEN (x, i); j++) |
3189 | if (expr_killed_p (XVECEXP (x, i, j), bb)) | |
3190 | return 1; | |
7506f491 DE |
3191 | } |
3192 | ||
3193 | return 0; | |
3194 | } | |
3195 | ||
3196 | /* Compute the set of available expressions killed in each basic block. */ | |
3197 | ||
3198 | static void | |
a42cd965 AM |
3199 | compute_ae_kill (ae_gen, ae_kill) |
3200 | sbitmap *ae_gen, *ae_kill; | |
7506f491 | 3201 | { |
2e653e39 RK |
3202 | int bb; |
3203 | unsigned int i; | |
c4c81601 | 3204 | struct expr *expr; |
7506f491 DE |
3205 | |
3206 | for (bb = 0; bb < n_basic_blocks; bb++) | |
c4c81601 RK |
3207 | for (i = 0; i < expr_hash_table_size; i++) |
3208 | for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash) | |
7506f491 | 3209 | { |
c4c81601 RK |
3210 | /* Skip EXPR if generated in this block. */ |
3211 | if (TEST_BIT (ae_gen[bb], expr->bitmap_index)) | |
3212 | continue; | |
7506f491 | 3213 | |
e2d2ed72 | 3214 | if (expr_killed_p (expr->expr, BASIC_BLOCK (bb))) |
c4c81601 | 3215 | SET_BIT (ae_kill[bb], expr->bitmap_index); |
7506f491 | 3216 | } |
7506f491 | 3217 | } |
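/* Given these gen and kill sets, global availability is computed by
   compute_available with the standard equations (an editor's sketch,
   not a quotation from that function):

     ae_in[bb]  = intersection of ae_out[p] over all predecessors p
     ae_out[bb] = ae_gen[bb] U (ae_in[bb] - ae_kill[bb])  */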
7506f491 DE |
3218 | \f |
3219 | /* Actually perform the Classic GCSE optimizations. */ | |
3220 | ||
3221 | /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB. | |
3222 | ||
3223 | CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself | |
3224 | as a positive reach. We want to do this when there are two computations | |
3225 | of the expression in the block. | |
3226 | ||
3227 | VISITED is a pointer to a working buffer for tracking which BB's have | |
3228 | been visited. It is NULL for the top-level call. | |
3229 | ||
3230 | We treat reaching expressions that go through blocks containing the same | |
3231 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
3232 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
3233 | 2 as not reaching. The intent is to improve the probability of finding | |
3234 | only one reaching expression and to reduce register lifetimes by picking | |
3235 | the closest such expression. */ | |
3236 | ||
3237 | static int | |
283a2545 | 3238 | expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited) |
7506f491 DE |
3239 | struct occr *occr; |
3240 | struct expr *expr; | |
e2d2ed72 | 3241 | basic_block bb; |
7506f491 DE |
3242 | int check_self_loop; |
3243 | char *visited; | |
3244 | { | |
36349f8b | 3245 | edge pred; |
7506f491 | 3246 | |
e2d2ed72 | 3247 | for (pred = bb->pred; pred != NULL; pred = pred->pred_next) |
7506f491 | 3248 | { |
e2d2ed72 | 3249 | basic_block pred_bb = pred->src; |
7506f491 | 3250 | |
e2d2ed72 | 3251 | if (visited[pred_bb->index]) |
c4c81601 | 3252 | /* This predecessor has already been visited. Nothing to do. */ |
7506f491 | 3253 | ; |
7506f491 | 3254 | else if (pred_bb == bb) |
ac7c5af5 | 3255 | { |
7506f491 DE |
3256 | /* BB loops on itself. */ |
3257 | if (check_self_loop | |
e2d2ed72 AM |
3258 | && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index) |
3259 | && BLOCK_NUM (occr->insn) == pred_bb->index) | |
7506f491 | 3260 | return 1; |
c4c81601 | 3261 | |
e2d2ed72 | 3262 | visited[pred_bb->index] = 1; |
ac7c5af5 | 3263 | } |
c4c81601 | 3264 | |
7506f491 | 3265 | /* Ignore this predecessor if it kills the expression. */ |
e2d2ed72 AM |
3266 | else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index)) |
3267 | visited[pred_bb->index] = 1; | |
c4c81601 | 3268 | |
7506f491 | 3269 | /* Does this predecessor generate this expression? */ |
e2d2ed72 | 3270 | else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)) |
7506f491 DE |
3271 | { |
3272 | /* Is this the occurrence we're looking for? | |
3273 | Note that there's only one generating occurrence per block | |
3274 | so we just need to check the block number. */ | |
e2d2ed72 | 3275 | if (BLOCK_NUM (occr->insn) == pred_bb->index) |
7506f491 | 3276 | return 1; |
c4c81601 | 3277 | |
e2d2ed72 | 3278 | visited[pred_bb->index] = 1; |
7506f491 | 3279 | } |
c4c81601 | 3280 | |
7506f491 DE |
3281 | /* Neither gen nor kill. */ |
3282 | else | |
ac7c5af5 | 3283 | { |
e2d2ed72 | 3284 | visited[pred_bb->index] = 1; |
283a2545 RL |
3285 | if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop, |
3286 | visited)) | |
c4c81601 | 3287 | |
7506f491 | 3288 | return 1; |
ac7c5af5 | 3289 | } |
7506f491 DE |
3290 | } |
3291 | ||
3292 | /* All paths have been checked. */ | |
3293 | return 0; | |
3294 | } | |
3295 | ||
283a2545 RL |
3296 | /* This wrapper for expr_reaches_here_p_work() is to ensure that any |
3297 | memory allocated for that function is returned. */ | |
3298 | ||
3299 | static int | |
3300 | expr_reaches_here_p (occr, expr, bb, check_self_loop) | |
3301 | struct occr *occr; | |
3302 | struct expr *expr; | |
e2d2ed72 | 3303 | basic_block bb; |
283a2545 RL |
3304 | int check_self_loop; |
3305 | { | |
3306 | int rval; | |
c4c81601 | 3307 | char *visited = (char *) xcalloc (n_basic_blocks, 1); |
283a2545 | 3308 | |
c4c81601 | 3309 | rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited); |
283a2545 RL |
3310 | |
3311 | free (visited); | |
c4c81601 | 3312 | return rval; |
283a2545 RL |
3313 | } |
3314 | ||
7506f491 DE |
3315 | /* Return the instruction that computes EXPR that reaches INSN's basic block. |
3316 | If there is more than one such instruction, return NULL. | |
3317 | ||
3318 | Called only by handle_avail_expr. */ | |
3319 | ||
3320 | static rtx | |
3321 | computing_insn (expr, insn) | |
3322 | struct expr *expr; | |
3323 | rtx insn; | |
3324 | { | |
e2d2ed72 | 3325 | basic_block bb = BLOCK_FOR_INSN (insn); |
7506f491 DE |
3326 | |
3327 | if (expr->avail_occr->next == NULL) | |
3328 | { | |
e2d2ed72 | 3329 | if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb) |
c4c81601 RK |
3330 | /* The available expression is actually itself |
3331 | (i.e. a loop in the flow graph) so do nothing. */ | |
3332 | return NULL; | |
3333 | ||
7506f491 DE |
3334 | /* (FIXME) Case that we found a pattern that was created by |
3335 | a substitution that took place. */ | |
3336 | return expr->avail_occr->insn; | |
3337 | } | |
3338 | else | |
3339 | { | |
3340 | /* Pattern is computed more than once. | |
3341 | Search backwards from this insn to see how many of these | |
3342 | computations actually reach this insn. */ | |
3343 | struct occr *occr; | |
3344 | rtx insn_computes_expr = NULL; | |
3345 | int can_reach = 0; | |
3346 | ||
3347 | for (occr = expr->avail_occr; occr != NULL; occr = occr->next) | |
3348 | { | |
e2d2ed72 | 3349 | if (BLOCK_FOR_INSN (occr->insn) == bb) |
7506f491 DE |
3350 | { |
3351 | /* The expression is generated in this block. | |
3352 | The only time we care about this is when the expression | |
3353 | is generated later in the block [and thus there's a loop]. | |
3354 | We let the normal cse pass handle the other cases. */ | |
c4c81601 RK |
3355 | if (INSN_CUID (insn) < INSN_CUID (occr->insn) |
3356 | && expr_reaches_here_p (occr, expr, bb, 1)) | |
7506f491 DE |
3357 | { |
3358 | can_reach++; | |
3359 | if (can_reach > 1) | |
3360 | return NULL; | |
c4c81601 | 3361 | |
7506f491 DE |
3362 | insn_computes_expr = occr->insn; |
3363 | } | |
3364 | } | |
c4c81601 RK |
3365 | else if (expr_reaches_here_p (occr, expr, bb, 0)) |
3366 | { | |
3367 | can_reach++; | |
3368 | if (can_reach > 1) | |
3369 | return NULL; | |
3370 | ||
3371 | insn_computes_expr = occr->insn; | |
3372 | } | |
7506f491 DE |
3373 | } |
3374 | ||
3375 | if (insn_computes_expr == NULL) | |
3376 | abort (); | |
c4c81601 | 3377 | |
7506f491 DE |
3378 | return insn_computes_expr; |
3379 | } | |
3380 | } | |
3381 | ||
3382 | /* Return non-zero if the definition in DEF_INSN can reach INSN. | |
3383 | Only called by can_disregard_other_sets. */ | |
3384 | ||
3385 | static int | |
3386 | def_reaches_here_p (insn, def_insn) | |
3387 | rtx insn, def_insn; | |
3388 | { | |
3389 | rtx reg; | |
3390 | ||
3391 | if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn))) | |
3392 | return 1; | |
3393 | ||
3394 | if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn)) | |
3395 | { | |
3396 | if (INSN_CUID (def_insn) < INSN_CUID (insn)) | |
ac7c5af5 | 3397 | { |
7506f491 DE |
3398 | if (GET_CODE (PATTERN (def_insn)) == PARALLEL) |
3399 | return 1; | |
c4c81601 | 3400 | else if (GET_CODE (PATTERN (def_insn)) == CLOBBER) |
7506f491 DE |
3401 | reg = XEXP (PATTERN (def_insn), 0); |
3402 | else if (GET_CODE (PATTERN (def_insn)) == SET) | |
3403 | reg = SET_DEST (PATTERN (def_insn)); | |
3404 | else | |
3405 | abort (); | |
c4c81601 | 3406 | |
7506f491 DE |
3407 | return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn); |
3408 | } | |
3409 | else | |
3410 | return 0; | |
3411 | } | |
3412 | ||
3413 | return 0; | |
3414 | } | |
3415 | ||
c4c81601 RK |
3416 | /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The |
3417 | value returned is the number of definitions that reach INSN. Returning a | |
3418 | value of zero means that [maybe] more than one definition reaches INSN and | |
3419 | the caller can't perform whatever optimization it is trying; i.e. it is | |
3420 | always safe to return zero. */ | |
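/* For example (editor's illustration with made-up registers): if two
   settings of reg 70, both with SET_SRC equal to that of INSN, reach
   INSN along different paths, both are counted and the substitution is
   still allowed.  If any reaching definition is a CLOBBER or stores a
   different value, we return zero.  */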
7506f491 DE |
3421 | |
3422 | static int | |
3423 | can_disregard_other_sets (addr_this_reg, insn, for_combine) | |
3424 | struct reg_set **addr_this_reg; | |
3425 | rtx insn; | |
3426 | int for_combine; | |
3427 | { | |
3428 | int number_of_reaching_defs = 0; | |
c4c81601 | 3429 | struct reg_set *this_reg; |
7506f491 | 3430 | |
c4c81601 RK |
3431 | for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next) |
3432 | if (def_reaches_here_p (insn, this_reg->insn)) | |
3433 | { | |
3434 | number_of_reaching_defs++; | |
3435 | /* Ignore parallels for now. */ | |
3436 | if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL) | |
3437 | return 0; | |
3438 | ||
3439 | if (!for_combine | |
3440 | && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER | |
3441 | || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), | |
3442 | SET_SRC (PATTERN (insn))))) | |
3443 | /* A setting of the reg to a different value reaches INSN. */ | |
3444 | return 0; | |
3445 | ||
3446 | if (number_of_reaching_defs > 1) | |
3447 | { | |
3448 | /* If in this setting the value the register is being set to is | |
3449 | equal to the previous value the register was set to and this | |
3450 | setting reaches the insn we are trying to do the substitution | |
3451 | on, then we are ok. */ | |
3452 | if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER) | |
7506f491 | 3453 | return 0; |
c4c81601 RK |
3454 | else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), |
3455 | SET_SRC (PATTERN (insn)))) | |
3456 | return 0; | |
3457 | } | |
7506f491 | 3458 | |
c4c81601 RK |
3459 | *addr_this_reg = this_reg; |
3460 | } | |
7506f491 DE |
3461 | |
3462 | return number_of_reaching_defs; | |
3463 | } | |
3464 | ||
3465 | /* Expression computed by insn is available and the substitution is legal, | |
3466 | so try to perform the substitution. | |
3467 | ||
3468 | The result is non-zero if any changes were made. */ | |
3469 | ||
3470 | static int | |
3471 | handle_avail_expr (insn, expr) | |
3472 | rtx insn; | |
3473 | struct expr *expr; | |
3474 | { | |
3475 | rtx pat, insn_computes_expr; | |
3476 | rtx to; | |
3477 | struct reg_set *this_reg; | |
3478 | int found_setting, use_src; | |
3479 | int changed = 0; | |
3480 | ||
3481 | /* We only handle the case where one computation of the expression | |
3482 | reaches this instruction. */ | |
3483 | insn_computes_expr = computing_insn (expr, insn); | |
3484 | if (insn_computes_expr == NULL) | |
3485 | return 0; | |
3486 | ||
3487 | found_setting = 0; | |
3488 | use_src = 0; | |
3489 | ||
3490 | /* At this point we know only one computation of EXPR outside of this | |
3491 | block reaches this insn. Now try to find a register that the | |
3492 | expression is computed into. */ | |
7506f491 DE |
3493 | if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG) |
3494 | { | |
3495 | /* This is the case when the available expression that reaches | |
3496 | here has already been handled as an available expression. */ | |
770ae6cc | 3497 | unsigned int regnum_for_replacing |
c4c81601 RK |
3498 | = REGNO (SET_SRC (PATTERN (insn_computes_expr))); |
3499 | ||
7506f491 DE |
3500 | /* If the register was created by GCSE we can't use `reg_set_table', |
3501 | however we know it's set only once. */ | |
3502 | if (regnum_for_replacing >= max_gcse_regno | |
3503 | /* If the register the expression is computed into is set only once, | |
3504 | or only one set reaches this insn, we can use it. */ | |
3505 | || (((this_reg = reg_set_table[regnum_for_replacing]), | |
3506 | this_reg->next == NULL) | |
3507 | || can_disregard_other_sets (&this_reg, insn, 0))) | |
3508 | { | |
3509 | use_src = 1; | |
3510 | found_setting = 1; | |
3511 | } | |
3512 | } | |
3513 | ||
3514 | if (!found_setting) | |
3515 | { | |
770ae6cc | 3516 | unsigned int regnum_for_replacing |
c4c81601 RK |
3517 | = REGNO (SET_DEST (PATTERN (insn_computes_expr))); |
3518 | ||
7506f491 DE |
3519 | /* This shouldn't happen. */ |
3520 | if (regnum_for_replacing >= max_gcse_regno) | |
3521 | abort (); | |
c4c81601 | 3522 | |
7506f491 | 3523 | this_reg = reg_set_table[regnum_for_replacing]; |
c4c81601 | 3524 | |
7506f491 DE |
3525 | /* If the register the expression is computed into is set only once, |
3526 | or only one set reaches this insn, use it. */ | |
3527 | if (this_reg->next == NULL | |
3528 | || can_disregard_other_sets (&this_reg, insn, 0)) | |
3529 | found_setting = 1; | |
3530 | } | |
3531 | ||
3532 | if (found_setting) | |
3533 | { | |
3534 | pat = PATTERN (insn); | |
3535 | if (use_src) | |
3536 | to = SET_SRC (PATTERN (insn_computes_expr)); | |
3537 | else | |
3538 | to = SET_DEST (PATTERN (insn_computes_expr)); | |
3539 | changed = validate_change (insn, &SET_SRC (pat), to, 0); | |
3540 | ||
3541 | /* We should be able to ignore the return code from validate_change but | |
3542 | to play it safe we check. */ | |
3543 | if (changed) | |
3544 | { | |
3545 | gcse_subst_count++; | |
3546 | if (gcse_file != NULL) | |
3547 | { | |
c4c81601 RK |
3548 | fprintf (gcse_file, "GCSE: Replacing the source in insn %d with", |
3549 | INSN_UID (insn)); | |
3550 | fprintf (gcse_file, " reg %d %s insn %d\n", | |
3551 | REGNO (to), use_src ? "from" : "set in", | |
7506f491 DE |
3552 | INSN_UID (insn_computes_expr)); |
3553 | } | |
7506f491 DE |
3554 | } |
3555 | } | |
c4c81601 | 3556 | |
7506f491 DE |
3557 | /* The register that the expr is computed into is set more than once. */ |
3558 | else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/) | |
3559 | { | |
3560 | /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg it | |
3561 | sets into a new pseudo register; call this new register REGN. | |
3562 | From there until the end of the basic block, or until REGB (the | |
3563 | original register) is set again, replace all uses of REGB with REGN. */ | |
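      /* Schematically (editor's sketch with made-up insns): if
	 insn_computes_expr is
	   (set (reg 68) (plus (reg 60) (const_int 8)))
	 and reg 68 is set again before INSN, we emit
	   (set (reg 90) (reg 68))
	 right after it and replace the SET_SRC of INSN with (reg 90).  */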
3564 | rtx new_insn; | |
3565 | ||
3566 | to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr)))); | |
3567 | ||
3568 | /* Generate the new insn. */ | |
3569 | /* ??? If the change fails, we return 0, even though we created | |
3570 | an insn. I think this is ok. */ | |
9e6a5703 JC |
3571 | new_insn |
3572 | = emit_insn_after (gen_rtx_SET (VOIDmode, to, | |
c4c81601 RK |
3573 | SET_DEST (PATTERN |
3574 | (insn_computes_expr))), | |
3575 | insn_computes_expr); | |
3576 | ||
7506f491 | 3577 | /* Keep block number table up to date. */ |
ccbaf064 | 3578 | set_block_for_new_insns (new_insn, BLOCK_FOR_INSN (insn_computes_expr)); |
c4c81601 | 3579 | |
7506f491 DE |
3580 | /* Keep register set table up to date. */ |
3581 | record_one_set (REGNO (to), new_insn); | |
3582 | ||
3583 | gcse_create_count++; | |
3584 | if (gcse_file != NULL) | |
ac7c5af5 | 3585 | { |
c4c81601 | 3586 | fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d", |
7506f491 | 3587 | INSN_UID (NEXT_INSN (insn_computes_expr)), |
c4c81601 RK |
3588 | REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr))))); |
3589 | fprintf (gcse_file, ", computed in insn %d,\n", | |
7506f491 | 3590 | INSN_UID (insn_computes_expr)); |
c4c81601 RK |
3591 | fprintf (gcse_file, " into newly allocated reg %d\n", |
3592 | REGNO (to)); | |
ac7c5af5 | 3593 | } |
7506f491 DE |
3594 | |
3595 | pat = PATTERN (insn); | |
3596 | ||
3597 | /* Do register replacement for INSN. */ | |
3598 | changed = validate_change (insn, &SET_SRC (pat), | |
c4c81601 RK |
3599 | SET_DEST (PATTERN |
3600 | (NEXT_INSN (insn_computes_expr))), | |
7506f491 DE |
3601 | 0); |
3602 | ||
3603 | /* We should be able to ignore the return code from validate_change but | |
3604 | to play it safe we check. */ | |
3605 | if (changed) | |
3606 | { | |
3607 | gcse_subst_count++; | |
3608 | if (gcse_file != NULL) | |
3609 | { | |
c4c81601 RK |
3610 | fprintf (gcse_file, |
3611 | "GCSE: Replacing the source in insn %d with reg %d ", | |
7506f491 | 3612 | INSN_UID (insn), |
c4c81601 RK |
3613 | REGNO (SET_DEST (PATTERN (NEXT_INSN |
3614 | (insn_computes_expr))))); | |
3615 | fprintf (gcse_file, "set in insn %d\n", | |
7506f491 DE |
3616 | INSN_UID (insn_computes_expr)); |
3617 | } | |
7506f491 DE |
3618 | } |
3619 | } | |
3620 | ||
3621 | return changed; | |
3622 | } | |
3623 | ||
c4c81601 RK |
3624 | /* Perform classic GCSE. This is called by one_classic_gcse_pass after all |
3625 | the dataflow analysis has been done. | |
7506f491 DE |
3626 | |
3627 | The result is non-zero if a change was made. */ | |
3628 | ||
3629 | static int | |
3630 | classic_gcse () | |
3631 | { | |
3632 | int bb, changed; | |
3633 | rtx insn; | |
3634 | ||
3635 | /* Note we start at block 1. */ | |
3636 | ||
3637 | changed = 0; | |
3638 | for (bb = 1; bb < n_basic_blocks; bb++) | |
3639 | { | |
3640 | /* Reset tables used to keep track of what's still valid [since the | |
3641 | start of the block]. */ | |
3642 | reset_opr_set_tables (); | |
3643 | ||
3b413743 RH |
3644 | for (insn = BLOCK_HEAD (bb); |
3645 | insn != NULL && insn != NEXT_INSN (BLOCK_END (bb)); | |
7506f491 DE |
3646 | insn = NEXT_INSN (insn)) |
3647 | { | |
3648 | /* Is insn of form (set (pseudo-reg) ...)? */ | |
7506f491 DE |
3649 | if (GET_CODE (insn) == INSN |
3650 | && GET_CODE (PATTERN (insn)) == SET | |
3651 | && GET_CODE (SET_DEST (PATTERN (insn))) == REG | |
3652 | && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER) | |
3653 | { | |
3654 | rtx pat = PATTERN (insn); | |
3655 | rtx src = SET_SRC (pat); | |
3656 | struct expr *expr; | |
3657 | ||
3658 | if (want_to_gcse_p (src) | |
3659 | /* Is the expression recorded? */ | |
3660 | && ((expr = lookup_expr (src)) != NULL) | |
3661 | /* Is the expression available [at the start of the | |
3662 | block]? */ | |
3663 | && TEST_BIT (ae_in[bb], expr->bitmap_index) | |
3664 | /* Are the operands unchanged since the start of the | |
3665 | block? */ | |
3666 | && oprs_not_set_p (src, insn)) | |
3667 | changed |= handle_avail_expr (insn, expr); | |
3668 | } | |
3669 | ||
3670 | /* Keep track of everything modified by this insn. */ | |
3671 | /* ??? Need to be careful w.r.t. mods done to INSN. */ | |
2c3c49de | 3672 | if (INSN_P (insn)) |
7506f491 | 3673 | mark_oprs_set (insn); |
ac7c5af5 | 3674 | } |
7506f491 DE |
3675 | } |
3676 | ||
3677 | return changed; | |
3678 | } | |
3679 | ||
3680 | /* Top level routine to perform one classic GCSE pass. | |
3681 | ||
3682 | Return non-zero if a change was made. */ | |
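/* An outline of the pass (editor's summary of the calls below): build
   the expression hash table, compute reaching definitions
   (compute_kill_rd, compute_rd), compute expression availability
   (compute_ae_gen, compute_ae_kill, compute_available), and then let
   classic_gcse perform the replacements.  */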
3683 | ||
3684 | static int | |
b5ce41ff | 3685 | one_classic_gcse_pass (pass) |
7506f491 DE |
3686 | int pass; |
3687 | { | |
3688 | int changed = 0; | |
3689 | ||
3690 | gcse_subst_count = 0; | |
3691 | gcse_create_count = 0; | |
3692 | ||
3693 | alloc_expr_hash_table (max_cuid); | |
3694 | alloc_rd_mem (n_basic_blocks, max_cuid); | |
b5ce41ff | 3695 | compute_expr_hash_table (); |
7506f491 DE |
3696 | if (gcse_file) |
3697 | dump_hash_table (gcse_file, "Expression", expr_hash_table, | |
3698 | expr_hash_table_size, n_exprs); | |
c4c81601 | 3699 | |
7506f491 DE |
3700 | if (n_exprs > 0) |
3701 | { | |
3702 | compute_kill_rd (); | |
3703 | compute_rd (); | |
3704 | alloc_avail_expr_mem (n_basic_blocks, n_exprs); | |
3705 | compute_ae_gen (); | |
a42cd965 | 3706 | compute_ae_kill (ae_gen, ae_kill); |
bd0eaec2 | 3707 | compute_available (ae_gen, ae_kill, ae_out, ae_in); |
7506f491 DE |
3708 | changed = classic_gcse (); |
3709 | free_avail_expr_mem (); | |
3710 | } | |
c4c81601 | 3711 | |
7506f491 DE |
3712 | free_rd_mem (); |
3713 | free_expr_hash_table (); | |
3714 | ||
3715 | if (gcse_file) | |
3716 | { | |
3717 | fprintf (gcse_file, "\n"); | |
c4c81601 RK |
3718 | fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,", |
3719 | current_function_name, pass, bytes_used, gcse_subst_count); | |
3720 | fprintf (gcse_file, "%d insns created\n", gcse_create_count); | |
7506f491 DE |
3721 | } |
3722 | ||
3723 | return changed; | |
3724 | } | |
3725 | \f | |
3726 | /* Compute copy/constant propagation working variables. */ | |
3727 | ||
3728 | /* Local properties of assignments. */ | |
7506f491 DE |
3729 | static sbitmap *cprop_pavloc; |
3730 | static sbitmap *cprop_absaltered; | |
3731 | ||
3732 | /* Global properties of assignments (computed from the local properties). */ | |
7506f491 DE |
3733 | static sbitmap *cprop_avin; |
3734 | static sbitmap *cprop_avout; | |
3735 | ||
c4c81601 RK |
3736 | /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of |
3737 | basic blocks. N_SETS is the number of sets. */ | |
7506f491 DE |
3738 | |
3739 | static void | |
3740 | alloc_cprop_mem (n_blocks, n_sets) | |
3741 | int n_blocks, n_sets; | |
3742 | { | |
3743 | cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets); | |
3744 | cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets); | |
3745 | ||
3746 | cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets); | |
3747 | cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets); | |
3748 | } | |
3749 | ||
3750 | /* Free vars used by copy/const propagation. */ | |
3751 | ||
3752 | static void | |
3753 | free_cprop_mem () | |
3754 | { | |
3755 | free (cprop_pavloc); | |
3756 | free (cprop_absaltered); | |
3757 | free (cprop_avin); | |
3758 | free (cprop_avout); | |
3759 | } | |
3760 | ||
c4c81601 RK |
3761 | /* For each block, compute whether X is transparent. X is either an |
3762 | expression or an assignment [though we don't care which, for this context | |
3763 | an assignment is treated as an expression]. For each block where an | |
3764 | element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX | |
3765 | bit in BMAP. */ | |
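/* For instance (editor's illustration): if X is (plus (reg 65)
   (const_int 4)) and reg 65 is set in blocks 2 and 7, then with
   SET_P == 0 the INDX bit is reset in bmap[2] and bmap[7]; X is not
   transparent in those blocks.  */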
7506f491 DE |
3766 | |
3767 | static void | |
3768 | compute_transp (x, indx, bmap, set_p) | |
3769 | rtx x; | |
3770 | int indx; | |
3771 | sbitmap *bmap; | |
3772 | int set_p; | |
3773 | { | |
c4c81601 | 3774 | int bb, i, j; |
7506f491 | 3775 | enum rtx_code code; |
c4c81601 | 3776 | reg_set *r; |
6f7d635c | 3777 | const char *fmt; |
7506f491 | 3778 | |
c4c81601 RK |
3779 | /* repeat is used to turn tail-recursion into iteration since GCC |
3780 | can't do it when there's no return value. */ | |
7506f491 DE |
3781 | repeat: |
3782 | ||
3783 | if (x == 0) | |
3784 | return; | |
3785 | ||
3786 | code = GET_CODE (x); | |
3787 | switch (code) | |
3788 | { | |
3789 | case REG: | |
c4c81601 RK |
3790 | if (set_p) |
3791 | { | |
3792 | if (REGNO (x) < FIRST_PSEUDO_REGISTER) | |
3793 | { | |
3794 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3795 | if (TEST_BIT (reg_set_in_block[bb], REGNO (x))) | |
3796 | SET_BIT (bmap[bb], indx); | |
3797 | } | |
3798 | else | |
3799 | { | |
3800 | for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next) | |
3801 | SET_BIT (bmap[BLOCK_NUM (r->insn)], indx); | |
3802 | } | |
3803 | } | |
3804 | else | |
3805 | { | |
3806 | if (REGNO (x) < FIRST_PSEUDO_REGISTER) | |
3807 | { | |
3808 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3809 | if (TEST_BIT (reg_set_in_block[bb], REGNO (x))) | |
3810 | RESET_BIT (bmap[bb], indx); | |
3811 | } | |
3812 | else | |
3813 | { | |
3814 | for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next) | |
3815 | RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx); | |
3816 | } | |
3817 | } | |
7506f491 | 3818 | |
c4c81601 | 3819 | return; |
7506f491 DE |
3820 | |
3821 | case MEM: | |
a13d4ebf AM |
3822 | for (bb = 0; bb < n_basic_blocks; bb++) |
3823 | { | |
3824 | rtx list_entry = canon_modify_mem_list[bb]; | |
3825 | ||
3826 | while (list_entry) | |
3827 | { | |
3828 | rtx dest, dest_addr; | |
3829 | ||
3830 | if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN) | |
3831 | { | |
3832 | if (set_p) | |
3833 | SET_BIT (bmap[bb], indx); | |
3834 | else | |
3835 | RESET_BIT (bmap[bb], indx); | |
3836 | break; | |
3837 | } | |
3838 | /* LIST_ENTRY must be an INSN of some kind that sets memory. | |
3839 | Examine each hunk of memory that is modified. */ | |
3840 | ||
3841 | dest = XEXP (list_entry, 0); | |
3842 | list_entry = XEXP (list_entry, 1); | |
3843 | dest_addr = XEXP (list_entry, 0); | |
3844 | ||
3845 | if (canon_true_dependence (dest, GET_MODE (dest), dest_addr, | |
3846 | x, rtx_addr_varies_p)) | |
3847 | { | |
3848 | if (set_p) | |
3849 | SET_BIT (bmap[bb], indx); | |
3850 | else | |
3851 | RESET_BIT (bmap[bb], indx); | |
3852 | break; | |
3853 | } | |
3854 | list_entry = XEXP (list_entry, 1); | |
3855 | } | |
3856 | } | |
7506f491 DE |
3857 | if (set_p) |
3858 | { | |
3859 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3860 | if (mem_set_in_block[bb]) | |
3861 | SET_BIT (bmap[bb], indx); | |
3862 | } | |
3863 | else | |
3864 | { | |
3865 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3866 | if (mem_set_in_block[bb]) | |
3867 | RESET_BIT (bmap[bb], indx); | |
3868 | } | |
c4c81601 | 3869 | |
7506f491 DE |
3870 | x = XEXP (x, 0); |
3871 | goto repeat; | |
3872 | ||
3873 | case PC: | |
3874 | case CC0: /*FIXME*/ | |
3875 | case CONST: | |
3876 | case CONST_INT: | |
3877 | case CONST_DOUBLE: | |
3878 | case SYMBOL_REF: | |
3879 | case LABEL_REF: | |
3880 | case ADDR_VEC: | |
3881 | case ADDR_DIFF_VEC: | |
3882 | return; | |
3883 | ||
3884 | default: | |
3885 | break; | |
3886 | } | |
3887 | ||
c4c81601 | 3888 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3889 | { |
3890 | if (fmt[i] == 'e') | |
3891 | { | |
7506f491 DE |
3892 | /* If we are about to do the last recursive call |
3893 | needed at this level, change it into iteration. | |
3894 | This function is called enough to be worth it. */ | |
3895 | if (i == 0) | |
3896 | { | |
c4c81601 | 3897 | x = XEXP (x, i); |
7506f491 DE |
3898 | goto repeat; |
3899 | } | |
c4c81601 RK |
3900 | |
3901 | compute_transp (XEXP (x, i), indx, bmap, set_p); | |
7506f491 DE |
3902 | } |
3903 | else if (fmt[i] == 'E') | |
c4c81601 RK |
3904 | for (j = 0; j < XVECLEN (x, i); j++) |
3905 | compute_transp (XVECEXP (x, i, j), indx, bmap, set_p); | |
7506f491 DE |
3906 | } |
3907 | } | |
3908 | ||
7506f491 DE |
3909 | /* Top level routine to do the dataflow analysis needed by copy/const |
3910 | propagation. */ | |
3911 | ||
3912 | static void | |
3913 | compute_cprop_data () | |
3914 | { | |
b5ce41ff | 3915 | compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1); |
ce724250 JL |
3916 | compute_available (cprop_pavloc, cprop_absaltered, |
3917 | cprop_avout, cprop_avin); | |
7506f491 DE |
3918 | } |
3919 | \f | |
3920 | /* Copy/constant propagation. */ | |
3921 | ||
7506f491 DE |
3922 | /* Maximum number of register uses in an insn that we handle. */ |
3923 | #define MAX_USES 8 | |
3924 | ||
3925 | /* Table of uses found in an insn. | |
3926 | Allocated statically to avoid alloc/free complexity and overhead. */ | |
3927 | static struct reg_use reg_use_table[MAX_USES]; | |
3928 | ||
3929 | /* Index into `reg_use_table' while building it. */ | |
3930 | static int reg_use_count; | |
3931 | ||
c4c81601 RK |
3932 | /* Set up a list of register numbers used in INSN. The found uses are stored |
3933 | in `reg_use_table'. `reg_use_count' is initialized to zero before entry, | |
3934 | and contains the number of uses in the table upon exit. | |
7506f491 | 3935 | |
c4c81601 RK |
3936 | ??? If a register appears multiple times we will record it multiple times. |
3937 | This doesn't hurt anything but it will slow things down. */ | |
7506f491 DE |
3938 | |
3939 | static void | |
3940 | find_used_regs (x) | |
3941 | rtx x; | |
3942 | { | |
c4c81601 | 3943 | int i, j; |
7506f491 | 3944 | enum rtx_code code; |
6f7d635c | 3945 | const char *fmt; |
7506f491 | 3946 | |
c4c81601 RK |
3947 | /* repeat is used to turn tail-recursion into iteration since GCC |
3948 | can't do it when there's no return value. */ | |
7506f491 DE |
3949 | repeat: |
3950 | ||
3951 | if (x == 0) | |
3952 | return; | |
3953 | ||
3954 | code = GET_CODE (x); | |
3955 | switch (code) | |
3956 | { | |
3957 | case REG: | |
3958 | if (reg_use_count == MAX_USES) | |
3959 | return; | |
c4c81601 | 3960 | |
7506f491 DE |
3961 | reg_use_table[reg_use_count].reg_rtx = x; |
3962 | reg_use_count++; | |
3963 | return; | |
3964 | ||
3965 | case MEM: | |
3966 | x = XEXP (x, 0); | |
3967 | goto repeat; | |
3968 | ||
3969 | case PC: | |
3970 | case CC0: | |
3971 | case CONST: | |
3972 | case CONST_INT: | |
3973 | case CONST_DOUBLE: | |
3974 | case SYMBOL_REF: | |
3975 | case LABEL_REF: | |
3976 | case CLOBBER: | |
3977 | case ADDR_VEC: | |
3978 | case ADDR_DIFF_VEC: | |
3979 | case ASM_INPUT: /*FIXME*/ | |
3980 | return; | |
3981 | ||
3982 | case SET: | |
3983 | if (GET_CODE (SET_DEST (x)) == MEM) | |
3984 | find_used_regs (SET_DEST (x)); | |
3985 | x = SET_SRC (x); | |
3986 | goto repeat; | |
3987 | ||
3988 | default: | |
3989 | break; | |
3990 | } | |
3991 | ||
3992 | /* Recursively scan the operands of this expression. */ | |
3993 | ||
c4c81601 | 3994 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3995 | { |
3996 | if (fmt[i] == 'e') | |
3997 | { | |
3998 | /* If we are about to do the last recursive call | |
3999 | needed at this level, change it into iteration. | |
4000 | This function is called enough to be worth it. */ | |
4001 | if (i == 0) | |
4002 | { | |
4003 | x = XEXP (x, 0); | |
4004 | goto repeat; | |
4005 | } | |
c4c81601 | 4006 | |
7506f491 DE |
4007 | find_used_regs (XEXP (x, i)); |
4008 | } | |
4009 | else if (fmt[i] == 'E') | |
c4c81601 RK |
4010 | for (j = 0; j < XVECLEN (x, i); j++) |
4011 | find_used_regs (XVECEXP (x, i, j)); | |
7506f491 DE |
4012 | } |
4013 | } | |
4014 | ||
4015 | /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO. | |
4016 | Returns non-zero if successful. */ | |
4017 | ||
4018 | static int | |
4019 | try_replace_reg (from, to, insn) | |
4020 | rtx from, to, insn; | |
4021 | { | |
172890a2 | 4022 | rtx note = find_reg_equal_equiv_note (insn); |
fb0c0a12 | 4023 | rtx src = 0; |
172890a2 RK |
4024 | int success = 0; |
4025 | rtx set = single_set (insn); | |
833fc3ad | 4026 | |
172890a2 RK |
4027 | /* If this is a single set, try to simplify the source of the set given |
4028 | our substitution. We could perhaps try this for multiple SETs, but | |
4029 | it probably won't buy us anything. */ | |
4030 | if (set != 0) | |
833fc3ad | 4031 | { |
172890a2 RK |
4032 | src = simplify_replace_rtx (SET_SRC (set), from, to); |
4033 | ||
4034 | /* Try this two ways: first just replace SET_SRC. If that doesn't | |
4035 | work and this is a PARALLEL, try to replace the whole pattern | |
4036 | with a new SET. */ | |
4037 | if (validate_change (insn, &SET_SRC (set), src, 0)) | |
4038 | success = 1; | |
4039 | else if (GET_CODE (PATTERN (insn)) == PARALLEL | |
4040 | && validate_change (insn, &PATTERN (insn), | |
4041 | gen_rtx_SET (VOIDmode, SET_DEST (set), | |
4042 | src), | |
4043 | 0)) | |
4044 | success = 1; | |
833fc3ad JH |
4045 | } |
4046 | ||
172890a2 RK |
4047 | /* Otherwise, try to do a global replacement within the insn. */ |
4048 | if (!success) | |
4049 | success = validate_replace_src (from, to, insn); | |
c4c81601 | 4050 | |
fb0c0a12 RK |
4051 | /* If we've failed to do replacement, have a single SET, and don't already |
4052 | have a note, add a REG_EQUAL note to not lose information. */ | |
172890a2 | 4053 | if (!success && note == 0 && set != 0) |
fb0c0a12 | 4054 | note = REG_NOTES (insn) |
172890a2 | 4055 | = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn)); |
e251e2a2 | 4056 | |
172890a2 RK |
4057 | /* If there is already a NOTE, update the expression in it with our |
4058 | replacement. */ | |
4059 | else if (note != 0) | |
4060 | XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to); | |
833fc3ad | 4061 | |
172890a2 RK |
4062 | /* REG_EQUAL may get simplified into a register. | |
4063 | We don't allow that; remove the note. This case ought | |
4064 | not to happen, because previous code ought to have synthesized | |
4065 | a reg-reg move, but be on the safe side. */ | |
4066 | if (note && REG_P (XEXP (note, 0))) | |
4067 | remove_note (insn, note); | |
833fc3ad | 4068 | |
833fc3ad JH |
4069 | return success; |
4070 | } | |
c4c81601 RK |
4071 | |
4072 | /* Find a set of register REGNO that is available on entry to INSN's | |
4073 | block. Returns NULL if no such set is found. */ | |
7506f491 DE |
4074 | |
4075 | static struct expr * | |
4076 | find_avail_set (regno, insn) | |
4077 | int regno; | |
4078 | rtx insn; | |
4079 | { | |
cafba495 BS |
4080 | /* SET1 contains the last set found that can be returned to the caller for |
4081 | use in a substitution. */ | |
4082 | struct expr *set1 = 0; | |
4083 | ||
4084 | /* Loops are not possible here. To get a loop we would need two sets | |
4085 | available at the start of the block containing INSN, i.e. we would | |
4086 | need two sets like this available at the start of the block: | |
4087 | ||
4088 | (set (reg X) (reg Y)) | |
4089 | (set (reg Y) (reg X)) | |
4090 | ||
4091 | This cannot happen since the set of (reg Y) would have killed the | |
4092 | set of (reg X) making it unavailable at the start of this block. */ | |
4093 | while (1) | |
4094 | { | |
4095 | rtx src; | |
4096 | struct expr *set = lookup_set (regno, NULL_RTX); | |
4097 | ||
4098 | /* Find a set that is available at the start of the block | |
4099 | which contains INSN. */ | |
4100 | while (set) | |
4101 | { | |
4102 | if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index)) | |
4103 | break; | |
4104 | set = next_set (regno, set); | |
4105 | } | |
7506f491 | 4106 | |
cafba495 BS |
4107 | /* If no available set was found we've reached the end of the |
4108 | (possibly empty) copy chain. */ | |
4109 | if (set == 0) | |
4110 | break; | |
4111 | ||
4112 | if (GET_CODE (set->expr) != SET) | |
4113 | abort (); | |
4114 | ||
4115 | src = SET_SRC (set->expr); | |
4116 | ||
4117 | /* We know the set is available. | |
4118 | Now check that SRC is ANTLOC (i.e. none of the source operands | |
4119 | have changed since the start of the block). | |
4120 | ||
4121 | If the source operand changed, we may still use it for the next | |
4122 | iteration of this loop, but we may not use it for substitutions. */ | |
c4c81601 | 4123 | |
cafba495 BS |
4124 | if (CONSTANT_P (src) || oprs_not_set_p (src, insn)) |
4125 | set1 = set; | |
4126 | ||
4127 | /* If the source of the set is anything except a register, then | |
4128 | we have reached the end of the copy chain. */ | |
4129 | if (GET_CODE (src) != REG) | |
7506f491 | 4130 | break; |
7506f491 | 4131 | |
cafba495 BS |
4132 | /* Follow the copy chain, i.e. start another iteration of the loop | |
4133 | and see if we have an available copy into SRC. */ | |
4134 | regno = REGNO (src); | |
4135 | } | |
4136 | ||
4137 | /* SET1 holds the last set that was available and anticipatable at | |
4138 | INSN. */ | |
4139 | return set1; | |
7506f491 DE |
4140 | } |
4141 | ||
abd535b6 | 4142 | /* Subroutine of cprop_insn that tries to propagate constants into |
172890a2 RK |
4143 | JUMP_INSNS. INSN must be a conditional jump. FROM is what we will try to |
4144 | replace, SRC is the constant we will try to substitute for it. Returns | |
4145 | nonzero if a change was made. We know INSN has just a SET. */ | |
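/* For example (editor's illustration with made-up numbers): with FROM
   = (reg 80) and SRC = (const_int 0), the jump

     (set (pc) (if_then_else (eq (reg 80) (const_int 0))
			     (label_ref 23) (pc)))

   simplifies to (set (pc) (label_ref 23)), an unconditional jump, and
   a barrier is then emitted after it.  */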
c4c81601 | 4146 | |
abd535b6 | 4147 | static int |
172890a2 RK |
4148 | cprop_jump (insn, from, src) |
4149 | rtx insn; | |
4150 | rtx from; | |
abd535b6 BS |
4151 | rtx src; |
4152 | { | |
172890a2 RK |
4153 | rtx set = PATTERN (insn); |
4154 | rtx new = simplify_replace_rtx (SET_SRC (set), from, src); | |
abd535b6 BS |
4155 | |
4156 | /* If no simplification can be made, then try the next | |
4157 | register. */ | |
172890a2 | 4158 | if (rtx_equal_p (new, SET_SRC (set))) |
abd535b6 BS |
4159 | return 0; |
4160 | ||
172890a2 RK |
4161 | /* If this is now a no-op, leave it that way, but update LABEL_NUSES if | |
4162 | necessary. */ | |
4163 | if (new == pc_rtx) | |
abd535b6 | 4164 | { |
172890a2 RK |
4165 | SET_SRC (set) = new; |
4166 | ||
4167 | if (JUMP_LABEL (insn) != 0) | |
abd535b6 | 4168 | --LABEL_NUSES (JUMP_LABEL (insn)); |
172890a2 | 4169 | } |
abd535b6 | 4170 | |
172890a2 RK |
4171 | /* Otherwise, this must be a valid instruction. */ |
4172 | else if (! validate_change (insn, &SET_SRC (set), new, 0)) | |
4173 | return 0; | |
abd535b6 | 4174 | |
172890a2 RK |
4175 | /* If this has turned into an unconditional jump, |
4176 | then put a barrier after it so that the unreachable | |
4177 | code will be deleted. */ | |
4178 | if (GET_CODE (SET_SRC (set)) == LABEL_REF) | |
4179 | emit_barrier_after (insn); | |
abd535b6 | 4180 | |
172890a2 | 4181 | run_jump_opt_after_gcse = 1; |
c4c81601 | 4182 | |
172890a2 RK |
4183 | const_prop_count++; |
4184 | if (gcse_file != NULL) | |
4185 | { | |
4186 | fprintf (gcse_file, | |
4187 | "CONST-PROP: Replacing reg %d in insn %d with constant ", | |
4188 | REGNO (from), INSN_UID (insn)); | |
4189 | print_rtl (gcse_file, src); | |
4190 | fprintf (gcse_file, "\n"); | |
abd535b6 | 4191 | } |
172890a2 RK |
4192 | |
4193 | return 1; | |
abd535b6 BS |
4194 | } |
4195 | ||
4196 | #ifdef HAVE_cc0 | |
c4c81601 RK |
4197 | |
4198 | /* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS | |
4199 | for machines that have CC0. INSN is a single set that stores into CC0; | |
4200 | the insn following it is a conditional jump. REG_USED is the use we will | |
4201 | try to replace, SRC is the constant we will try to substitute for it. | |
abd535b6 | 4202 | Returns nonzero if a change was made. */ |
c4c81601 | 4203 | |
abd535b6 BS |
4204 | static int |
4205 | cprop_cc0_jump (insn, reg_used, src) | |
4206 | rtx insn; | |
4207 | struct reg_use *reg_used; | |
4208 | rtx src; | |
4209 | { | |
172890a2 RK |
4210 | /* First substitute in the SET_SRC of INSN, then substitute that for |
4211 | CC0 in JUMP. */ | |
abd535b6 | 4212 | rtx jump = NEXT_INSN (insn); |
172890a2 RK |
4213 | rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)), |
4214 | reg_used->reg_rtx, src); | |
abd535b6 | 4215 | |
172890a2 | 4216 | if (! cprop_jump (jump, cc0_rtx, new_src)) |
abd535b6 BS |
4217 | return 0; |
4218 | ||
4219 | /* If we succeeded, delete the cc0 setter. */ | |
4220 | PUT_CODE (insn, NOTE); | |
4221 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
4222 | NOTE_SOURCE_FILE (insn) = 0; | |
172890a2 | 4223 | |
abd535b6 BS |
4224 | return 1; |
4225 | } | |
4226 | #endif | |
4227 | ||
7506f491 DE |
4228 | /* Perform constant and copy propagation on INSN. |
4229 | The result is non-zero if a change was made. */ | |
4230 | ||
4231 | static int | |
b5ce41ff | 4232 | cprop_insn (insn, alter_jumps) |
7506f491 | 4233 | rtx insn; |
b5ce41ff | 4234 | int alter_jumps; |
7506f491 DE |
4235 | { |
4236 | struct reg_use *reg_used; | |
4237 | int changed = 0; | |
833fc3ad | 4238 | rtx note; |
7506f491 | 4239 | |
e78d9500 JL |
4240 | /* Only propagate into SETs. Note that a conditional jump is a |
4241 | SET with pc_rtx as the destination. */ | |
172890a2 | 4242 | if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN) |
7506f491 DE |
4243 | return 0; |
4244 | ||
4245 | reg_use_count = 0; | |
4246 | find_used_regs (PATTERN (insn)); | |
833fc3ad | 4247 | |
172890a2 | 4248 | note = find_reg_equal_equiv_note (insn); |
833fc3ad JH |
4249 | |
4250 | /* We may win even when propagating constants into notes. */ | |
4251 | if (note) | |
4252 | find_used_regs (XEXP (note, 0)); | |
7506f491 | 4253 | |
c4c81601 RK |
4254 | for (reg_used = ®_use_table[0]; reg_use_count > 0; |
4255 | reg_used++, reg_use_count--) | |
7506f491 | 4256 | { |
770ae6cc | 4257 | unsigned int regno = REGNO (reg_used->reg_rtx); |
7506f491 DE |
4258 | rtx pat, src; |
4259 | struct expr *set; | |
7506f491 DE |
4260 | |
4261 | /* Ignore registers created by GCSE; the tables were sized by | |
4262 | max_gcse_regno before such pseudos existed, so we have no info for them. */ | |
4263 | if (regno >= max_gcse_regno) | |
4264 | continue; | |
4265 | ||
4266 | /* If the register has already been set in this block, there's | |
4267 | nothing we can do. */ | |
4268 | if (! oprs_not_set_p (reg_used->reg_rtx, insn)) | |
4269 | continue; | |
4270 | ||
4271 | /* Find an assignment that sets reg_used and is available | |
4272 | at the start of the block. */ | |
4273 | set = find_avail_set (regno, insn); | |
4274 | if (! set) | |
4275 | continue; | |
4276 | ||
4277 | pat = set->expr; | |
4278 | /* ??? We might be able to handle PARALLELs. Later. */ | |
4279 | if (GET_CODE (pat) != SET) | |
4280 | abort (); | |
c4c81601 | 4281 | |
7506f491 DE |
4282 | src = SET_SRC (pat); |
4283 | ||
e78d9500 | 4284 | /* Constant propagation. */ |
05f6f07c BS |
4285 | if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE |
4286 | || GET_CODE (src) == SYMBOL_REF) | |
7506f491 | 4287 | { |
e78d9500 JL |
4288 | /* Handle normal insns first. */ |
4289 | if (GET_CODE (insn) == INSN | |
4290 | && try_replace_reg (reg_used->reg_rtx, src, insn)) | |
7506f491 DE |
4291 | { |
4292 | changed = 1; | |
4293 | const_prop_count++; | |
4294 | if (gcse_file != NULL) | |
4295 | { | |
c4c81601 RK |
4296 | fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ", |
4297 | regno); | |
4298 | fprintf (gcse_file, "insn %d with constant ", | |
4299 | INSN_UID (insn)); | |
e78d9500 | 4300 | print_rtl (gcse_file, src); |
7506f491 DE |
4301 | fprintf (gcse_file, "\n"); |
4302 | } | |
4303 | ||
4304 | /* The original insn setting reg_used may or may not now be | |
4305 | deletable. We leave the deletion to flow. */ | |
4306 | } | |
e78d9500 JL |
4307 | |
4308 | /* Try to propagate a CONST_INT into a conditional jump. | |
4309 | We're pretty specific about what we will handle in this | |
4310 | code, we can extend this as necessary over time. | |
4311 | ||
4312 | Right now the insn in question must look like | |
abd535b6 | 4313 | (set (pc) (if_then_else ...)) */ |
b5ce41ff | 4314 | else if (alter_jumps |
6e9a3c38 JL |
4315 | && GET_CODE (insn) == JUMP_INSN |
4316 | && condjump_p (insn) | |
4317 | && ! simplejump_p (insn)) | |
172890a2 RK |
4318 | changed |= cprop_jump (insn, reg_used->reg_rtx, src); |
4319 | ||
abd535b6 BS |
4320 | #ifdef HAVE_cc0 |
4321 | /* Similar code for machines that use a pair of CC0 setter and | |
4322 | conditional jump insn. */ | |
4323 | else if (alter_jumps | |
4324 | && GET_CODE (PATTERN (insn)) == SET | |
4325 | && SET_DEST (PATTERN (insn)) == cc0_rtx | |
4326 | && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN | |
4327 | && condjump_p (NEXT_INSN (insn)) | |
172890a2 RK |
4328 | && ! simplejump_p (NEXT_INSN (insn)) |
4329 | && cprop_cc0_jump (insn, reg_used, src)) | |
4330 | { | |
4331 | changed = 1; | |
4332 | break; | |
d7836e38 | 4333 | } |
abd535b6 | 4334 | #endif |
7506f491 DE |
4335 | } |
4336 | else if (GET_CODE (src) == REG | |
4337 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
4338 | && REGNO (src) != regno) | |
4339 | { | |
cafba495 | 4340 | if (try_replace_reg (reg_used->reg_rtx, src, insn)) |
7506f491 | 4341 | { |
cafba495 BS |
4342 | changed = 1; |
4343 | copy_prop_count++; | |
4344 | if (gcse_file != NULL) | |
7506f491 | 4345 | { |
c4c81601 RK |
4346 | fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d", |
4347 | regno, INSN_UID (insn)); | |
4348 | fprintf (gcse_file, " with reg %d\n", REGNO (src)); | |
7506f491 | 4349 | } |
cafba495 BS |
4350 | |
4351 | /* The original insn setting reg_used may or may not now be | |
4352 | deletable. We leave the deletion to flow. */ | |
4353 | /* FIXME: If it turns out that the insn isn't deletable, | |
4354 | then we may have unnecessarily extended register lifetimes | |
4355 | and made things worse. */ | |
7506f491 DE |
4356 | } |
4357 | } | |
4358 | } | |
4359 | ||
4360 | return changed; | |
4361 | } | |
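/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the dispatch above boils down to classifying the available source.
   Constants and symbolic addresses get constant propagation; a distinct
   pseudo register gets copy propagation; anything else is left alone.  */
#if 0
enum example_prop { EX_NONE, EX_CONST, EX_COPY };

static enum example_prop
example_classify_src (src_is_constant, src_regno, dest_regno, first_pseudo)
     int src_is_constant, src_regno, dest_regno, first_pseudo;
{
  if (src_is_constant)
    return EX_CONST;

  /* Copying from a hard register, or from the register itself,
     gains nothing.  */
  if (src_regno >= first_pseudo && src_regno != dest_regno)
    return EX_COPY;

  return EX_NONE;
}
#endif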

/* Forward propagate copies.  This includes copies and constants.  Return
   non-zero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            changed |= cprop_insn (insn, alter_jumps);

            /* Keep track of everything modified by this insn.  */
            /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
               call mark_oprs_set if we turned the insn into a NOTE.  */
            if (GET_CODE (insn) != NOTE)
              mark_oprs_set (insn);
          }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
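/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the invariant that reset_opr_set_tables and mark_oprs_set maintain
   above, reduced to arrays.  At block entry every register still holds
   its incoming value; each set inside the block invalidates it, and
   only still-valid registers may be rewritten from block-entry facts.  */
#if 0
#define EX_NREGS 8

static void
example_track_block_sets (dests, n_insns, reg_ok)
     int *dests, n_insns, *reg_ok;
{
  int i;

  /* At block entry, every register still holds its incoming value.  */
  for (i = 0; i < EX_NREGS; i++)
    reg_ok[i] = 1;

  /* Each set inside the block makes the incoming value stale.  */
  for (i = 0; i < n_insns; i++)
    if (dests[i] >= 0 && dests[i] < EX_NREGS)
      reg_ok[dests[i]] = 0;
}
#endif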

/* Perform one copy/constant propagation pass.
   PASS is the pass count.  ALTER_JUMPS is nonzero if we should also
   try to propagate into conditional jumps.  */

static int
one_cprop_pass (pass, alter_jumps)
     int pass;
     int alter_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  alloc_set_hash_table (max_cuid);
  compute_set_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
                     n_sets);

  if (n_sets > 0)
    {
      alloc_cprop_mem (n_basic_blocks, n_sets);
      compute_cprop_data ();
      changed = cprop (alter_jumps);
      free_cprop_mem ();
    }

  free_set_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
               current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
               const_prop_count, copy_prop_count);
    }

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */

/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;
/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  free (transp);
  free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    free (pre_optimal);
  if (pre_redundant)
    free (pre_redundant);
  if (pre_insert_map)
    free (pre_insert_map);
  if (pre_delete_map)
    free (pre_delete_map);

  if (ae_in)
    free (ae_in);
  if (ae_out)
    free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  sbitmap trapping_expr;
  int i;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, 0);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (n_exprs);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table_size; ui++)
    {
      struct expr *e;

      for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
        if (may_trap_p (e->expr))
          SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
         kill all trapping expressions because we won't be able to properly
         place the instruction on the edge.  So make them neither
         anticipatable nor transparent.  This is fairly conservative.  */
      for (e = BASIC_BLOCK (i)->pred; e; e = e->pred_next)
        if (e->flags & EDGE_ABNORMAL)
          {
            sbitmap_difference (antloc[i], antloc[i], trapping_expr);
            sbitmap_difference (transp[i], transp[i], trapping_expr);
            break;
          }

      sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
      sbitmap_not (ae_kill[i], ae_kill[i]);
    }

  edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
                            ae_kill, &pre_insert_map, &pre_delete_map);
  free (antloc);
  antloc = NULL;
  free (ae_kill);
  ae_kill = NULL;
  free (trapping_expr);
}
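/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the kill computation above is pure bit arithmetic.  With one unsigned
   mask per block standing in for an sbitmap row, an expression is killed
   in a block exactly when it is neither transparent nor computed there.  */
#if 0
static void
example_compute_ae_kill (transp, comp, kill, n_blocks)
     unsigned int *transp, *comp, *kill;
     int n_blocks;
{
  int i;

  for (i = 0; i < n_blocks; i++)
    kill[i] = ~(transp[i] | comp[i]);
}
#endif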
\f
/* PRE utilities */

/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
          /* Has this predecessor already been visited?  */
          || visited[pred_bb->index])
        ; /* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (occr_bb == pred_bb)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
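/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the walk above is a depth-first search backwards over predecessor
   edges, classifying each block as generating, killing, or neutral for
   the expression.  Here blocks are plain ints (negative = entry block),
   PREDS lists each block's predecessors, and GEN/TRANSP are per-block
   flags.  */
#if 0
static int
example_reaches (occr_bb, bb, npred, preds, gen, transp, visited)
     int occr_bb, bb, *npred, **preds, *gen, *transp;
     char *visited;
{
  int i;

  for (i = 0; i < npred[bb]; i++)
    {
      int p = preds[bb][i];

      if (p < 0 || visited[p])
        continue;                       /* entry block, or already seen */
      visited[p] = 1;

      if (gen[p])
        {
          /* Any other generating block closes off this path.  */
          if (p == occr_bb)
            return 1;
        }
      else if (transp[p]
               && example_reaches (occr_bb, p, npred, preds, gen, transp,
                                   visited))
        return 1;
    }

  return 0;
}
#endif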
\f

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = gen_sequence ();
  end_sequence ();

  return pat;
}
/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     basic_block bb;
     int pre;
{
  rtx insn = bb->end;
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat;
  int i;

  pat = process_insert_insn (expr);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  */

  if (GET_CODE (insn) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      rtx note;
#endif

      /* If this is a jump table, then we can't insert stuff here.  Since
         we know the previous real insn must be the tablejump, we insert
         the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
        insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
         if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
        insn = XEXP (note, 0);
      else
        {
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
          if (maybe_cc0_setter
              && INSN_P (maybe_cc0_setter)
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
            insn = maybe_cc0_setter;
        }
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_block_insn_before (pat, insn, bb);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN)
    {
      HARD_REG_SET parm_regs;
      int nparm_regs;
      rtx p;

      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
         we search backward and place the instructions before the first
         parameter is loaded.  Do this for everyone for consistency and a
         presumption that we'll get better code elsewhere as well.

         It should always be the case that we can put these instructions
         anywhere in the basic block when performing PRE optimizations.
         Check this.  */

      if (pre
          && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
          && !TEST_BIT (transp[bb->index], expr->bitmap_index))
        abort ();

      /* Since different machines initialize their parameter registers
         in different orders, assume nothing.  Collect the set of all
         parameter registers.  */
      CLEAR_HARD_REG_SET (parm_regs);
      nparm_regs = 0;
      for (p = CALL_INSN_FUNCTION_USAGE (insn); p; p = XEXP (p, 1))
        if (GET_CODE (XEXP (p, 0)) == USE
            && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
          {
            if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
              abort ();

            /* We only care about registers which can hold function
               arguments.  */
            if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
              continue;

            SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
            nparm_regs++;
          }

      /* Search backward for the first set of a register in this set.  */
      while (nparm_regs && bb->head != insn)
        {
          insn = PREV_INSN (insn);
          p = single_set (insn);
          if (p && GET_CODE (SET_DEST (p)) == REG
              && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
              && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
            {
              CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
              nparm_regs--;
            }
        }

      /* If we found all the parameter loads, then we want to insert
         before the first parameter load.

         If we did not find all the parameter loads, then we might have
         stopped on the head of the block, which could be a CODE_LABEL.
         If we inserted before the CODE_LABEL, then we would be putting
         the insn in the wrong basic block.  In that case, put the insn
         after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
             || NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);

      new_insn = emit_block_insn_before (pat, insn, bb);
    }
  else
    {
      new_insn = emit_insn_after (pat, insn);
      bb->end = new_insn;
    }

  /* Keep block number table up to date.
     Note, PAT could be a multiple insn sequence; we have to make
     sure that each insn in the sequence is handled.  */
  if (GET_CODE (pat) == SEQUENCE)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx insn = XVECEXP (pat, 0, i);

          set_block_for_insn (insn, bb);
          if (INSN_P (insn))
            add_label_notes (PATTERN (insn), new_insn);

          note_stores (PATTERN (insn), record_set_info, insn);
        }
    }
  else
    {
      add_label_notes (SET_SRC (pat), new_insn);
      set_block_for_new_insns (new_insn, bb);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
               bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
               expr->bitmap_index, regno);
    }
}
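/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the backward scan above, reduced to arrays.  Given the destination
   register of each insn (or -1) and a bitmask of the call's parameter
   registers (assumed numbered below 32 here), walk backwards until
   every parameter load has been seen; the resulting index is the
   insertion point.  */
#if 0
static int
example_find_insert_point (dests, n_insns, parm_mask)
     int *dests, n_insns;
     unsigned int parm_mask;
{
  int i = n_insns - 1;          /* index of the call itself */

  while (parm_mask != 0 && i > 0)
    {
      i--;
      /* A set of a parameter register means its load has been found.  */
      if (dests[i] >= 0 && (parm_mask & (1u << dests[i])))
        parm_mask &= ~(1u << dests[i]);
    }

  return i;                     /* insert before insn I */
}
#endif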

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, n_exprs);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
        {
          SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

          for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
            if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
              {
                struct expr *expr = index_map[j];
                struct occr *occr;

                /* Now look at each deleted occurrence of this expression.  */
                for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                  {
                    if (! occr->deleted_p)
                      continue;

                    /* Insert this expression on this edge if it would
                       reach the deleted occurrence in BB.  */
                    if (!TEST_BIT (inserted[e], j))
                      {
                        rtx insn;
                        edge eg = INDEX_EDGE (edge_list, e);

                        /* We can't insert anything on an abnormal and
                           critical edge, so we insert the insn at the end of
                           the previous block.  There are several alternatives
                           detailed in Morgan's book, p. 277 (sec. 10.5), for
                           handling this situation.  This one is easiest for
                           now.  */

                        if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
                          insert_insn_end_bb (index_map[j], bb, 0);
                        else
                          {
                            insn = process_insert_insn (index_map[j]);
                            insert_insn_on_edge (insn, eg);
                          }

                        if (gcse_file)
                          {
                            fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
                                     bb->index,
                                     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
                            fprintf (gcse_file, "copy expression %d\n",
                                     expr->bitmap_index);
                          }

                        update_ld_motion_stores (expr);
                        SET_BIT (inserted[e], j);
                        did_insert = 1;
                        gcse_create_count++;
                      }
                  }
              }
        }
    }

  free (inserted);
  return did_insert;
}
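/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the inner loops above enumerate the set bits of a bitmap one word at
   a time; shifting the word right, the low bit says whether expression
   J is marked.  Here the marked expression numbers are collected into
   OUT.  */
#if 0
static int
example_collect_set_bits (words, n_words, bits_per_word, n_exprs, out)
     unsigned long *words;
     int n_words, bits_per_word, n_exprs, *out;
{
  int i, j, indx, n = 0;

  for (i = indx = 0; i < n_words; i++, indx += bits_per_word)
    {
      unsigned long w = words[i];

      for (j = indx; w != 0 && j < n_exprs; j++, w >>= 1)
        if (w & 1)
          out[n++] = j;         /* expression J is marked */
    }

  return n;
}
#endif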

/* Copy the result of INSN to REG.  INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (!set)
    abort ();

  new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
                              insn);

  /* Keep block number table up to date.  */
  set_block_for_new_insns (new_insn, bb);

  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);
  if (insn == bb->end)
    bb->end = new_insn;

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
             "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
             BLOCK_NUM (insn), INSN_UID (new_insn), indx,
             INSN_UID (insn), regno);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        /* If the basic block isn't reachable, PPOUT will be TRUE.  However,
           we don't want to insert a copy here because the expression may not
           really be redundant.  So only insert an insn if the expression was
           deleted.  This test also avoids further processing if the
           expression wasn't deleted anywhere.  */
        if (expr->reaching_reg == NULL)
          continue;

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            if (! occr->deleted_p)
              continue;

            for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
              {
                rtx insn = avail->insn;

                /* No need to handle this one if handled already.  */
                if (avail->copied_p)
                  continue;

                /* Don't handle this one if it's a redundant one.  */
                if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                  continue;

                /* Or if the expression doesn't reach the deleted one.  */
                if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
                                               expr,
                                               BLOCK_FOR_INSN (occr->insn)))
                  continue;

                /* Copy the result of avail to reaching_reg.  */
                pre_insert_copy_insn (expr, insn);
                avail->copied_p = 1;
              }
          }
      }
}

/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */

static int
pre_delete ()
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        int indx = expr->bitmap_index;

        /* We only need to search antic_occr since we require
           ANTLOC != 0.  */

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            rtx insn = occr->insn;
            rtx set;
            basic_block bb = BLOCK_FOR_INSN (insn);

            if (TEST_BIT (pre_delete_map[bb->index], indx))
              {
                set = single_set (insn);
                if (! set)
                  abort ();

                /* Create a pseudo-reg to store the result of reaching
                   expressions into.  Get the mode for the new pseudo from
                   the mode of the original destination pseudo.  */
                if (expr->reaching_reg == NULL)
                  expr->reaching_reg
                    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                /* In theory this should never fail since we're creating
                   a reg->reg copy.

                   However, on the x86 some of the movXX patterns actually
                   contain clobbers of scratch regs.  This may cause the
                   insn created by validate_change to not match any pattern
                   and thus cause validate_change to fail.  */
                if (validate_change (insn, &SET_SRC (set),
                                     expr->reaching_reg, 0))
                  {
                    occr->deleted_p = 1;
                    SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                    changed = 1;
                    gcse_subst_count++;
                  }

                if (gcse_file)
                  {
                    fprintf (gcse_file,
                             "PRE: redundant insn %d (expression %d) in ",
                             INSN_UID (insn), indx);
                    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
                             bb->index, REGNO (expr->reaching_reg));
                  }
              }
          }
      }

  return changed;
}

/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse ()
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  free (pre_redundant_insns);
  return changed;
}

/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  add_noreturn_fake_exit_edges ();
  if (flag_gcse_lm)
    compute_ld_motion_mems ();

  compute_expr_hash_table ();
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
                     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  free_ldst_mems ();
  remove_fake_edges ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
               current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
               gcse_subst_count, gcse_create_count);
    }

  return changed;
}
\f
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
         avoid flow generating (slightly) worse code.

         We no longer ignore such label references (see LABEL_REF handling in
         mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
                                            REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
        LABEL_NUSES (XEXP (x, 0))++;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_label_notes (XVECEXP (x, i, j), insn);
    }
}
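/* Illustrative sketch (hypothetical types, compiled out with #if 0):
   the loop above is the standard rtx walk driven by the format string:
   'e' means a single subexpression, 'E' a vector of them, and anything
   else is a leaf.  The same shape on a generic tagged tree:  */
#if 0
struct example_node
{
  const char *fmt;                  /* one char per operand slot */
  struct example_node *child[4];    /* children for the 'e' slots */
};

static void
example_walk (n)
     struct example_node *n;
{
  int i;

  for (i = 0; n->fmt[i] != '\0'; i++)
    if (n->fmt[i] == 'e' && n->child[i] != 0)
      example_walk (n->child[i]);
  /* Other format letters are leaves (ints, strings, ...).  */
}
#endif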

/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout ()
{
  int bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
         end in call instructions for reasons other than abnormal
         control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
        continue;

      for (i = 0; i < expr_hash_table_size; i++)
        for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
          if (GET_CODE (expr->expr) == MEM)
            {
              if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
                continue;

              /* ??? Optimally, we would use interprocedural alias
                 analysis to determine if this mem is actually killed
                 by this call.  */
              RESET_BIT (transpout[bb], expr->bitmap_index);
            }
    }
}

/* Removal of useless null pointer checks */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (x, setter, data)
     rtx x;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block], regno);
}

/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static void
delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
                              nonnull_avout, npi)
     varray_type *delete_list;
     unsigned int *block_reg;
     sbitmap *nonnull_avin;
     sbitmap *nonnull_avout;
     struct null_pointer_info *npi;
{
  int bb;
  int current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, n_basic_blocks);
  sbitmap_vector_zero (nonnull_killed, n_basic_blocks);

  for (current_block = 0; current_block < n_basic_blocks; current_block++)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
         register sets.  */
      stop_insn = NEXT_INSN (BLOCK_END (current_block));
      for (insn = BLOCK_HEAD (current_block);
           insn != stop_insn;
           insn = NEXT_INSN (insn))
        {
          rtx set;
          rtx reg;

          /* Ignore anything that is not a normal insn.  */
          if (! INSN_P (insn))
            continue;

          /* Basically ignore anything that is not a simple SET.  We do have
             to make sure to invalidate nonnull_local and set nonnull_killed
             for such insns though.  */
          set = single_set (insn);
          if (!set)
            {
              note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
              continue;
            }

          /* See if we've got a usable memory load.  We handle it first
             in case it uses its address register as a dest (which kills
             the nonnull property).  */
          if (GET_CODE (SET_SRC (set)) == MEM
              && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
            SET_BIT (nonnull_local[current_block],
                     REGNO (reg) - npi->min_reg);

          /* Now invalidate stuff clobbered by this insn.  */
          note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

          /* And handle stores; we do these last since any sets in INSN can
             not kill the nonnull property if it is derived from a MEM
             appearing in a SET_DEST.  */
          if (GET_CODE (SET_DEST (set)) == MEM
              && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
              && REGNO (reg) >= npi->min_reg
              && REGNO (reg) < npi->max_reg)
            SET_BIT (nonnull_local[current_block],
                     REGNO (reg) - npi->min_reg);
        }
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
                     nonnull_avout, nonnull_avin);

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
         since BLOCK_REG[BB] is zero if this block did not end with a
         comparison against zero, this condition works.  */
      if (block_reg[bb] < npi->min_reg
          || block_reg[bb] >= npi->max_reg)
        continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we can't determine the condition then skip.  */
      if (! condition)
        continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
        continue;

      /* Try to compute whether the compare/branch at the loop end is one or
         two instructions.  */
      if (earliest == last_insn)
        compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
        compare_and_branch = 2;
      else
        continue;

      /* We know the register in this comparison is nonnull at exit from
         this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
        {
          rtx new_jump;

          new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
                                            last_insn);
          JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
          LABEL_NUSES (JUMP_LABEL (new_jump))++;
          emit_barrier_after (new_jump);
        }

      if (!*delete_list)
        VARRAY_RTX_INIT (*delete_list, 10, "delete_list");

      VARRAY_PUSH_RTX (*delete_list, last_insn);
      if (compare_and_branch == 2)
        VARRAY_PUSH_RTX (*delete_list, earliest);

      /* Don't check this block again.  (Note that BLOCK_END is
         invalid here; we deleted the last instruction in the
         block.)  */
      block_reg[bb] = 0;
    }
}
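/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the global step above is a forward "available" dataflow problem.
   With one mask per block, AVIN is the meet over predecessors of AVOUT,
   and AVOUT = LOCAL | (AVIN & ~KILLED), iterated to a fixed point.  To
   keep the sketch self-contained, blocks 1..n-1 each have block i-1 as
   their single predecessor.  */
#if 0
static void
example_compute_available (local, killed, avout, avin, n_blocks)
     unsigned int *local, *killed, *avout, *avin;
     int n_blocks;
{
  int i, changed = 1;

  for (i = 0; i < n_blocks; i++)
    avin[i] = 0, avout[i] = local[i];

  while (changed)
    {
      changed = 0;
      for (i = 1; i < n_blocks; i++)
        {
          unsigned int out;

          avin[i] = avout[i - 1];       /* single predecessor */
          out = local[i] | (avin[i] & ~killed[i]);
          if (out != avout[i])
            avout[i] = out, changed = 1;
        }
    }
}
#endif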

/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */

void
delete_null_pointer_checks (f)
     rtx f ATTRIBUTE_UNUSED;
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  varray_type delete_list = NULL;
  int bb;
  int reg;
  int regs_per_pass;
  int max_reg;
  unsigned int i;
  struct null_pointer_info npi;

  /* If we have only a single block, then there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    return;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  max_reg = max_reg_num ();
  regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
          || !any_condjump_p (last_insn)
          || !onlyjump_p (last_insn))
        continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we were unable to get the condition, or it is not an equality
         comparison against zero, then there's nothing we can do.  */
      if (!condition
          || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
          || GET_CODE (XEXP (condition, 1)) != CONST_INT
          || (XEXP (condition, 1)
              != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
        continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
        continue;

      block_reg[bb] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
                                    nonnull_avout, &npi);
    }

  /* Now delete the instructions all at once.  This breaks the CFG.  */
  if (delete_list)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
        delete_insn (VARRAY_RTX (delete_list, i));
      VARRAY_FREE (delete_list);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  free (npi.nonnull_local);
  free (npi.nonnull_killed);
  free (nonnull_avin);
  free (nonnull_avout);
}
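/* Illustrative sketch (hypothetical names, compiled out with #if 0):
   the loop above processes the pseudo registers in fixed-size windows
   so the per-block bitmaps stay at a bounded width regardless of how
   many registers the function uses.  Here the window bounds are simply
   collected into arrays.  */
#if 0
static int
example_window_bounds (first, max_reg, window, mins, maxs)
     int first, max_reg, window, *mins, *maxs;
{
  int reg, n = 0;

  for (reg = first; reg < max_reg; reg += window)
    {
      mins[n] = reg;
      maxs[n] = reg + window < max_reg ? reg + window : max_reg;
      /* Register R in this window gets bit number R - mins[n].  */
      n++;
    }

  return n;
}
#endif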

/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
static sbitmap *dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

  dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem ()
{
  free (antloc);
  free (transp);
  free (comp);

  free (hoist_vbein);
  free (hoist_vbeout);
  free (hoist_exprs);
  free (transpout);

  free (dominators);
}
5689 | ||
5690 | /* Compute the very busy expressions at entry/exit from each block. | |
5691 | ||
5692 | An expression is very busy if all paths from a given point | |
5693 | compute the expression. */ | |
5694 | ||
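/* For instance (a minimal illustration):

     if (cond)
       x = a + b;
     else
       y = a + b;

   a + b is computed along every path leaving the point just before
   the `if', so it is very busy there.  In equation form, the fixed
   point computed below is

     hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])
     hoist_vbeout[bb] = intersection of hoist_vbein over bb's successors.  */
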
5695 | static void | |
5696 | compute_code_hoist_vbeinout () | |
5697 | { | |
5698 | int bb, changed, passes; | |
5699 | ||
5700 | sbitmap_vector_zero (hoist_vbeout, n_basic_blocks); | |
5701 | sbitmap_vector_zero (hoist_vbein, n_basic_blocks); | |
5702 | ||
5703 | passes = 0; | |
5704 | changed = 1; | |
c4c81601 | 5705 | |
bb457bd9 JL |
5706 | while (changed) |
5707 | { | |
5708 | changed = 0; | |
c4c81601 | 5709 | |
bb457bd9 JL |
5710 | /* We scan the blocks in reverse order to speed up | |
5711 | the convergence. */ | |
5712 | for (bb = n_basic_blocks - 1; bb >= 0; bb--) | |
5713 | { | |
5714 | changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb], | |
5715 | hoist_vbeout[bb], transp[bb]); | |
5716 | if (bb != n_basic_blocks - 1) | |
a42cd965 | 5717 | sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb); |
bb457bd9 | 5718 | } |
c4c81601 | 5719 | |
bb457bd9 JL |
5720 | passes++; |
5721 | } | |
5722 | ||
5723 | if (gcse_file) | |
5724 | fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes); | |
5725 | } | |
5726 | ||
5727 | /* Top level routine to do the dataflow analysis needed by code hoisting. */ | |
5728 | ||
5729 | static void | |
5730 | compute_code_hoist_data () | |
5731 | { | |
5732 | compute_local_properties (transp, comp, antloc, 0); | |
5733 | compute_transpout (); | |
5734 | compute_code_hoist_vbeinout (); | |
f8032688 | 5735 | calculate_dominance_info (NULL, dominators, CDI_DOMINATORS); |
bb457bd9 JL |
5736 | if (gcse_file) |
5737 | fprintf (gcse_file, "\n"); | |
5738 | } | |
5739 | ||
5740 | /* Determine if the expression identified by EXPR_INDEX would | |
5741 | reach BB unimpaired if it were placed at the end of EXPR_BB. | |
5742 | ||
5743 | It's unclear exactly what Muchnick meant by "unimpaired". It seems | |
5744 | to me that the expression must either be computed or transparent in | |
5745 | *every* block in the path(s) from EXPR_BB to BB. Any other definition | |
5746 | would allow the expression to be hoisted out of loops, even if | |
5747 | the expression wasn't a loop invariant. | |
5748 | ||
5749 | Contrast this to reachability for PRE where an expression is | |
5750 | considered reachable if *any* path reaches instead of *all* | |
5751 | paths. */ | |
5752 | ||
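/* Concretely (a sketch):

     x = a + b;          in BB
     if (p)
       a = 0;            a + b is killed on this path only
     y = a + b;          in a block dominated by BB

   a + b placed at the end of BB reaches the lower block unimpaired
   on one path but not the other; using a value hoisted into BB would
   be wrong when p is true, so *all* paths must preserve it.  */
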
5753 | static int | |
5754 | hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited) | |
e2d2ed72 | 5755 | basic_block expr_bb; |
bb457bd9 | 5756 | int expr_index; |
e2d2ed72 | 5757 | basic_block bb; |
bb457bd9 JL |
5758 | char *visited; |
5759 | { | |
5760 | edge pred; | |
283a2545 RL |
5761 | int visited_allocated_locally = 0; |
5762 | ||
bb457bd9 JL |
5763 | |
5764 | if (visited == NULL) | |
5765 | { | |
283a2545 RL |
5766 | visited_allocated_locally = 1; |
5767 | visited = xcalloc (n_basic_blocks, 1); | |
bb457bd9 JL |
5768 | } |
5769 | ||
e2d2ed72 | 5770 | for (pred = bb->pred; pred != NULL; pred = pred->pred_next) |
bb457bd9 | 5771 | { |
e2d2ed72 | 5772 | basic_block pred_bb = pred->src; |
bb457bd9 JL |
5773 | |
5774 | if (pred->src == ENTRY_BLOCK_PTR) | |
5775 | break; | |
e2d2ed72 | 5776 | else if (visited[pred_bb->index]) |
bb457bd9 | 5777 | continue; |
c4c81601 | 5778 | |
bb457bd9 | 5779 | /* Does this predecessor generate this expression? */ |
e2d2ed72 | 5780 | else if (TEST_BIT (comp[pred_bb->index], expr_index)) |
bb457bd9 | 5781 | break; |
e2d2ed72 | 5782 | else if (! TEST_BIT (transp[pred_bb->index], expr_index)) |
bb457bd9 | 5783 | break; |
c4c81601 | 5784 | |
bb457bd9 JL |
5785 | /* Not killed. */ |
5786 | else | |
5787 | { | |
e2d2ed72 | 5788 | visited[pred_bb->index] = 1; |
bb457bd9 JL |
5789 | if (! hoist_expr_reaches_here_p (expr_bb, expr_index, |
5790 | pred_bb, visited)) | |
5791 | break; | |
5792 | } | |
5793 | } | |
283a2545 RL |
5794 | if (visited_allocated_locally) |
5795 | free (visited); | |
c4c81601 | 5796 | |
bb457bd9 JL |
5797 | return (pred == NULL); |
5798 | } | |
5799 | \f | |
5800 | /* Actually perform code hoisting. */ | |
c4c81601 | 5801 | |
bb457bd9 JL |
5802 | static void |
5803 | hoist_code () | |
5804 | { | |
2e653e39 RK |
5805 | int bb, dominated; |
5806 | unsigned int i; | |
bb457bd9 | 5807 | struct expr **index_map; |
c4c81601 | 5808 | struct expr *expr; |
bb457bd9 JL |
5809 | |
5810 | sbitmap_vector_zero (hoist_exprs, n_basic_blocks); | |
5811 | ||
5812 | /* Compute a mapping from expression number (`bitmap_index') to | |
5813 | hash table entry. */ | |
5814 | ||
dd1bd863 | 5815 | index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *)); |
bb457bd9 | 5816 | for (i = 0; i < expr_hash_table_size; i++) |
c4c81601 RK |
5817 | for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash) |
5818 | index_map[expr->bitmap_index] = expr; | |
bb457bd9 JL |
5819 | |
5820 | /* Walk over each basic block looking for potentially hoistable | |
5821 | expressions; nothing gets hoisted from the entry block. */ | |
5822 | for (bb = 0; bb < n_basic_blocks; bb++) | |
5823 | { | |
5824 | int found = 0; | |
5825 | int insn_inserted_p; | |
5826 | ||
5827 | /* Examine each expression that is very busy at the exit of this | |
5828 | block. These are the potentially hoistable expressions. */ | |
5829 | for (i = 0; i < hoist_vbeout[bb]->n_bits; i++) | |
5830 | { | |
5831 | int hoistable = 0; | |
c4c81601 RK |
5832 | |
5833 | if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i)) | |
bb457bd9 JL |
5834 | { |
5835 | /* We've found a potentially hoistable expression, now | |
5836 | we look at every block BB dominates to see if it | |
5837 | computes the expression. */ | |
5838 | for (dominated = 0; dominated < n_basic_blocks; dominated++) | |
5839 | { | |
5840 | /* Ignore self dominance. */ | |
5841 | if (bb == dominated | |
5842 | || ! TEST_BIT (dominators[dominated], bb)) | |
5843 | continue; | |
5844 | ||
5845 | /* We've found a dominated block, now see if it computes | |
5846 | the busy expression and whether or not moving that | |
5847 | expression to the "beginning" of that block is safe. */ | |
5848 | if (!TEST_BIT (antloc[dominated], i)) | |
5849 | continue; | |
5850 | ||
5851 | /* Note if the expression would reach the dominated block | |
5852 | unimpaired if it were placed at the end of BB. | |
5853 | ||
5854 | Keep track of how many times this expression is hoistable | |
5855 | from a dominated block into BB. */ | |
e2d2ed72 AM |
5856 | if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i, |
5857 | BASIC_BLOCK (dominated), NULL)) | |
bb457bd9 JL |
5858 | hoistable++; |
5859 | } | |
5860 | ||
5861 | /* If we found more than one hoistable occurrence of this | |
5862 | expression, then note it in the bitmap of expressions to | |
5863 | hoist. It makes no sense to hoist things which are computed | |
5864 | in only one BB, and doing so tends to pessimize register | |
5865 | allocation. One could increase this value to try harder | |
5866 | to avoid any possible code expansion due to register | |
5867 | allocation issues; however experiments have shown that | |
5868 | the vast majority of hoistable expressions are only movable | |
5869 | from two successors, so raising this threshold is likely | |
5870 | to nullify any benefit we get from code hoisting. */ | |
5871 | if (hoistable > 1) | |
5872 | { | |
5873 | SET_BIT (hoist_exprs[bb], i); | |
5874 | found = 1; | |
5875 | } | |
5876 | } | |
5877 | } | |
5878 | ||
5879 | /* If we found nothing to hoist, then quit now. */ | |
5880 | if (! found) | |
5881 | continue; | |
5882 | ||
5883 | /* Loop over all the hoistable expressions. */ | |
5884 | for (i = 0; i < hoist_exprs[bb]->n_bits; i++) | |
5885 | { | |
5886 | /* We want to insert the expression into BB only once, so | |
5887 | note when we've inserted it. */ | |
5888 | insn_inserted_p = 0; | |
5889 | ||
5890 | /* These tests should be the same as the tests above. */ | |
5891 | if (TEST_BIT (hoist_vbeout[bb], i)) | |
5892 | { | |
5893 | /* We've found a potentially hoistable expression, now | |
5894 | we look at every block BB dominates to see if it | |
5895 | computes the expression. */ | |
5896 | for (dominated = 0; dominated < n_basic_blocks; dominated++) | |
5897 | { | |
5898 | /* Ignore self dominance. */ | |
5899 | if (bb == dominated | |
5900 | || ! TEST_BIT (dominators[dominated], bb)) | |
5901 | continue; | |
5902 | ||
5903 | /* We've found a dominated block, now see if it computes | |
5904 | the busy expression and whether or not moving that | |
5905 | expression to the "beginning" of that block is safe. */ | |
5906 | if (!TEST_BIT (antloc[dominated], i)) | |
5907 | continue; | |
5908 | ||
5909 | /* The expression is computed in the dominated block and | |
5910 | it would be safe to compute it at the start of the | |
5911 | dominated block. Now we have to determine if the | |
5912 | expression would reach the dominated block if it were | |
5913 | placed at the end of BB. */ | |
e2d2ed72 AM |
5914 | if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i, |
5915 | BASIC_BLOCK (dominated), NULL)) | |
bb457bd9 JL |
5916 | { |
5917 | struct expr *expr = index_map[i]; | |
5918 | struct occr *occr = expr->antic_occr; | |
5919 | rtx insn; | |
5920 | rtx set; | |
5921 | ||
bb457bd9 JL |
5922 | /* Find the right occurrence of this expression. */ | |
5923 | while (occr && BLOCK_NUM (occr->insn) != dominated) | |
5924 | occr = occr->next; | |
5925 | ||
5926 | /* Should never happen. */ | |
5927 | if (!occr) | |
5928 | abort (); | |
5929 | ||
5930 | insn = occr->insn; | |
5931 | ||
5932 | set = single_set (insn); | |
5933 | if (! set) | |
5934 | abort (); | |
5935 | ||
5936 | /* Create a pseudo-reg to store the result of reaching | |
5937 | expressions into. Get the mode for the new pseudo | |
5938 | from the mode of the original destination pseudo. */ | |
5939 | if (expr->reaching_reg == NULL) | |
5940 | expr->reaching_reg | |
5941 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
5942 | ||
5943 | /* In theory this should never fail since we're creating | |
5944 | a reg->reg copy. | |
5945 | ||
c4c81601 RK |
5946 | However, on the x86 some of the movXX patterns |
5947 | actually contain clobbers of scratch regs. This may | |
5948 | cause the insn created by validate_change to not | |
5949 | match any pattern and thus cause validate_change to | |
5950 | fail. */ | |
bb457bd9 JL |
5951 | if (validate_change (insn, &SET_SRC (set), |
5952 | expr->reaching_reg, 0)) | |
5953 | { | |
5954 | occr->deleted_p = 1; | |
5955 | if (!insn_inserted_p) | |
5956 | { | |
e2d2ed72 AM |
5957 | insert_insn_end_bb (index_map[i], |
5958 | BASIC_BLOCK (bb), 0); | |
bb457bd9 JL |
5959 | insn_inserted_p = 1; |
5960 | } | |
5961 | } | |
5962 | } | |
5963 | } | |
5964 | } | |
5965 | } | |
5966 | } | |
c4c81601 | 5967 | |
283a2545 | 5968 | free (index_map); |
bb457bd9 JL |
5969 | } |
5970 | ||
5971 | /* Top level routine to perform one code hoisting (aka unification) pass | |
5972 | ||
5973 | Return non-zero if a change was made. */ | |
5974 | ||
5975 | static int | |
5976 | one_code_hoisting_pass () | |
5977 | { | |
5978 | int changed = 0; | |
5979 | ||
5980 | alloc_expr_hash_table (max_cuid); | |
5981 | compute_expr_hash_table (); | |
5982 | if (gcse_file) | |
5983 | dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table, | |
5984 | expr_hash_table_size, n_exprs); | |
c4c81601 | 5985 | |
bb457bd9 JL |
5986 | if (n_exprs > 0) |
5987 | { | |
5988 | alloc_code_hoist_mem (n_basic_blocks, n_exprs); | |
5989 | compute_code_hoist_data (); | |
5990 | hoist_code (); | |
5991 | free_code_hoist_mem (); | |
5992 | } | |
c4c81601 | 5993 | |
bb457bd9 JL |
5994 | free_expr_hash_table (); |
5995 | ||
5996 | return changed; | |
5997 | } | |
a13d4ebf AM |
5998 | \f |
5999 | /* Here we provide the things required to do store motion towards | |
6000 | the exit. In order for this to be effective, gcse also needed to | |
6001 | be taught how to move a load when it is killed only by a store to itself. | |
6002 | ||
6003 | int i; | |
6004 | float a[10]; | |
6005 | ||
6006 | void foo(float scale) | |
6007 | { | |
6008 | for (i=0; i<10; i++) | |
6009 | a[i] *= scale; | |
6010 | } | |
6011 | ||
6012 | 'i' is both loaded and stored to in the loop. Normally, gcse cannot move | |
6013 | the load out, since it is live around the loop and stored at the bottom | |
6014 | of the loop. | |
6015 | ||
6016 | The 'Load Motion' referred to and implemented in this file is | |
6017 | an enhancement to gcse which, when using edge-based LCM, recognizes | |
6018 | this situation and allows gcse to move the load out of the loop. | |
6019 | ||
6020 | Once gcse has hoisted the load, store motion can then push this | |
6021 | load towards the exit, and we end up with no loads or stores of 'i' | |
6022 | in the loop. */ | |
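/* Roughly, the combined effect on that example is (a sketch):

     void foo (float scale)
     {
       int tmp;                       (the reaching register for `i')
       for (tmp = 0; tmp < 10; tmp++)
         a[tmp] *= scale;
       i = tmp;                       (single store pushed to the exit)
     }

   leaving no loads or stores of `i' inside the loop.  */
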
6023 | ||
6024 | /* This will search the ldst list for a matching expression. If it | |
6025 | doesn't find one, we create one and initialize it. */ | |
6026 | ||
6027 | static struct ls_expr * | |
6028 | ldst_entry (x) | |
6029 | rtx x; | |
6030 | { | |
6031 | struct ls_expr * ptr; | |
6032 | ||
6033 | for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6034 | if (expr_equiv_p (ptr->pattern, x)) | |
6035 | break; | |
6036 | ||
6037 | if (!ptr) | |
6038 | { | |
6039 | ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr)); | |
6040 | ||
6041 | ptr->next = pre_ldst_mems; | |
6042 | ptr->expr = NULL; | |
6043 | ptr->pattern = x; | |
6044 | ptr->loads = NULL_RTX; | |
6045 | ptr->stores = NULL_RTX; | |
6046 | ptr->reaching_reg = NULL_RTX; | |
6047 | ptr->invalid = 0; | |
6048 | ptr->index = 0; | |
6049 | ptr->hash_index = 0; | |
6050 | pre_ldst_mems = ptr; | |
6051 | } | |
6052 | ||
6053 | return ptr; | |
6054 | } | |
6055 | ||
6056 | /* Free up an individual ldst entry. */ | |
6057 | ||
6058 | static void | |
6059 | free_ldst_entry (ptr) | |
6060 | struct ls_expr * ptr; | |
6061 | { | |
6062 | free_INSN_LIST_list (& ptr->loads); | |
6063 | free_INSN_LIST_list (& ptr->stores); | |
6064 | ||
6065 | free (ptr); | |
6066 | } | |
6067 | ||
6068 | /* Free up all memory associated with the ldst list. */ | |
6069 | ||
6070 | static void | |
6071 | free_ldst_mems () | |
6072 | { | |
6073 | while (pre_ldst_mems) | |
6074 | { | |
6075 | struct ls_expr * tmp = pre_ldst_mems; | |
6076 | ||
6077 | pre_ldst_mems = pre_ldst_mems->next; | |
6078 | ||
6079 | free_ldst_entry (tmp); | |
6080 | } | |
6081 | ||
6082 | pre_ldst_mems = NULL; | |
6083 | } | |
6084 | ||
6085 | /* Dump debugging info about the ldst list. */ | |
6086 | ||
6087 | static void | |
6088 | print_ldst_list (file) | |
6089 | FILE * file; | |
6090 | { | |
6091 | struct ls_expr * ptr; | |
6092 | ||
6093 | fprintf (file, "LDST list: \n"); | |
6094 | ||
6095 | for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6096 | { | |
6097 | fprintf (file, " Pattern (%3d): ", ptr->index); | |
6098 | ||
6099 | print_rtl (file, ptr->pattern); | |
6100 | ||
6101 | fprintf (file, "\n Loads : "); | |
6102 | ||
6103 | if (ptr->loads) | |
6104 | print_rtl (file, ptr->loads); | |
6105 | else | |
6106 | fprintf (file, "(nil)"); | |
6107 | ||
6108 | fprintf (file, "\n Stores : "); | |
6109 | ||
6110 | if (ptr->stores) | |
6111 | print_rtl (file, ptr->stores); | |
6112 | else | |
6113 | fprintf (file, "(nil)"); | |
6114 | ||
6115 | fprintf (file, "\n\n"); | |
6116 | } | |
6117 | ||
6118 | fprintf (file, "\n"); | |
6119 | } | |
6120 | ||
6121 | /* Return X's entry in the ldst list, or NULL if none exists or it is invalid. */ | |
6122 | ||
6123 | static struct ls_expr * | |
6124 | find_rtx_in_ldst (x) | |
6125 | rtx x; | |
6126 | { | |
6127 | struct ls_expr * ptr; | |
6128 | ||
6129 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) | |
6130 | if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid) | |
6131 | return ptr; | |
6132 | ||
6133 | return NULL; | |
6134 | } | |
6135 | ||
6136 | /* Assign each element of the list of mems a monotonically increasing value. */ | |
6137 | ||
6138 | static int | |
6139 | enumerate_ldsts () | |
6140 | { | |
6141 | struct ls_expr * ptr; | |
6142 | int n = 0; | |
6143 | ||
6144 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) | |
6145 | ptr->index = n++; | |
6146 | ||
6147 | return n; | |
6148 | } | |
6149 | ||
6150 | /* Return first item in the list. */ | |
6151 | ||
6152 | static inline struct ls_expr * | |
6153 | first_ls_expr () | |
6154 | { | |
6155 | return pre_ldst_mems; | |
6156 | } | |
6157 | ||
6158 | /* Return the next item in the list after the specified one. */ | |
6159 | ||
6160 | static inline struct ls_expr * | |
6161 | next_ls_expr (ptr) | |
6162 | struct ls_expr * ptr; | |
6163 | { | |
6164 | return ptr->next; | |
6165 | } | |
6166 | \f | |
6167 | /* Load Motion for loads which only kill themselves. */ | |
6168 | ||
6169 | /* Return true if x is a simple MEM operation, with no registers or | |
6170 | side effects. These are the types of loads we consider for the | |
6171 | ld_motion list, otherwise we let the usual aliasing take care of it. */ | |
6172 | ||
6173 | static int | |
6174 | simple_mem (x) | |
6175 | rtx x; | |
6176 | { | |
6177 | if (GET_CODE (x) != MEM) | |
6178 | return 0; | |
6179 | ||
6180 | if (MEM_VOLATILE_P (x)) | |
6181 | return 0; | |
6182 | ||
6183 | if (GET_MODE (x) == BLKmode) | |
6184 | return 0; | |
6185 | ||
6186 | if (!rtx_varies_p (XEXP (x, 0), 0)) | |
6187 | return 1; | |
6188 | ||
6189 | return 0; | |
6190 | } | |
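
/* Under these criteria (an illustrative sketch with made-up globals):

     static int g;        a load of `g' is simple: bare SYMBOL_REF
     static int v[4];     a load of `v[2]' is simple: symbol + constant
     int *p;              a load of `*p' is not: address in a register
     volatile int w;      a load of `w' is not: volatile  */
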
6191 | ||
6192 | /* Make sure there isn't a buried reference in this pattern anywhere. | |
6193 | If there is, invalidate the entry for it since we're not capable | |
6194 | of fixing it up just yet. We have to be sure we know about ALL | |
6195 | loads, since the aliasing code will allow all entries in the | |
6196 | ld_motion list to not alias themselves. If we miss a load, we will get | |
6197 | the wrong value since gcse might common it and we won't know to | |
6198 | fix it up. */ | |
6199 | ||
6200 | static void | |
6201 | invalidate_any_buried_refs (x) | |
6202 | rtx x; | |
6203 | { | |
6204 | const char * fmt; | |
6205 | int i, j; | |
6206 | struct ls_expr * ptr; | |
6207 | ||
6208 | /* Invalidate it in the list. */ | |
6209 | if (GET_CODE (x) == MEM && simple_mem (x)) | |
6210 | { | |
6211 | ptr = ldst_entry (x); | |
6212 | ptr->invalid = 1; | |
6213 | } | |
6214 | ||
6215 | /* Recursively process the insn. */ | |
6216 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
6217 | ||
6218 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
6219 | { | |
6220 | if (fmt[i] == 'e') | |
6221 | invalidate_any_buried_refs (XEXP (x, i)); | |
6222 | else if (fmt[i] == 'E') | |
6223 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
6224 | invalidate_any_buried_refs (XVECEXP (x, i, j)); | |
6225 | } | |
6226 | } | |
6227 | ||
6228 | /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple | |
6229 | means MEM loads and stores to symbols, with no | |
6230 | side effects and no registers in the expression. If there are any | |
6231 | uses/defs which don't match these criteria, they are invalidated and | |
6232 | trimmed out later. */ | |
6233 | ||
6234 | static void | |
6235 | compute_ld_motion_mems () | |
6236 | { | |
6237 | struct ls_expr * ptr; | |
6238 | int bb; | |
6239 | rtx insn; | |
6240 | ||
6241 | pre_ldst_mems = NULL; | |
6242 | ||
6243 | for (bb = 0; bb < n_basic_blocks; bb++) | |
6244 | { | |
6245 | for (insn = BLOCK_HEAD (bb); | |
6246 | insn && insn != NEXT_INSN (BLOCK_END (bb)); | |
6247 | insn = NEXT_INSN (insn)) | |
6248 | { | |
6249 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
6250 | { | |
6251 | if (GET_CODE (PATTERN (insn)) == SET) | |
6252 | { | |
6253 | rtx src = SET_SRC (PATTERN (insn)); | |
6254 | rtx dest = SET_DEST (PATTERN (insn)); | |
6255 | ||
6256 | /* Check for a simple LOAD... */ | |
6257 | if (GET_CODE (src) == MEM && simple_mem (src)) | |
6258 | { | |
6259 | ptr = ldst_entry (src); | |
6260 | if (GET_CODE (dest) == REG) | |
6261 | ptr->loads = alloc_INSN_LIST (insn, ptr->loads); | |
6262 | else | |
6263 | ptr->invalid = 1; | |
6264 | } | |
6265 | else | |
6266 | { | |
6267 | /* Make sure there isn't a buried load somewhere. */ | |
6268 | invalidate_any_buried_refs (src); | |
6269 | } | |
6270 | ||
6271 | /* Check for stores. Don't worry about aliased ones, they | |
6272 | will block any movement we might do later. We only care | |
6273 | about this exact pattern since those are the only | |
6274 | circumstance that we will ignore the aliasing info. */ | |
6275 | if (GET_CODE (dest) == MEM && simple_mem (dest)) | |
6276 | { | |
6277 | ptr = ldst_entry (dest); | |
6278 | ||
f54104df AO |
6279 | if (GET_CODE (src) != MEM |
6280 | && GET_CODE (src) != ASM_OPERANDS) | |
a13d4ebf AM |
6281 | ptr->stores = alloc_INSN_LIST (insn, ptr->stores); |
6282 | else | |
6283 | ptr->invalid = 1; | |
6284 | } | |
6285 | } | |
6286 | else | |
6287 | invalidate_any_buried_refs (PATTERN (insn)); | |
6288 | } | |
6289 | } | |
6290 | } | |
6291 | } | |
6292 | ||
6293 | /* Remove any references that have either been invalidated or are not in the | |
6294 | expression list for pre gcse. */ | |
6295 | ||
6296 | static void | |
6297 | trim_ld_motion_mems () | |
6298 | { | |
6299 | struct ls_expr * last = NULL; | |
6300 | struct ls_expr * ptr = first_ls_expr (); | |
6301 | ||
6302 | while (ptr != NULL) | |
6303 | { | |
6304 | int del = ptr->invalid; | |
6305 | struct expr * expr = NULL; | |
6306 | ||
6307 | /* Delete if entry has been made invalid. */ | |
6308 | if (!del) | |
6309 | { | |
6310 | unsigned int i; | |
6311 | ||
6312 | del = 1; | |
6313 | /* Delete if we cannot find this mem in the expression list. */ | |
6314 | for (i = 0; i < expr_hash_table_size && del; i++) | |
6315 | { | |
6316 | for (expr = expr_hash_table[i]; | |
6317 | expr != NULL; | |
6318 | expr = expr->next_same_hash) | |
6319 | if (expr_equiv_p (expr->expr, ptr->pattern)) | |
6320 | { | |
6321 | del = 0; | |
6322 | break; | |
6323 | } | |
6324 | } | |
6325 | } | |
6326 | ||
6327 | if (del) | |
6328 | { | |
6329 | if (last != NULL) | |
6330 | { | |
6331 | last->next = ptr->next; | |
6332 | free_ldst_entry (ptr); | |
6333 | ptr = last->next; | |
6334 | } | |
6335 | else | |
6336 | { | |
6337 | pre_ldst_mems = pre_ldst_mems->next; | |
6338 | free_ldst_entry (ptr); | |
6339 | ptr = pre_ldst_mems; | |
6340 | } | |
6341 | } | |
6342 | else | |
6343 | { | |
6344 | /* Set the expression field if we are keeping it. */ | |
6345 | last = ptr; | |
6346 | ptr->expr = expr; | |
6347 | ptr = ptr->next; | |
6348 | } | |
6349 | } | |
6350 | ||
6351 | /* Show the world what we've found. */ | |
6352 | if (gcse_file && pre_ldst_mems != NULL) | |
6353 | print_ldst_list (gcse_file); | |
6354 | } | |
6355 | ||
6356 | /* This routine will take an expression which we are replacing with | |
6357 | a reaching register, and update any stores that are needed if | |
6358 | that expression is in the ld_motion list. Stores are updated by | |
6359 | copying their SRC to the reaching register, and then storing | |
6360 | the reaching register into the store location. This keeps the | |
6361 | correct value in the reaching register for the loads. */ | |
6362 | ||
6363 | static void | |
6364 | update_ld_motion_stores (expr) | |
6365 | struct expr * expr; | |
6366 | { | |
6367 | struct ls_expr * mem_ptr; | |
6368 | ||
6369 | if ((mem_ptr = find_rtx_in_ldst (expr->expr))) | |
6370 | { | |
6371 | /* We can try to find just the REACHED stores, but it shouldn't | |
6372 | matter to set the reaching reg everywhere... some might be | |
6373 | dead and should be eliminated later. */ | |
6374 | ||
6375 | /* We replace SET mem = expr with | |
6376 | SET reg = expr | |
6377 | SET mem = reg , where reg is the | |
6378 | reaching reg used in the load. */ | |
6379 | rtx list = mem_ptr->stores; | |
6380 | ||
6381 | for ( ; list != NULL_RTX; list = XEXP (list, 1)) | |
6382 | { | |
6383 | rtx insn = XEXP (list, 0); | |
6384 | rtx pat = PATTERN (insn); | |
6385 | rtx src = SET_SRC (pat); | |
6386 | rtx reg = expr->reaching_reg; | |
c57718d3 | 6387 | rtx copy, new; |
a13d4ebf AM |
6388 | |
6389 | /* If we've already copied it, continue. */ | |
6390 | if (expr->reaching_reg == src) | |
6391 | continue; | |
6392 | ||
6393 | if (gcse_file) | |
6394 | { | |
6395 | fprintf (gcse_file, "PRE: store updated with reaching reg "); | |
6396 | print_rtl (gcse_file, expr->reaching_reg); | |
6397 | fprintf (gcse_file, ":\n "); | |
6398 | print_inline_rtx (gcse_file, insn, 8); | |
6399 | fprintf (gcse_file, "\n"); | |
6400 | } | |
6401 | ||
6402 | copy = gen_move_insn (reg, SET_SRC (pat)); | |
c57718d3 RK |
6403 | new = emit_insn_before (copy, insn); |
6404 | record_one_set (REGNO (reg), new); | |
6405 | set_block_for_new_insns (new, BLOCK_FOR_INSN (insn)); | |
a13d4ebf AM |
6406 | SET_SRC (pat) = reg; |
6407 | ||
6408 | /* Un-recognize this pattern, since it's probably different now. */ | |
6409 | INSN_CODE (insn) = -1; | |
6410 | gcse_create_count++; | |
6411 | } | |
6412 | } | |
6413 | } | |
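
/* At the source level the rewrite above amounts to (a sketch, with
   reach_reg standing for expr->reaching_reg):

     i = x + y;        becomes        reach_reg = x + y;
                                      i = reach_reg;

   so a later load of `i' that gcse replaced with reach_reg still
   sees the value that was stored.  */
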
6414 | \f | |
6415 | /* Store motion code. */ | |
6416 | ||
6417 | /* This is used to communicate the target bitvector we want to use in the | |
6418 | reg_set_info routine when called via the note_stores mechanism. */ | |
6419 | static sbitmap * regvec; | |
6420 | ||
6421 | /* Used in computing the reverse edge graph bit vectors. */ | |
6422 | static sbitmap * st_antloc; | |
6423 | ||
6424 | /* Global holding the number of store expressions we are dealing with. */ | |
6425 | static int num_stores; | |
6426 | ||
6427 | /* Called via note_stores to mark in *REGVEC any register that is set. */ | |
6428 | ||
6429 | static void | |
6430 | reg_set_info (dest, setter, data) | |
6431 | rtx dest, setter ATTRIBUTE_UNUSED; | |
6432 | void * data ATTRIBUTE_UNUSED; | |
6433 | { | |
6434 | if (GET_CODE (dest) == SUBREG) | |
6435 | dest = SUBREG_REG (dest); | |
6436 | ||
6437 | if (GET_CODE (dest) == REG) | |
6438 | SET_BIT (*regvec, REGNO (dest)); | |
6439 | } | |
6440 | ||
6441 | /* Return non-zero if the register operands of expression X are still | |
6442 | valid, i.e. not set anywhere in basic block BB. */ | |
6443 | ||
6444 | static int | |
6445 | store_ops_ok (x, bb) | |
6446 | rtx x; | |
e2d2ed72 | 6447 | basic_block bb; |
a13d4ebf AM |
6448 | { |
6449 | int i; | |
6450 | enum rtx_code code; | |
6451 | const char * fmt; | |
6452 | ||
6453 | /* Repeat is used to turn tail-recursion into iteration. */ | |
6454 | repeat: | |
6455 | ||
6456 | if (x == 0) | |
6457 | return 1; | |
6458 | ||
6459 | code = GET_CODE (x); | |
6460 | switch (code) | |
6461 | { | |
6462 | case REG: | |
6463 | /* If the register is set anywhere in this block, the operand | |
6464 | has been killed, so the store's operands are not OK. */ | |
e2d2ed72 | 6465 | return ! TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
a13d4ebf AM |
6466 | |
6467 | case MEM: | |
6468 | x = XEXP (x, 0); | |
6469 | goto repeat; | |
6470 | ||
6471 | case PRE_DEC: | |
6472 | case PRE_INC: | |
6473 | case POST_DEC: | |
6474 | case POST_INC: | |
6475 | return 0; | |
6476 | ||
6477 | case PC: | |
6478 | case CC0: /*FIXME*/ | |
6479 | case CONST: | |
6480 | case CONST_INT: | |
6481 | case CONST_DOUBLE: | |
6482 | case SYMBOL_REF: | |
6483 | case LABEL_REF: | |
6484 | case ADDR_VEC: | |
6485 | case ADDR_DIFF_VEC: | |
6486 | return 1; | |
6487 | ||
6488 | default: | |
6489 | break; | |
6490 | } | |
6491 | ||
6492 | i = GET_RTX_LENGTH (code) - 1; | |
6493 | fmt = GET_RTX_FORMAT (code); | |
6494 | ||
6495 | for (; i >= 0; i--) | |
6496 | { | |
6497 | if (fmt[i] == 'e') | |
6498 | { | |
6499 | rtx tem = XEXP (x, i); | |
6500 | ||
6501 | /* If we are about to do the last recursive call | |
6502 | needed at this level, change it into iteration. | |
6503 | This function is called enough to be worth it. */ | |
6504 | if (i == 0) | |
6505 | { | |
6506 | x = tem; | |
6507 | goto repeat; | |
6508 | } | |
6509 | ||
6510 | if (! store_ops_ok (tem, bb)) | |
6511 | return 0; | |
6512 | } | |
6513 | else if (fmt[i] == 'E') | |
6514 | { | |
6515 | int j; | |
6516 | ||
6517 | for (j = 0; j < XVECLEN (x, i); j++) | |
6518 | { | |
6519 | if (! store_ops_ok (XVECEXP (x, i, j), bb)) | |
6520 | return 0; | |
6521 | } | |
6522 | } | |
6523 | } | |
6524 | ||
6525 | return 1; | |
6526 | } | |
6527 | ||
6528 | /* Determine whether INSN is a MEM store pattern that we will consider moving. */ | |
6529 | ||
6530 | static void | |
6531 | find_moveable_store (insn) | |
6532 | rtx insn; | |
6533 | { | |
6534 | struct ls_expr * ptr; | |
6535 | rtx dest = PATTERN (insn); | |
6536 | ||
f54104df AO |
6537 | if (GET_CODE (dest) != SET |
6538 | || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS) | |
a13d4ebf AM |
6539 | return; |
6540 | ||
6541 | dest = SET_DEST (dest); | |
6542 | ||
6543 | if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest) | |
6544 | || GET_MODE (dest) == BLKmode) | |
6545 | return; | |
6546 | ||
6547 | if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF) | |
6548 | return; | |
6549 | ||
6550 | if (rtx_varies_p (XEXP (dest, 0), 0)) | |
6551 | return; | |
6552 | ||
6553 | ptr = ldst_entry (dest); | |
6554 | ptr->stores = alloc_INSN_LIST (insn, ptr->stores); | |
6555 | } | |
6556 | ||
6557 | /* Build the store table: find the stores we may consider moving and | |
6558 | record which registers are set in each block. Returns the store count. */ | |
6559 | ||
6560 | static int | |
6561 | compute_store_table () | |
6562 | { | |
6563 | int bb, ret; | |
6564 | unsigned regno; | |
6565 | rtx insn, pat; | |
6566 | ||
6567 | max_gcse_regno = max_reg_num (); | |
6568 | ||
6569 | reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, | |
6570 | max_gcse_regno); | |
6571 | sbitmap_vector_zero (reg_set_in_block, n_basic_blocks); | |
6572 | pre_ldst_mems = 0; | |
6573 | ||
6574 | /* Find all the stores we care about. */ | |
6575 | for (bb = 0; bb < n_basic_blocks; bb++) | |
6576 | { | |
6577 | regvec = & (reg_set_in_block[bb]); | |
6578 | for (insn = BLOCK_END (bb); | |
6579 | insn && insn != PREV_INSN (BLOCK_HEAD (bb)); | |
6580 | insn = PREV_INSN (insn)) | |
6581 | { | |
6582 | #ifdef NON_SAVING_SETJMP | |
6583 | if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE | |
6584 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) | |
6585 | { | |
6586 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
6587 | SET_BIT (reg_set_in_block[bb], regno); | |
6588 | continue; | |
6589 | } | |
6590 | #endif | |
6591 | /* Ignore anything that is not a normal insn. */ | |
6592 | if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') | |
6593 | continue; | |
6594 | ||
6595 | if (GET_CODE (insn) == CALL_INSN) | |
6596 | { | |
6597 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
6598 | if ((call_used_regs[regno] | |
6599 | && regno != STACK_POINTER_REGNUM | |
6600 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
6601 | && regno != HARD_FRAME_POINTER_REGNUM | |
6602 | #endif | |
6603 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
6604 | && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
6605 | #endif | |
6606 | #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) | |
6607 | && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic) | |
6608 | #endif | |
6609 | ||
6610 | && regno != FRAME_POINTER_REGNUM) | |
6611 | || global_regs[regno]) | |
6612 | SET_BIT (reg_set_in_block[bb], regno); | |
6613 | } | |
6614 | ||
6615 | pat = PATTERN (insn); | |
6616 | note_stores (pat, reg_set_info, NULL); | |
6617 | ||
6618 | /* Now that we've marked regs, look for stores. */ | |
6619 | if (GET_CODE (pat) == SET) | |
6620 | find_moveable_store (insn); | |
6621 | } | |
6622 | } | |
6623 | ||
6624 | ret = enumerate_ldsts (); | |
6625 | ||
6626 | if (gcse_file) | |
6627 | { | |
6628 | fprintf (gcse_file, "Store Motion Expressions.\n"); | |
6629 | print_ldst_list (gcse_file); | |
6630 | } | |
6631 | ||
6632 | return ret; | |
6633 | } | |
6634 | ||
6635 | /* Check to see if the load X is aliased with STORE_PATTERN. */ | |
6636 | ||
6637 | static int | |
6638 | load_kills_store (x, store_pattern) | |
6639 | rtx x, store_pattern; | |
6640 | { | |
6641 | if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p)) | |
6642 | return 1; | |
6643 | return 0; | |
6644 | } | |
6645 | ||
6646 | /* Go through the entire insn X, looking for any loads which might alias | |
6647 | STORE_PATTERN. Return 1 if found. */ | |
6648 | ||
6649 | static int | |
6650 | find_loads (x, store_pattern) | |
6651 | rtx x, store_pattern; | |
6652 | { | |
6653 | const char * fmt; | |
6654 | int i, j; | |
6655 | int ret = 0; | |
6656 | ||
6657 | if (GET_CODE (x) == SET) | |
6658 | x = SET_SRC (x); | |
6659 | ||
6660 | if (GET_CODE (x) == MEM) | |
6661 | { | |
6662 | if (load_kills_store (x, store_pattern)) | |
6663 | return 1; | |
6664 | } | |
6665 | ||
6666 | /* Recursively process the insn. */ | |
6667 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
6668 | ||
6669 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--) | |
6670 | { | |
6671 | if (fmt[i] == 'e') | |
6672 | ret |= find_loads (XEXP (x, i), store_pattern); | |
6673 | else if (fmt[i] == 'E') | |
6674 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
6675 | ret |= find_loads (XVECEXP (x, i, j), store_pattern); | |
6676 | } | |
6677 | return ret; | |
6678 | } | |
6679 | ||
6680 | /* Check if INSN kills the store pattern X (is aliased with it). | |
6681 | Return 1 if it does. */ | |
6682 | ||
6683 | static int | |
6684 | store_killed_in_insn (x, insn) | |
6685 | rtx x, insn; | |
6686 | { | |
6687 | if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') | |
6688 | return 0; | |
6689 | ||
6690 | if (GET_CODE (insn) == CALL_INSN) | |
6691 | { | |
6692 | if (CONST_CALL_P (insn)) | |
6693 | return 0; | |
6694 | else | |
6695 | return 1; | |
6696 | } | |
6697 | ||
6698 | if (GET_CODE (PATTERN (insn)) == SET) | |
6699 | { | |
6700 | rtx pat = PATTERN (insn); | |
6701 | /* Check for memory stores to aliased objects. */ | |
6702 | if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x)) | |
6703 | /* Pretend it's a load and check for aliasing. */ | |
6704 | if (find_loads (SET_DEST (pat), x)) | |
6705 | return 1; | |
6706 | return find_loads (SET_SRC (pat), x); | |
6707 | } | |
6708 | else | |
6709 | return find_loads (PATTERN (insn), x); | |
6710 | } | |
6711 | ||
6712 | /* Returns 1 if the expression X is loaded or clobbered on or after INSN | |
6713 | within basic block BB. */ | |
6714 | ||
6715 | static int | |
6716 | store_killed_after (x, insn, bb) | |
6717 | rtx x, insn; | |
e2d2ed72 | 6718 | basic_block bb; |
a13d4ebf | 6719 | { |
e2d2ed72 | 6720 | rtx last = bb->end; |
a13d4ebf AM |
6721 | |
6722 | if (insn == last) | |
6723 | return 0; | |
6724 | ||
6725 | /* Check if the register operands of the store are OK in this block. | |
6726 | Note that if registers are changed ANYWHERE in the block, we'll | |
6727 | decide we can't move it, regardless of whether it changed above | |
6728 | or below the store. This could be improved by checking the register | |
6729 | operands while looking for aliasing in each insn. */ | |
6730 | if (!store_ops_ok (XEXP (x, 0), bb)) | |
6731 | return 1; | |
6732 | ||
6733 | for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn)) | |
6734 | if (store_killed_in_insn (x, insn)) | |
6735 | return 1; | |
6736 | ||
6737 | return 0; | |
6738 | } | |
6739 | ||
6740 | /* Returns 1 if the expression X is loaded or clobbered on or before INSN | |
6741 | within basic block BB. */ | |
6742 | static int | |
6743 | store_killed_before (x, insn, bb) | |
6744 | rtx x, insn; | |
e2d2ed72 | 6745 | basic_block bb; |
a13d4ebf | 6746 | { |
e2d2ed72 | 6747 | rtx first = bb->head; |
a13d4ebf AM |
6748 | |
6749 | if (insn == first) | |
6750 | return store_killed_in_insn (x, insn); | |
6751 | ||
6752 | /* Check if the register operands of the store are OK in this block. | |
6753 | Note that if registers are changed ANYWHERE in the block, we'll | |
6754 | decide we can't move it, regardless of whether it changed above | |
6755 | or below the store. This could be improved by checking the register | |
6756 | operands while looking for aliasing in each insn. */ | |
6757 | if (!store_ops_ok (XEXP (x, 0), bb)) | |
6758 | return 1; | |
6759 | ||
6760 | for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn)) | |
6761 | if (store_killed_in_insn (x, insn)) | |
6762 | return 1; | |
6763 | ||
6764 | return 0; | |
6765 | } | |
6766 | ||
6767 | #define ANTIC_STORE_LIST(x) ((x)->loads) | |
6768 | #define AVAIL_STORE_LIST(x) ((x)->stores) | |
6769 | ||
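/* Note that these macros reuse the LOADS and STORES fields of an
   ls_expr: during store motion they hold the anticipatable and the
   available store lists, respectively.  */
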
6770 | /* Given the table of available store insns at the end of blocks, | |
6771 | determine which ones are not killed by aliasing, and generate | |
6772 | the appropriate vectors for gen and killed. */ | |
6773 | static void | |
6774 | build_store_vectors () | |
6775 | { | |
e2d2ed72 AM |
6776 | basic_block bb; |
6777 | int b; | |
a13d4ebf AM |
6778 | rtx insn, st; |
6779 | struct ls_expr * ptr; | |
6780 | ||
6781 | /* Build the gen_vector. This is any store in the table which is not killed | |
6782 | by aliasing later in its block. */ | |
6783 | ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores); | |
6784 | sbitmap_vector_zero (ae_gen, n_basic_blocks); | |
6785 | ||
6786 | st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores); | |
6787 | sbitmap_vector_zero (st_antloc, n_basic_blocks); | |
6788 | ||
6789 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6790 | { | |
6791 | /* Put all the stores into either the antic list, or the avail list, | |
6792 | or both. */ | |
6793 | rtx store_list = ptr->stores; | |
6794 | ptr->stores = NULL_RTX; | |
6795 | ||
6796 | for (st = store_list; st != NULL; st = XEXP (st, 1)) | |
6797 | { | |
6798 | insn = XEXP (st, 0); | |
e2d2ed72 | 6799 | bb = BLOCK_FOR_INSN (insn); |
a13d4ebf AM |
6800 | |
6801 | if (!store_killed_after (ptr->pattern, insn, bb)) | |
6802 | { | |
6803 | /* If we've already seen an available expression in this block, | |
6804 | we can delete the one we saw already (it occurs earlier in | |
6805 | the block) and replace it with this one. We'll copy the | |
6806 | old SRC expression to an unused register in case there | |
6807 | are any side effects. */ | |
e2d2ed72 | 6808 | if (TEST_BIT (ae_gen[bb->index], ptr->index)) |
a13d4ebf AM |
6809 | { |
6810 | /* Find previous store. */ | |
6811 | rtx st; | |
6812 | for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1)) | |
e2d2ed72 | 6813 | if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb) |
a13d4ebf AM |
6814 | break; |
6815 | if (st) | |
6816 | { | |
6817 | rtx r = gen_reg_rtx (GET_MODE (ptr->pattern)); | |
6818 | if (gcse_file) | |
6819 | fprintf (gcse_file, "Removing redundant store:\n"); | |
6820 | replace_store_insn (r, XEXP (st, 0), bb); | |
6821 | XEXP (st, 0) = insn; | |
6822 | continue; | |
6823 | } | |
6824 | } | |
e2d2ed72 | 6825 | SET_BIT (ae_gen[bb->index], ptr->index); |
a13d4ebf AM |
6826 | AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, |
6827 | AVAIL_STORE_LIST (ptr)); | |
6828 | } | |
6829 | ||
6830 | if (!store_killed_before (ptr->pattern, insn, bb)) | |
6831 | { | |
6832 | SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index); | |
6833 | ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn, | |
6834 | ANTIC_STORE_LIST (ptr)); | |
6835 | } | |
6836 | } | |
6837 | ||
6838 | /* Free the original list of store insns. */ | |
6839 | free_INSN_LIST_list (&store_list); | |
6840 | } | |
6841 | ||
6842 | ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores); | |
6843 | sbitmap_vector_zero (ae_kill, n_basic_blocks); | |
6844 | ||
6845 | transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores); | |
6846 | sbitmap_vector_zero (transp, n_basic_blocks); | |
6847 | ||
6848 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
e2d2ed72 | 6849 | for (b = 0; b < n_basic_blocks; b++) |
a13d4ebf | 6850 | { |
e2d2ed72 | 6851 | if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b))) |
a13d4ebf AM |
6852 | { |
6853 | /* The anticipatable expression is not killed if it's gen'd. */ | |
6854 | /* | |
6855 | We leave this check out for now. If we have a code sequence | |
6856 | in a block which looks like: | |
6857 | ST MEMa = x | |
6858 | L y = MEMa | |
6859 | ST MEMa = z | |
6860 | We should flag this as having an ANTIC expression, NOT | |
6861 | transparent, NOT killed, and AVAIL. | |
6862 | Unfortunately, since we haven't re-written all loads to | |
6863 | use the reaching reg, we'll end up doing an incorrect | |
6864 | load in the middle here if we push the store down. It happens in | |
6865 | gcc.c-torture/execute/960311-1.c with -O3. | |
6866 | If we always kill it in this case, we'll sometimes do | |
6867 | unnecessary work, but it shouldn't actually hurt anything. | |
e2d2ed72 AM |
6868 | if (!TEST_BIT (ae_gen[b], ptr->index)). */ |
6869 | SET_BIT (ae_kill[b], ptr->index); | |
a13d4ebf AM |
6870 | } |
6871 | else | |
e2d2ed72 | 6872 | SET_BIT (transp[b], ptr->index); |
a13d4ebf AM |
6873 | } |
6874 | ||
6875 | /* Any block with no exits calls some non-returning function, so | |
6876 | we had better mark the store killed here, or we might not store to | |
6877 | it at all. If we knew it was abort, we wouldn't have to store, | |
6878 | but we don't know that for sure. */ | |
6879 | if (gcse_file) | |
6880 | { | |
6881 | fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n"); | |
6882 | print_ldst_list (gcse_file); | |
6883 | dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks); | |
6884 | dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks); | |
6885 | dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks); | |
6886 | dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks); | |
6887 | } | |
6888 | } | |
6889 | ||
6890 | /* Insert an instruction at the beginning of a basic block, and update | |
6891 | the BLOCK_HEAD if needed. */ | |
6892 | ||
6893 | static void | |
6894 | insert_insn_start_bb (insn, bb) | |
6895 | rtx insn; | |
e2d2ed72 | 6896 | basic_block bb; |
a13d4ebf AM |
6897 | { |
6898 | /* Insert at the start of BB, after its label and basic block note. */ | |
e2d2ed72 AM |
6899 | rtx prev = PREV_INSN (bb->head); |
6900 | rtx before = bb->head; | |
a13d4ebf AM |
6901 | while (before != 0) |
6902 | { | |
6903 | if (GET_CODE (before) != CODE_LABEL | |
6904 | && (GET_CODE (before) != NOTE | |
6905 | || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK)) | |
6906 | break; | |
6907 | prev = before; | |
e2d2ed72 | 6908 | if (prev == bb->end) |
a13d4ebf AM |
6909 | break; |
6910 | before = NEXT_INSN (before); | |
6911 | } | |
6912 | ||
6913 | insn = emit_insn_after (insn, prev); | |
6914 | ||
e2d2ed72 AM |
6915 | if (prev == bb->end) |
6916 | bb->end = insn; | |
ccbaf064 | 6917 | |
e2d2ed72 | 6918 | set_block_for_new_insns (insn, bb); |
a13d4ebf AM |
6919 | |
6920 | if (gcse_file) | |
6921 | { | |
6922 | fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n", | |
e2d2ed72 | 6923 | bb->index); |
a13d4ebf AM |
6924 | print_inline_rtx (gcse_file, insn, 6); |
6925 | fprintf (gcse_file, "\n"); | |
6926 | } | |
6927 | } | |
6928 | ||
6929 | /* This routine will insert a store on an edge. EXPR is the ldst entry for | |
6930 | the memory reference, and E is the edge to insert it on. Returns non-zero | |
6931 | if an edge insertion was performed. */ | |
6932 | ||
6933 | static int | |
6934 | insert_store (expr, e) | |
6935 | struct ls_expr * expr; | |
6936 | edge e; | |
6937 | { | |
6938 | rtx reg, insn; | |
e2d2ed72 | 6939 | basic_block bb; |
a13d4ebf AM |
6940 | edge tmp; |
6941 | ||
6942 | /* We did all the deletes before this insert, so if we didn't delete a | |
6943 | store, then we haven't set the reaching reg yet either. */ | |
6944 | if (expr->reaching_reg == NULL_RTX) | |
6945 | return 0; | |
6946 | ||
6947 | reg = expr->reaching_reg; | |
6948 | insn = gen_move_insn (expr->pattern, reg); | |
6949 | ||
6950 | /* If we are inserting this expression on ALL predecessor edges of a BB, | |
6951 | insert it at the start of the BB, and reset the insert bits on the other | |
6952 | edges so we don't try to insert it on the other edges. */ | |
e2d2ed72 | 6953 | bb = e->dest; |
a13d4ebf AM |
6954 | for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next) |
6955 | { | |
6956 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
6957 | if (index == EDGE_INDEX_NO_EDGE) | |
6958 | abort (); | |
6959 | if (! TEST_BIT (pre_insert_map[index], expr->index)) | |
6960 | break; | |
6961 | } | |
6962 | ||
6963 | /* If tmp is NULL, we found an insertion on every edge, blank the | |
6964 | insertion vector for these edges, and insert at the start of the BB. */ | |
e2d2ed72 | 6965 | if (!tmp && bb != EXIT_BLOCK_PTR) |
a13d4ebf AM |
6966 | { |
6967 | for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next) | |
6968 | { | |
6969 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
6970 | RESET_BIT (pre_insert_map[index], expr->index); | |
6971 | } | |
6972 | insert_insn_start_bb (insn, bb); | |
6973 | return 0; | |
6974 | } | |
6975 | ||
6976 | /* We can't insert on this edge, so we'll insert at the head of the | |
6977 | successor block. See Morgan, sec. 10.5. */ | |
6978 | if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL) | |
6979 | { | |
6980 | insert_insn_start_bb (insn, bb); | |
6981 | return 0; | |
6982 | } | |
6983 | ||
6984 | insert_insn_on_edge (insn, e); | |
6985 | ||
6986 | if (gcse_file) | |
6987 | { | |
6988 | fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n", | |
6989 | e->src->index, e->dest->index); | |
6990 | print_inline_rtx (gcse_file, insn, 6); | |
6991 | fprintf (gcse_file, "\n"); | |
6992 | } | |
6993 | ||
6994 | return 1; | |
6995 | } | |
6996 | ||
6997 | /* This routine will replace a store with a SET to a specified register. */ | |
6998 | ||
6999 | static void | |
7000 | replace_store_insn (reg, del, bb) | |
7001 | rtx reg, del; | |
e2d2ed72 | 7002 | basic_block bb; |
a13d4ebf AM |
7003 | { |
7004 | rtx insn; | |
7005 | ||
7006 | insn = gen_move_insn (reg, SET_SRC (PATTERN (del))); | |
7007 | insn = emit_insn_after (insn, del); | |
e2d2ed72 | 7008 | set_block_for_new_insns (insn, bb); |
a13d4ebf AM |
7009 | |
7010 | if (gcse_file) | |
7011 | { | |
7012 | fprintf (gcse_file, | |
e2d2ed72 | 7013 | "STORE_MOTION delete insn in BB %d:\n ", bb->index); |
a13d4ebf AM |
7014 | print_inline_rtx (gcse_file, del, 6); |
7015 | fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n "); | |
7016 | print_inline_rtx (gcse_file, insn, 6); | |
7017 | fprintf (gcse_file, "\n"); | |
7018 | } | |
7019 | ||
e2d2ed72 AM |
7020 | if (bb->end == del) |
7021 | bb->end = insn; | |
a13d4ebf | 7022 | |
e2d2ed72 AM |
7023 | if (bb->head == del) |
7024 | bb->head = insn; | |
a13d4ebf AM |
7025 | |
7026 | delete_insn (del); | |
7027 | } | |
7028 | ||
7029 | ||
7030 | /* Delete a store, but copy the value that would have been stored into | |
7031 | the reaching_reg for later storing. */ | |
7032 | ||
7033 | static void | |
7034 | delete_store (expr, bb) | |
7035 | struct ls_expr * expr; | |
e2d2ed72 | 7036 | basic_block bb; |
a13d4ebf AM |
7037 | { |
7038 | rtx reg, i, del; | |
7039 | ||
7040 | if (expr->reaching_reg == NULL_RTX) | |
7041 | expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern)); | |
7042 | ||
7043 | ||
7044 | /* If there is more than 1 store, the earlier ones will be dead, | |
7045 | but it doesn't hurt to replace them here. */ | |
7046 | reg = expr->reaching_reg; | |
7047 | ||
7048 | for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1)) | |
7049 | { | |
7050 | del = XEXP (i, 0); | |
e2d2ed72 | 7051 | if (BLOCK_FOR_INSN (del) == bb) |
a13d4ebf AM |
7052 | { |
7053 | /* We know there is only one since we deleted redundant | |
7054 | ones during the available computation. */ | |
7055 | replace_store_insn (reg, del, bb); | |
7056 | break; | |
7057 | } | |
7058 | } | |
7059 | } | |
7060 | ||
7061 | /* Free memory used by store motion. */ | |
7062 | ||
7063 | static void | |
7064 | free_store_memory () | |
7065 | { | |
7066 | free_ldst_mems (); | |
7067 | ||
7068 | if (ae_gen) | |
7069 | free (ae_gen); | |
7070 | if (ae_kill) | |
7071 | free (ae_kill); | |
7072 | if (transp) | |
7073 | free (transp); | |
7074 | if (st_antloc) | |
7075 | free (st_antloc); | |
7076 | if (pre_insert_map) | |
7077 | free (pre_insert_map); | |
7078 | if (pre_delete_map) | |
7079 | free (pre_delete_map); | |
7080 | if (reg_set_in_block) | |
7081 | free (reg_set_in_block); | |
7082 | ||
7083 | ae_gen = ae_kill = transp = st_antloc = NULL; | |
7084 | pre_insert_map = pre_delete_map = reg_set_in_block = NULL; | |
7085 | } | |
7086 | ||
7087 | /* Perform store motion. Much like gcse, except we move expressions the | |
7088 | other way by looking at the flowgraph in reverse. */ | |
7089 | ||
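/* A small example of the overall transformation (a sketch):

     int n;
     void g (int m)
     {
       int j;
       for (j = 0; j < m; j++)
         n = j;
     }

   The store to `n' is available at the end of the loop body and
   anticipatable at its start, so the in-loop store is rewritten to
   copy j into the reaching register, and a single store from that
   register is inserted on the loop exit edge.  */
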
7090 | static void | |
7091 | store_motion () | |
7092 | { | |
7093 | int x; | |
7094 | struct ls_expr * ptr; | |
7095 | int update_flow = 0; | |
7096 | ||
7097 | if (gcse_file) | |
7098 | { | |
7099 | fprintf (gcse_file, "before store motion\n"); | |
7100 | print_rtl (gcse_file, get_insns ()); | |
7101 | } | |
7102 | ||
7103 | ||
7104 | init_alias_analysis (); | |
7105 | ||
7106 | /* Find all the stores that are live to the end of their block. */ | |
7107 | num_stores = compute_store_table (); | |
7108 | if (num_stores == 0) | |
7109 | { | |
7110 | free (reg_set_in_block); | |
7111 | end_alias_analysis (); | |
7112 | return; | |
7113 | } | |
7114 | ||
7115 | /* Now compute what's actually available to move. */ | |
7116 | add_noreturn_fake_exit_edges (); | |
7117 | build_store_vectors (); | |
7118 | ||
7119 | edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen, | |
7120 | st_antloc, ae_kill, &pre_insert_map, | |
7121 | &pre_delete_map); | |
7122 | ||
7123 | /* Now we want to insert the new stores which are going to be needed. */ | |
7124 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
7125 | { | |
7126 | for (x = 0; x < n_basic_blocks; x++) | |
7127 | if (TEST_BIT (pre_delete_map[x], ptr->index)) | |
e2d2ed72 | 7128 | delete_store (ptr, BASIC_BLOCK (x)); |
a13d4ebf AM |
7129 | |
7130 | for (x = 0; x < NUM_EDGES (edge_list); x++) | |
7131 | if (TEST_BIT (pre_insert_map[x], ptr->index)) | |
7132 | update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x)); | |
7133 | } | |
7134 | ||
7135 | if (update_flow) | |
7136 | commit_edge_insertions (); | |
7137 | ||
7138 | free_store_memory (); | |
7139 | free_edge_list (edge_list); | |
7140 | remove_fake_edges (); | |
7141 | end_alias_analysis (); | |
7142 | } |