/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
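
   As an illustrative sketch (invented here, not taken from any actual
   dump), a candidate expression is the source of a set such as

     (set (reg 105) (plus:SI (reg 103) (reg 104)))

   If the (plus:SI ...) is computed on some but not all paths reaching a
   later computation of the same expression, PRE inserts the computation
   on the paths that lack it, saving the result in a new pseudo, and
   replaces the later computation with a copy from that pseudo.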

   **********

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
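
   A minimal sketch of steps 3-5 on a diamond-shaped flow graph (the block
   numbers and pseudo-reg names are invented for illustration):

     bb 1:  r5 = a + b       <- available occurrence, kept
     bb 2:  (a + b not computed here)
     bb 3:  r9 = a + b       <- partially redundant

   becomes

     bb 1:  r5 = a + b
            rr = r5          <- copy into the reaching reg [step 5]
     bb 2:  rr = a + b       <- inserted computation [step 4]
     bb 3:  r9 = rr          <- redundant insn replaced by a copy [step 3]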

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********

   A fair bit of simplicity is gained by using small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Element I is a list of I's predecessors/successors.  */
static int_list_ptr *s_preds;
static int_list_ptr *s_succs;

/* Element I is the number of predecessors/successors of basic block I.  */
static int *num_preds;
static int *num_succs;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {
  rtx reg_rtx;
};

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */
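
/* An illustrative picture of one bucket (the expressions are invented):

     expr_hash_table[h] -> expr A -> expr B -> NULL
                             (linked via next_same_hash)

   Each expr carries its antic_occr/avail_occr lists of struct occr, and
   its bitmap_index selects its column in the per-block sbitmaps
   (transp, comp, antloc, ae_gen, ...).  */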

/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;
/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;
/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
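
/* A small example of the mapping (insn UIDs invented): given an insn
   stream with UIDs 10 (insn), 11 (note), 12 (insn), alloc_gcse_mem below
   produces uid_cuid[10] = 0, uid_cuid[11] = 1, uid_cuid[12] = 1, and
   cuid_insn[0] = the insn with UID 10, cuid_insn[1] = the insn with
   UID 12.  Non-insns share the cuid of the next real insn.  */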

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;
/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps
   (num-bbs x num-regs)
   [however perhaps it may be useful to keep the data as is].
   One advantage of recording things this way is that `reg_set_table' is
   fairly sparse with respect to pseudo regs but for hard regs could be
   fairly dense [relatively speaking].
   And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set {
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;
static reg_set **reg_set_table;
/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;
/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
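
/* A minimal usage sketch (note_block is a made-up placeholder; the real
   consumer is compute_transp below): to visit every set of pseudo REGNO,
   walk its list:

     reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       note_block (BLOCK_NUM (r->insn));  */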

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;
/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.,
       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
\f
static void compute_can_copy    PROTO ((void));

static char *gmalloc            PROTO ((unsigned int));
static char *grealloc           PROTO ((char *, unsigned int));
static char *gcse_alloc         PROTO ((unsigned long));
static void alloc_gcse_mem      PROTO ((rtx));
static void free_gcse_mem       PROTO ((void));
static void alloc_reg_set_mem   PROTO ((int));
static void free_reg_set_mem    PROTO ((void));
static void record_one_set      PROTO ((int, rtx));
static void record_set_info     PROTO ((rtx, rtx));
static void compute_sets        PROTO ((rtx));

static void hash_scan_insn      PROTO ((rtx, int, int));
static void hash_scan_set       PROTO ((rtx, rtx, int));
static void hash_scan_clobber   PROTO ((rtx, rtx));
static void hash_scan_call      PROTO ((rtx, rtx));
static int want_to_gcse_p       PROTO ((rtx));
static int oprs_unchanged_p     PROTO ((rtx, rtx, int));
static int oprs_anticipatable_p PROTO ((rtx, rtx));
static int oprs_available_p     PROTO ((rtx, rtx));
static void insert_expr_in_table PROTO ((rtx, enum machine_mode,
                                         rtx, int, int));
static void insert_set_in_table PROTO ((rtx, rtx));
static unsigned int hash_expr   PROTO ((rtx, enum machine_mode,
                                        int *, int));
static unsigned int hash_expr_1 PROTO ((rtx, enum machine_mode, int *));
static unsigned int hash_set    PROTO ((int, int));
static int expr_equiv_p         PROTO ((rtx, rtx));
static void record_last_reg_set_info PROTO ((rtx, int));
static void record_last_mem_set_info PROTO ((rtx));
static void record_last_set_info PROTO ((rtx, rtx));
static void compute_hash_table  PROTO ((int));
static void alloc_set_hash_table PROTO ((int));
static void free_set_hash_table PROTO ((void));
static void compute_set_hash_table PROTO ((void));
static void alloc_expr_hash_table PROTO ((int));
static void free_expr_hash_table PROTO ((void));
static void compute_expr_hash_table PROTO ((void));
static void dump_hash_table     PROTO ((FILE *, const char *, struct expr **,
                                        int, int));
static struct expr *lookup_expr PROTO ((rtx));
static struct expr *lookup_set  PROTO ((int, rtx));
static struct expr *next_set    PROTO ((int, struct expr *));
static void reset_opr_set_tables PROTO ((void));
static int oprs_not_set_p       PROTO ((rtx, rtx));
static void mark_call           PROTO ((rtx));
static void mark_set            PROTO ((rtx, rtx));
static void mark_clobber        PROTO ((rtx, rtx));
static void mark_oprs_set       PROTO ((rtx));

static void alloc_cprop_mem     PROTO ((int, int));
static void free_cprop_mem      PROTO ((void));
static void compute_transp      PROTO ((rtx, int, sbitmap *, int));
static void compute_transpout   PROTO ((void));
static void compute_local_properties PROTO ((sbitmap *, sbitmap *,
                                             sbitmap *, int));
static void compute_cprop_avinout PROTO ((void));
static void compute_cprop_data  PROTO ((void));
static void find_used_regs      PROTO ((rtx));
static int try_replace_reg      PROTO ((rtx, rtx, rtx));
static struct expr *find_avail_set PROTO ((int, rtx));
static int cprop_jump           PROTO ((rtx, rtx, struct reg_use *, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump       PROTO ((rtx, struct reg_use *, rtx));
#endif
static int cprop_insn           PROTO ((rtx, int));
static int cprop                PROTO ((int));
static int one_cprop_pass       PROTO ((int, int));

static void alloc_pre_mem       PROTO ((int, int));
static void free_pre_mem        PROTO ((void));
static void compute_pre_data    PROTO ((void));
static int pre_expr_reaches_here_p PROTO ((int, struct expr *,
                                           int, int, char *));
static void insert_insn_end_bb  PROTO ((struct expr *, int, int));
static void pre_insert          PROTO ((struct expr **));
static void pre_insert_copy_insn PROTO ((struct expr *, rtx));
static void pre_insert_copies   PROTO ((void));
static int pre_delete           PROTO ((void));
static int pre_gcse             PROTO ((void));
static int one_pre_gcse_pass    PROTO ((int));

static void add_label_notes     PROTO ((rtx, rtx));

static void alloc_code_hoist_mem PROTO ((int, int));
static void free_code_hoist_mem PROTO ((void));
static void compute_code_hoist_vbeinout PROTO ((void));
static void compute_code_hoist_data PROTO ((void));
static int hoist_expr_reaches_here_p PROTO ((int, int, int, char *));
static void hoist_code          PROTO ((void));
static int one_code_hoisting_pass PROTO ((void));

static void alloc_rd_mem        PROTO ((int, int));
static void free_rd_mem         PROTO ((void));
static void handle_rd_kill_set  PROTO ((rtx, int, int));
static void compute_kill_rd     PROTO ((void));
static void compute_rd          PROTO ((void));
static void alloc_avail_expr_mem PROTO ((int, int));
static void free_avail_expr_mem PROTO ((void));
static void compute_ae_gen      PROTO ((void));
static int expr_killed_p        PROTO ((rtx, int));
static void compute_ae_kill     PROTO ((void));
static void compute_available   PROTO ((void));
static int expr_reaches_here_p  PROTO ((struct occr *, struct expr *,
                                        int, int, char *));
static rtx computing_insn       PROTO ((struct expr *, rtx));
static int def_reaches_here_p   PROTO ((rtx, rtx));
static int can_disregard_other_sets PROTO ((struct reg_set **, rtx, int));
static int handle_avail_expr    PROTO ((rtx, struct expr *));
static int classic_gcse         PROTO ((void));
static int one_classic_gcse_pass PROTO ((int));

static void invalidate_nonnull_info PROTO ((rtx, rtx));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();
  find_basic_blocks (f, max_gcse_regno, file, 1);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);

  /* Allocate and compute predecessors/successors.  */

  s_preds = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  s_succs = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
  num_succs = (int *) alloca (n_basic_blocks * sizeof (int));
  bytes_used = 4 * n_basic_blocks * sizeof (int_list_ptr);
  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

  if (file)
    dump_bb_data (file, s_preds, s_succs, 0);

  /* Record where pseudo-registers are set.
     This data is kept accurate during each pass.
     ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during
     hash table computation.

     It may be tempting to compute MEM set information here too, but MEM
     sets will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	changed |= one_pre_gcse_pass (pass + 1);

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We cannot
	 reuse the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}
      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  /* Free our obstack.  */
  obstack_free (&gcse_obstack, NULL_PTR);
  /* Free reg_set_table.  */
  free_reg_set_mem ();
  /* Free storage used to record predecessor/successor data.  */
  free_bb_mem ();
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  char *free_point = (char *) oballoc (1);

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_CC :
#ifdef AVOID_CCMODE_COPIES
	  can_copy_p[i] = 0;
#else
	  reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	  insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	  if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	    can_copy_p[i] = 1;
#endif
	  break;
	default :
	  can_copy_p[i] = 1;
	  break;
	}
    }
  end_sequence ();

  /* Free the objects we just allocated.  */
  obfree (free_point);
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	INSN_CUID (insn) = i++;
      else
	INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  CUID_INSN (i) = insn;
	  i++;
	}
    }

  /* Allocate vars to track sets of regs.  */

  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}

\f
/* Compute the local properties of each recorded expression.
   Local properties are those that are defined by the block, irrespective
   of other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all
   compute basically the same information and thus can easily share
   this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording
   local properties.  If NULL, then it is not necessary to compute
   or record that particular property.

   SETP controls which hash table to look at.  If zero, this routine
   looks at the expr hash table; if nonzero this routine looks at
   the set hash table.  Additionally, TRANSP is computed as ~TRANSP,
   since this is really cprop's ABSALTERED.  */
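
/* An illustrative example (the block contents are invented):

     bb:  insn 1:  r5 = r1 + r2
	  insn 2:  r1 = ...
	  insn 3:  r6 = r1 + r2

   For the expression (plus r1 r2): insn 1 is an anticipatable occurrence
   (first in the block, operands unmodified before it), insn 3 is an
   available occurrence (last in the block, operands unmodified after it),
   and the expression is not transparent in bb because r1 is set there.  */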

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  struct occr *occr;
	  int indx = expr->bitmap_index;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */

	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */

	  if (antloc)
	    {
	      for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (antloc[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->deleted_p = 0;
		}
	    }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    {
	      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (comp[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->copied_p = 0;
		}
	    }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}

\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;
      reg_set_table = (struct reg_set **)
	grealloc ((char *) reg_set_table,
		  new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
	{
	  reg_info_ptr2 = reg_info_ptr1;
	  reg_info_ptr1 = reg_info_ptr1->next;
	}
      reg_info_ptr2->next = new_reg_info;
    }
}

/* For communication between next two functions (via note_stores).  */
static rtx record_set_insn;

/* Called from compute_sets via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      if (REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	record_one_set (REGNO (dest), record_set_insn);
    }
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.
   See the comments for `reg_set_table' for further docs.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  record_set_insn = insn;
	  note_stores (PATTERN (insn), record_set_info);
	}
      insn = NEXT_INSN (insn);
    }
}
\f
/* Hash table support.  */

#define NEVER_SET -1

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.
   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p)
	{
	  if (mem_last_set != NEVER_SET
	      && mem_last_set >= INSN_CUID (insn))
	    return 0;
	}
      else
	{
	  if (mem_first_set != NEVER_SET
	      && mem_first_set < INSN_CUID (insn))
	    return 0;
	}
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  if (! oprs_unchanged_p (tem, insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
		return 0;
	    }
	}
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.
   MODE is only used if X is a CONST_INT.
   A boolean indicating if a volatile operand is found or if the expression
   contains something we don't want to insert in the table is stored in
   DO_NOT_RECORD_P.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
1353 | ||
1354 | /* Subroutine of hash_expr to do the actual work. */ | |
1355 | ||
1356 | static unsigned int | |
1357 | hash_expr_1 (x, mode, do_not_record_p) | |
1358 | rtx x; | |
1359 | enum machine_mode mode; | |
1360 | int *do_not_record_p; | |
1361 | { | |
1362 | int i, j; | |
1363 | unsigned hash = 0; | |
1364 | enum rtx_code code; | |
6f7d635c | 1365 | const char *fmt; |
7506f491 DE |
1366 | |
1367 | /* repeat is used to turn tail-recursion into iteration. */ | |
1368 | repeat: | |
1369 | ||
1370 | if (x == 0) | |
1371 | return hash; | |
1372 | ||
1373 | code = GET_CODE (x); | |
1374 | switch (code) | |
1375 | { | |
1376 | case REG: | |
1377 | { | |
1378 | register int regno = REGNO (x); | |
1379 | hash += ((unsigned) REG << 7) + regno; | |
1380 | return hash; | |
1381 | } | |
1382 | ||
1383 | case CONST_INT: | |
1384 | { | |
1385 | unsigned HOST_WIDE_INT tem = INTVAL (x); | |
1386 | hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem; | |
1387 | return hash; | |
1388 | } | |
1389 | ||
1390 | case CONST_DOUBLE: | |
1391 | /* This is like the general case, except that it only counts | |
1392 | the integers representing the constant. */ | |
1393 | hash += (unsigned) code + (unsigned) GET_MODE (x); | |
1394 | if (GET_MODE (x) != VOIDmode) | |
1395 | for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++) | |
1396 | { | |
8a34409d | 1397 | unsigned tem = XWINT (x, i); |
7506f491 DE |
1398 | hash += tem; |
1399 | } | |
1400 | else | |
1401 | hash += ((unsigned) CONST_DOUBLE_LOW (x) | |
1402 | + (unsigned) CONST_DOUBLE_HIGH (x)); | |
1403 | return hash; | |
1404 | ||
1405 | /* Assume there is only one rtx object for any given label. */ | |
1406 | case LABEL_REF: | |
1407 | /* We don't hash on the address of the CODE_LABEL to avoid bootstrap | |
1408 | differences and differences between each stage's debugging dumps. */ | |
1409 | hash += ((unsigned) LABEL_REF << 7) + CODE_LABEL_NUMBER (XEXP (x, 0)); | |
1410 | return hash; | |
1411 | ||
1412 | case SYMBOL_REF: | |
1413 | { | |
1414 | /* Don't hash on the symbol's address to avoid bootstrap differences. | |
1415 | Different hash values may cause expressions to be recorded in | |
1416 | different orders and thus different registers to be used in the | |
1417 | final assembler. This also avoids differences in the dump files | |
1418 | between various stages. */ | |
1419 | unsigned int h = 0; | |
1420 | unsigned char *p = (unsigned char *) XSTR (x, 0); | |
1421 | while (*p) | |
1422 | h += (h << 7) + *p++; /* ??? revisit */ | |
1423 | hash += ((unsigned) SYMBOL_REF << 7) + h; | |
1424 | return hash; | |
1425 | } | |
1426 | ||
1427 | case MEM: | |
1428 | if (MEM_VOLATILE_P (x)) | |
1429 | { | |
1430 | *do_not_record_p = 1; | |
1431 | return 0; | |
1432 | } | |
1433 | hash += (unsigned) MEM; | |
1434 | x = XEXP (x, 0); | |
1435 | goto repeat; | |
1436 | ||
1437 | case PRE_DEC: | |
1438 | case PRE_INC: | |
1439 | case POST_DEC: | |
1440 | case POST_INC: | |
1441 | case PC: | |
1442 | case CC0: | |
1443 | case CALL: | |
1444 | case UNSPEC_VOLATILE: | |
1445 | *do_not_record_p = 1; | |
1446 | return 0; | |
1447 | ||
1448 | case ASM_OPERANDS: | |
1449 | if (MEM_VOLATILE_P (x)) | |
1450 | { | |
1451 | *do_not_record_p = 1; | |
1452 | return 0; | |
1453 | } | |
1454 | ||
1455 | default: | |
1456 | break; | |
1457 | } | |
1458 | ||
1459 | i = GET_RTX_LENGTH (code) - 1; | |
1460 | hash += (unsigned) code + (unsigned) GET_MODE (x); | |
1461 | fmt = GET_RTX_FORMAT (code); | |
1462 | for (; i >= 0; i--) | |
1463 | { | |
1464 | if (fmt[i] == 'e') | |
1465 | { | |
1466 | rtx tem = XEXP (x, i); | |
1467 | ||
1468 | /* If we are about to do the last recursive call | |
1469 | needed at this level, change it into iteration. | |
1470 | This function is called enough to be worth it. */ | |
1471 | if (i == 0) | |
1472 | { | |
1473 | x = tem; | |
1474 | goto repeat; | |
1475 | } | |
1476 | hash += hash_expr_1 (tem, 0, do_not_record_p); | |
1477 | if (*do_not_record_p) | |
1478 | return 0; | |
1479 | } | |
1480 | else if (fmt[i] == 'E') | |
1481 | for (j = 0; j < XVECLEN (x, i); j++) | |
1482 | { | |
1483 | hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p); | |
1484 | if (*do_not_record_p) | |
1485 | return 0; | |
1486 | } | |
1487 | else if (fmt[i] == 's') | |
1488 | { | |
1489 | register unsigned char *p = (unsigned char *) XSTR (x, i); | |
1490 | if (p) | |
1491 | while (*p) | |
1492 | hash += *p++; | |
1493 | } | |
1494 | else if (fmt[i] == 'i') | |
1495 | { | |
1496 | register unsigned tem = XINT (x, i); | |
1497 | hash += tem; | |
1498 | } | |
1499 | else | |
1500 | abort (); | |
1501 | } | |
1502 | ||
1503 | return hash; | |
1504 | } | |
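/* Illustrative sketch (not part of the compiler, hence #if 0): the same
   bucketed, code-weighted hashing scheme as hash_expr/hash_expr_1 above,
   applied to a toy expression tree.  All "toy_" names are hypothetical.  */
#if 0
enum toy_code { TOY_REG, TOY_CONST, TOY_PLUS, TOY_VOLATILE };

struct toy_expr
{
  enum toy_code code;
  int value;                   /* regno or constant for leaf nodes */
  struct toy_expr *op0, *op1;  /* operands for TOY_PLUS */
};

/* Mix the code into the hash, then the operands; a volatile node sets
   *DO_NOT_RECORD_P and poisons the whole expression, just as
   MEM_VOLATILE_P and UNSPEC_VOLATILE do above.  */
static unsigned int
toy_hash (const struct toy_expr *x, int *do_not_record_p)
{
  unsigned int hash = 0;

  while (x != NULL)
    {
      if (x->code == TOY_VOLATILE)
        {
          *do_not_record_p = 1;
          return 0;
        }
      hash += (unsigned) x->code << 7;
      if (x->code != TOY_PLUS)
        return hash + (unsigned) x->value;
      hash += toy_hash (x->op0, do_not_record_p);
      if (*do_not_record_p)
        return 0;
      x = x->op1;   /* last operand: recursion turned into iteration */
    }
  return hash;
}
#endif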
1505 | ||
1506 | /* Hash a set of register REGNO. | |
1507 | ||
1508 | Sets are hashed on the register that is set. | |
1509 | This simplifies the PRE copy propagation code. | |
1510 | ||
1511 | ??? May need to make things more elaborate. Later, as necessary. */ | |
1512 | ||
1513 | static unsigned int | |
1514 | hash_set (regno, hash_table_size) | |
1515 | int regno; | |
1516 | int hash_table_size; | |
1517 | { | |
1518 | unsigned int hash; | |
1519 | ||
1520 | hash = regno; | |
1521 | return hash % hash_table_size; | |
1522 | } | |
1523 | ||
1524 | /* Return non-zero if X is equivalent to Y. | |
1525 | ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */ | |
1526 | ||
1527 | static int | |
1528 | expr_equiv_p (x, y) | |
1529 | rtx x, y; | |
1530 | { | |
1531 | register int i, j; | |
1532 | register enum rtx_code code; | |
1533 | register const char *fmt; | |
1534 | |
1535 | if (x == y) | |
1536 | return 1; | |
1537 | if (x == 0 || y == 0) | |
1538 | return x == y; | |
1539 | ||
1540 | code = GET_CODE (x); | |
1541 | if (code != GET_CODE (y)) | |
1542 | return 0; | |
1543 | ||
1544 | /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ | |
1545 | if (GET_MODE (x) != GET_MODE (y)) | |
1546 | return 0; | |
1547 | ||
1548 | switch (code) | |
1549 | { | |
1550 | case PC: | |
1551 | case CC0: | |
1552 | return x == y; | |
1553 | ||
1554 | case CONST_INT: | |
1555 | return INTVAL (x) == INTVAL (y); | |
1556 | ||
1557 | case LABEL_REF: | |
1558 | return XEXP (x, 0) == XEXP (y, 0); | |
1559 | ||
1560 | case SYMBOL_REF: | |
1561 | return XSTR (x, 0) == XSTR (y, 0); | |
1562 | ||
1563 | case REG: | |
1564 | return REGNO (x) == REGNO (y); | |
1565 | ||
1566 | /* For commutative operations, check both orders. */ | |
1567 | case PLUS: | |
1568 | case MULT: | |
1569 | case AND: | |
1570 | case IOR: | |
1571 | case XOR: | |
1572 | case NE: | |
1573 | case EQ: | |
1574 | return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0)) | |
1575 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 1))) | |
1576 | || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1)) | |
1577 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 0)))); | |
1578 | ||
1579 | default: | |
1580 | break; | |
1581 | } | |
1582 | ||
1583 | /* Compare the elements. If any pair of corresponding elements | |
1584 | fail to match, return 0 for the whole thing. */ | |
1585 | ||
1586 | fmt = GET_RTX_FORMAT (code); | |
1587 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1588 | { | |
1589 | switch (fmt[i]) | |
1590 | { | |
1591 | case 'e': | |
1592 | if (! expr_equiv_p (XEXP (x, i), XEXP (y, i))) | |
1593 | return 0; | |
1594 | break; | |
1595 | ||
1596 | case 'E': | |
1597 | if (XVECLEN (x, i) != XVECLEN (y, i)) | |
1598 | return 0; | |
1599 | for (j = 0; j < XVECLEN (x, i); j++) | |
1600 | if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j))) | |
1601 | return 0; | |
1602 | break; | |
1603 | ||
1604 | case 's': | |
1605 | if (strcmp (XSTR (x, i), XSTR (y, i))) | |
1606 | return 0; | |
1607 | break; | |
1608 | ||
1609 | case 'i': | |
1610 | if (XINT (x, i) != XINT (y, i)) | |
1611 | return 0; | |
1612 | break; | |
1613 | ||
1614 | case 'w': | |
1615 | if (XWINT (x, i) != XWINT (y, i)) | |
1616 | return 0; | |
1617 | break; | |
1618 | ||
1619 | case '0': | |
1620 | break; | |
1621 | ||
1622 | default: | |
1623 | abort (); | |
1624 | } | |
1625 | } | |
1626 | ||
1627 | return 1; | |
1628 | } | |
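/* Illustrative sketch (hypothetical, #if 0'd out): the commutative-operand
   rule used by expr_equiv_p above, on the toy_expr type from the previous
   sketch.  (plus a b) is treated as equivalent to (plus b a).  */
#if 0
static int
toy_equiv (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return 1;
  if (x == NULL || y == NULL || x->code != y->code)
    return 0;
  if (x->code != TOY_PLUS)
    return x->value == y->value;
  /* Commutative operator: check both operand orders, as expr_equiv_p
     does for PLUS, MULT, AND, IOR, XOR, NE and EQ.  */
  return ((toy_equiv (x->op0, y->op0) && toy_equiv (x->op1, y->op1))
          || (toy_equiv (x->op0, y->op1) && toy_equiv (x->op1, y->op0)));
}
#endif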
1629 | ||
1630 | /* Insert expression X in INSN in the hash table. | |
1631 | If it is already present, record it as the last occurrence in INSN's | |
1632 | basic block. | |
1633 | ||
1634 | MODE is the mode of the value X is being stored into. | |
1635 | It is only used if X is a CONST_INT. | |
1636 | ||
1637 | ANTIC_P is non-zero if X is an anticipatable expression. | |
1638 | AVAIL_P is non-zero if X is an available expression. */ | |
1639 | ||
1640 | static void | |
1641 | insert_expr_in_table (x, mode, insn, antic_p, avail_p) | |
1642 | rtx x; | |
1643 | enum machine_mode mode; | |
1644 | rtx insn; | |
1645 | int antic_p, avail_p; | |
1646 | { | |
1647 | int found, do_not_record_p; | |
1648 | unsigned int hash; | |
1649 | struct expr *cur_expr, *last_expr = NULL; | |
1650 | struct occr *antic_occr, *avail_occr; | |
1651 | struct occr *last_occr = NULL; | |
1652 | ||
1653 | hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size); | |
1654 | ||
1655 | /* Do not insert expression in table if it contains volatile operands, | |
1656 | or if hash_expr determines the expression is something we don't want | |
1657 | to or can't handle. */ | |
1658 | if (do_not_record_p) | |
1659 | return; | |
1660 | ||
1661 | cur_expr = expr_hash_table[hash]; | |
1662 | found = 0; | |
1663 | ||
1664 | while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x))) | |
1665 | { | |
1666 | /* If the expression isn't found, save a pointer to the end of | |
1667 | the list. */ | |
1668 | last_expr = cur_expr; | |
1669 | cur_expr = cur_expr->next_same_hash; | |
1670 | } | |
1671 | ||
1672 | if (! found) | |
1673 | { | |
1674 | cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr)); | |
1675 | bytes_used += sizeof (struct expr); | |
1676 | if (expr_hash_table[hash] == NULL) | |
1677 | { | |
1678 | /* This is the first pattern that hashed to this index. */ | |
1679 | expr_hash_table[hash] = cur_expr; | |
1680 | } | |
1681 | else | |
1682 | { | |
1683 | /* Add EXPR to end of this hash chain. */ | |
1684 | last_expr->next_same_hash = cur_expr; | |
1685 | } | |
1686 | /* Set the fields of the expr element. */ | |
1687 | cur_expr->expr = x; | |
1688 | cur_expr->bitmap_index = n_exprs++; | |
1689 | cur_expr->next_same_hash = NULL; | |
1690 | cur_expr->antic_occr = NULL; | |
1691 | cur_expr->avail_occr = NULL; | |
1692 | } | |
1693 | ||
1694 | /* Now record the occurrence(s). */ | |
1695 | ||
1696 | if (antic_p) | |
1697 | { | |
1698 | antic_occr = cur_expr->antic_occr; | |
1699 | ||
1700 | /* Search for another occurrence in the same basic block. */ | |
1701 | while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn)) | |
1702 | { | |
1703 | /* If an occurrence isn't found, save a pointer to the end of | |
1704 | the list. */ | |
1705 | last_occr = antic_occr; | |
1706 | antic_occr = antic_occr->next; | |
1707 | } | |
1708 | ||
1709 | if (antic_occr) | |
1710 | { | |
1711 | /* Found another instance of the expression in the same basic block. | |
1712 | Prefer the currently recorded one. We want the first one in the | |
1713 | block and the block is scanned from start to end. */ | |
1714 | ; /* nothing to do */ | |
1715 | } | |
1716 | else | |
1717 | { | |
1718 | /* First occurrence of this expression in this basic block. */ | |
1719 | antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
1720 | bytes_used += sizeof (struct occr); | |
1721 | /* First occurrence of this expression in any block? */ | |
1722 | if (cur_expr->antic_occr == NULL) | |
1723 | cur_expr->antic_occr = antic_occr; | |
1724 | else | |
1725 | last_occr->next = antic_occr; | |
1726 | antic_occr->insn = insn; | |
1727 | antic_occr->next = NULL; | |
1728 | } | |
1729 | } | |
1730 | ||
1731 | if (avail_p) | |
1732 | { | |
1733 | avail_occr = cur_expr->avail_occr; | |
1734 | ||
1735 | /* Search for another occurrence in the same basic block. */ | |
1736 | while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn)) | |
1737 | { | |
1738 | /* If an occurrence isn't found, save a pointer to the end of | |
1739 | the list. */ | |
1740 | last_occr = avail_occr; | |
1741 | avail_occr = avail_occr->next; | |
1742 | } | |
1743 | ||
1744 | if (avail_occr) | |
1745 | { | |
1746 | /* Found another instance of the expression in the same basic block. | |
1747 | Prefer this occurrence to the currently recorded one. We want | |
1748 | the last one in the block and the block is scanned from start | |
1749 | to end. */ | |
1750 | avail_occr->insn = insn; | |
1751 | } | |
1752 | else | |
1753 | { | |
1754 | /* First occurrence of this expression in this basic block. */ | |
1755 | avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
1756 | bytes_used += sizeof (struct occr); | |
1757 | /* First occurrence of this expression in any block? */ | |
1758 | if (cur_expr->avail_occr == NULL) | |
1759 | cur_expr->avail_occr = avail_occr; | |
1760 | else | |
1761 | last_occr->next = avail_occr; | |
1762 | avail_occr->insn = insn; | |
1763 | avail_occr->next = NULL; | |
1764 | } | |
1765 | } | |
1766 | } | |
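/* Illustrative sketch (hypothetical, #if 0'd out) of the per-block
   occurrence policy implemented above: for anticipatability we keep the
   *first* occurrence in a block, for availability the *last*, and never
   more than one occurrence per block on either list.  */
#if 0
#include <stdlib.h>

struct toy_occr { int insn_uid; int block; struct toy_occr *next; };

static void
toy_record_occr (struct toy_occr **listp, int insn_uid, int block,
                 int prefer_last)
{
  struct toy_occr *o = *listp, *last = NULL;

  /* Look for an existing occurrence in the same block.  */
  while (o != NULL && o->block != block)
    last = o, o = o->next;

  if (o != NULL)
    {
      if (prefer_last)
        o->insn_uid = insn_uid;   /* availability: last one wins */
      return;                     /* anticipatability: keep the first */
    }

  o = (struct toy_occr *) malloc (sizeof (struct toy_occr));
  o->insn_uid = insn_uid;
  o->block = block;
  o->next = NULL;
  if (last != NULL)
    last->next = o;               /* append to end of the chain */
  else
    *listp = o;
}
#endif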
1767 | ||
1768 | /* Insert pattern X in INSN in the hash table. | |
1769 | X is a SET of a reg to either another reg or a constant. | |
1770 | If it is already present, record it as the last occurrence in INSN's | |
1771 | basic block. */ | |
1772 | ||
1773 | static void | |
1774 | insert_set_in_table (x, insn) | |
1775 | rtx x; | |
1776 | rtx insn; | |
1777 | { | |
1778 | int found; | |
1779 | unsigned int hash; | |
1780 | struct expr *cur_expr, *last_expr = NULL; | |
1781 | struct occr *cur_occr, *last_occr = NULL; | |
1782 | ||
1783 | if (GET_CODE (x) != SET | |
1784 | || GET_CODE (SET_DEST (x)) != REG) | |
1785 | abort (); | |
1786 | ||
1787 | hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size); | |
1788 | ||
1789 | cur_expr = set_hash_table[hash]; | |
1790 | found = 0; | |
1791 | ||
1792 | while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x))) | |
1793 | { | |
1794 | /* If the expression isn't found, save a pointer to the end of | |
1795 | the list. */ | |
1796 | last_expr = cur_expr; | |
1797 | cur_expr = cur_expr->next_same_hash; | |
1798 | } | |
1799 | ||
1800 | if (! found) | |
1801 | { | |
1802 | cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr)); | |
1803 | bytes_used += sizeof (struct expr); | |
1804 | if (set_hash_table[hash] == NULL) | |
1805 | { | |
1806 | /* This is the first pattern that hashed to this index. */ | |
1807 | set_hash_table[hash] = cur_expr; | |
1808 | } | |
1809 | else | |
1810 | { | |
1811 | /* Add EXPR to end of this hash chain. */ | |
1812 | last_expr->next_same_hash = cur_expr; | |
1813 | } | |
1814 | /* Set the fields of the expr element. | |
1815 | We must copy X because it can be modified when copy propagation is | |
1816 | performed on its operands. */ | |
1817 | /* ??? Should this go in a different obstack? */ | |
1818 | cur_expr->expr = copy_rtx (x); | |
1819 | cur_expr->bitmap_index = n_sets++; | |
1820 | cur_expr->next_same_hash = NULL; | |
1821 | cur_expr->antic_occr = NULL; | |
1822 | cur_expr->avail_occr = NULL; | |
1823 | } | |
1824 | ||
1825 | /* Now record the occurrence. */ | |
1826 | ||
1827 | cur_occr = cur_expr->avail_occr; | |
1828 | ||
1829 | /* Search for another occurrence in the same basic block. */ | |
1830 | while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn)) | |
1831 | { | |
1832 | /* If an occurrence isn't found, save a pointer to the end of | |
1833 | the list. */ | |
1834 | last_occr = cur_occr; | |
1835 | cur_occr = cur_occr->next; | |
1836 | } | |
1837 | ||
1838 | if (cur_occr) | |
1839 | { | |
1840 | /* Found another instance of the expression in the same basic block. | |
1841 | Prefer this occurrence to the currently recorded one. We want | |
1842 | the last one in the block and the block is scanned from start | |
1843 | to end. */ | |
1844 | cur_occr->insn = insn; | |
1845 | } | |
1846 | else | |
1847 | { | |
1848 | /* First occurrence of this expression in this basic block. */ | |
1849 | cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr)); | |
1850 | bytes_used += sizeof (struct occr); | |
1851 | /* First occurrence of this expression in any block? */ | |
1852 | if (cur_expr->avail_occr == NULL) | |
1853 | cur_expr->avail_occr = cur_occr; | |
1854 | else | |
1855 | last_occr->next = cur_occr; | |
1856 | cur_occr->insn = insn; | |
1857 | cur_occr->next = NULL; | |
1858 | } | |
1859 | } | |
1860 | ||
1861 | /* Scan pattern PAT of INSN and add an entry to the hash table. | |
1862 | If SET_P is non-zero, this is for the assignment hash table, | |
1863 | otherwise it is for the expression hash table. */ | |
1864 | ||
1865 | static void | |
1866 | hash_scan_set (pat, insn, set_p) | |
1867 | rtx pat, insn; | |
1868 | int set_p; | |
1869 | { | |
1870 | rtx src = SET_SRC (pat); | |
1871 | rtx dest = SET_DEST (pat); | |
1872 | ||
1873 | if (GET_CODE (src) == CALL) | |
1874 | hash_scan_call (src, insn); | |
1875 | ||
1876 | if (GET_CODE (dest) == REG) | |
1877 | { | |
1878 | int regno = REGNO (dest); | |
1879 | rtx tmp; | |
1880 | ||
1881 | /* Only record sets of pseudo-regs in the hash table. */ | |
1882 | if (! set_p | |
1883 | && regno >= FIRST_PSEUDO_REGISTER | |
1884 | /* Don't GCSE something if we can't do a reg/reg copy. */ | |
1885 | && can_copy_p [GET_MODE (dest)] | |
1886 | /* Is SET_SRC something we want to gcse? */ | |
1887 | && want_to_gcse_p (src)) | |
1888 | { | |
1889 | /* An expression is not anticipatable if its operands are | |
1890 | modified before this insn. */ | |
1891 | int antic_p = ! optimize_size && oprs_anticipatable_p (src, insn); | |
1892 | /* An expression is not available if its operands are | |
1893 | subsequently modified, including this insn. */ | |
1894 | int avail_p = oprs_available_p (src, insn); | |
1895 | insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p); | |
1896 | } | |
1897 | /* Record sets for constant/copy propagation. */ | |
1898 | else if (set_p | |
1899 | && regno >= FIRST_PSEUDO_REGISTER | |
1900 | && ((GET_CODE (src) == REG | |
1901 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
1902 | && can_copy_p [GET_MODE (dest)]) | |
1903 | || GET_CODE (src) == CONST_INT | |
1904 | || GET_CODE (src) == SYMBOL_REF | |
1905 | || GET_CODE (src) == CONST_DOUBLE) | |
1906 | /* A copy is not available if its src or dest is subsequently |
1907 | modified. Here we want to search from INSN+1 on, but | |
1908 | oprs_available_p searches from INSN on. */ | |
1909 | && (insn == BLOCK_END (BLOCK_NUM (insn)) | |
1910 | || ((tmp = next_nonnote_insn (insn)) != NULL_RTX | |
1911 | && oprs_available_p (pat, tmp)))) | |
1912 | insert_set_in_table (pat, insn); | |
1913 | } | |
1914 | } |
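/* Worked example (illustrative) of the ANTIC_P/AVAIL_P flags computed in
   hash_scan_set for the expression "a + b" within one basic block:

	r1 = a + b	anticipatable (neither a nor b modified earlier
			in the block); available afterwards only if a
			and b are not modified below.
	a = ...		kills availability of the computation above and
			anticipatability of any computation below.
	r2 = a + b	not anticipatable (a changed above), but
			available from here to the end of the block.  */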
1915 | ||
1916 | static void | |
1917 | hash_scan_clobber (x, insn) | |
1918 | rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED; | |
1919 | { |
1920 | /* Currently nothing to do. */ | |
1921 | } | |
1922 | ||
1923 | static void | |
1924 | hash_scan_call (x, insn) | |
1925 | rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED; | |
1926 | { |
1927 | /* Currently nothing to do. */ | |
1928 | } | |
1929 | ||
1930 | /* Process INSN and add hash table entries as appropriate. | |
1931 | ||
1932 | Only available expressions that set a single pseudo-reg are recorded. | |
1933 | ||
1934 | Single sets in a PARALLEL could be handled, but it's an extra complication | |
1935 | that isn't dealt with right now. The trick is handling the CLOBBERs that | |
1936 | are also in the PARALLEL. Later. | |
1937 | ||
1938 | If SET_P is non-zero, this is for the assignment hash table, | |
1939 | otherwise it is for the expression hash table. | |
1940 | If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should | |
1941 | not record any expressions. */ | |
1942 | |
1943 | static void | |
1944 | hash_scan_insn (insn, set_p, in_libcall_block) | |
1945 | rtx insn; |
1946 | int set_p; | |
1947 | int in_libcall_block; | |
1948 | { |
1949 | rtx pat = PATTERN (insn); | |
1950 | ||
1951 | /* Pick out the sets of INSN and for other forms of instructions record | |
1952 | what's been modified. */ | |
1953 | ||
1954 | if (GET_CODE (pat) == SET && ! in_libcall_block) | |
1955 | { |
1956 | /* Ignore obvious no-ops. */ | |
1957 | if (SET_SRC (pat) != SET_DEST (pat)) | |
1958 | hash_scan_set (pat, insn, set_p); | |
1959 | } | |
1960 | else if (GET_CODE (pat) == PARALLEL) |
1961 | { | |
1962 | int i; | |
1963 | ||
1964 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
1965 | { | |
1966 | rtx x = XVECEXP (pat, 0, i); | |
1967 | ||
1968 | if (GET_CODE (x) == SET) | |
1969 | { | |
1970 | if (GET_CODE (SET_SRC (x)) == CALL) | |
1971 | hash_scan_call (SET_SRC (x), insn); | |
1972 | } |
1973 | else if (GET_CODE (x) == CLOBBER) | |
1974 | hash_scan_clobber (x, insn); | |
1975 | else if (GET_CODE (x) == CALL) | |
1976 | hash_scan_call (x, insn); | |
1977 | } | |
1978 | } | |
1979 | else if (GET_CODE (pat) == CLOBBER) | |
1980 | hash_scan_clobber (pat, insn); | |
1981 | else if (GET_CODE (pat) == CALL) | |
1982 | hash_scan_call (pat, insn); | |
1983 | } | |
1984 | ||
1985 | static void | |
1986 | dump_hash_table (file, name, table, table_size, total_size) | |
1987 | FILE *file; | |
1988 | const char *name; | |
1989 | struct expr **table; |
1990 | int table_size, total_size; | |
1991 | { | |
1992 | int i; | |
1993 | /* Flattened out table, so it's printed in proper order. */ | |
1994 | struct expr **flat_table = (struct expr **) alloca (total_size * sizeof (struct expr *)); | |
1995 | unsigned int *hash_val = (unsigned int *) alloca (total_size * sizeof (unsigned int)); | |
1996 | ||
1997 | bzero ((char *) flat_table, total_size * sizeof (struct expr *)); | |
1998 | for (i = 0; i < table_size; i++) | |
1999 | { | |
2000 | struct expr *expr; | |
2001 | ||
2002 | for (expr = table[i]; expr != NULL; expr = expr->next_same_hash) | |
2003 | { | |
2004 | flat_table[expr->bitmap_index] = expr; | |
2005 | hash_val[expr->bitmap_index] = i; | |
2006 | } | |
2007 | } | |
2008 | ||
2009 | fprintf (file, "%s hash table (%d buckets, %d entries)\n", | |
2010 | name, table_size, total_size); | |
2011 | ||
2012 | for (i = 0; i < total_size; i++) | |
2013 | { | |
2014 | struct expr *expr = flat_table[i]; | |
2015 | ||
2016 | fprintf (file, "Index %d (hash value %d)\n ", | |
2017 | expr->bitmap_index, hash_val[i]); | |
2018 | print_rtl (file, expr->expr); | |
2019 | fprintf (file, "\n"); | |
2020 | } | |
2021 | ||
2022 | fprintf (file, "\n"); | |
2023 | } | |
2024 | ||
2025 | /* Record register first/last/block set information for REGNO in INSN. | |
2026 | reg_first_set records the first place in the block where the register | |
2027 | is set and is used to compute "anticipatability". | |
2028 | reg_last_set records the last place in the block where the register | |
2029 | is set and is used to compute "availability". | |
2030 | reg_set_in_block records whether the register is set in the block | |
2031 | and is used to compute "transparency". */ | |
2032 | ||
2033 | static void | |
2034 | record_last_reg_set_info (insn, regno) | |
2035 | rtx insn; | |
2036 | int regno; | |
2037 | { | |
2038 | if (reg_first_set[regno] == NEVER_SET) | |
2039 | reg_first_set[regno] = INSN_CUID (insn); |
2040 | reg_last_set[regno] = INSN_CUID (insn); | |
2041 | SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno); | |
2042 | } | |
2043 | ||
2044 | /* Record memory first/last/block set information for INSN. */ | |
2045 | ||
2046 | static void | |
2047 | record_last_mem_set_info (insn) | |
2048 | rtx insn; | |
2049 | { | |
2050 | if (mem_first_set == NEVER_SET) | |
2051 | mem_first_set = INSN_CUID (insn); |
2052 | mem_last_set = INSN_CUID (insn); | |
2053 | mem_set_in_block[BLOCK_NUM (insn)] = 1; | |
2054 | } | |
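/* Minimal sketch (hypothetical names) of the NEVER_SET pattern used by
   the two record_last_*_set_info routines above: only the first store
   claims the "first set" slot, every store updates the "last set" slot.  */
#if 0
#define TOY_NEVER_SET (-1)
static int toy_first_set = TOY_NEVER_SET;
static int toy_last_set = TOY_NEVER_SET;

static void
toy_record_set (int cuid)
{
  if (toy_first_set == TOY_NEVER_SET)
    toy_first_set = cuid;   /* feeds the anticipatability computation */
  toy_last_set = cuid;      /* feeds the availability computation */
}
#endif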
2055 | ||
2056 | /* Used for communicating between next two routines. */ | |
2057 | static rtx last_set_insn; | |
2058 | ||
2059 | /* Called from compute_hash_table via note_stores to handle one | |
2060 | SET or CLOBBER in an insn. */ | |
2061 | ||
2062 | static void | |
2063 | record_last_set_info (dest, setter) | |
2064 | rtx dest, setter ATTRIBUTE_UNUSED; | |
2065 | { |
2066 | if (GET_CODE (dest) == SUBREG) | |
2067 | dest = SUBREG_REG (dest); | |
2068 | ||
2069 | if (GET_CODE (dest) == REG) | |
2070 | record_last_reg_set_info (last_set_insn, REGNO (dest)); | |
2071 | else if (GET_CODE (dest) == MEM | |
2072 | /* Ignore pushes, they clobber nothing. */ | |
2073 | && ! push_operand (dest, GET_MODE (dest))) | |
2074 | record_last_mem_set_info (last_set_insn); | |
2075 | } | |
2076 | ||
2077 | /* Top level function to create an expression or assignment hash table. | |
2078 | ||
2079 | Expression entries are placed in the hash table if | |
2080 | - they are of the form (set (pseudo-reg) src), | |
2081 | - src is something we want to perform GCSE on, | |
2082 | - none of the operands are subsequently modified in the block | |
2083 | ||
2084 | Assignment entries are placed in the hash table if | |
2085 | - they are of the form (set (pseudo-reg) src), | |
2086 | - src is something we want to perform const/copy propagation on, | |
2087 | - none of the operands or target are subsequently modified in the block | |
2088 | Currently src must be a pseudo-reg, a const_int, a symbol_ref or a const_double. | |
2089 | ||
2090 | F is the first insn. | |
2091 | SET_P is non-zero for computing the assignment hash table. */ | |
2092 | ||
2093 | static void | |
2094 | compute_hash_table (set_p) | |
2095 | int set_p; |
2096 | { | |
2097 | int bb; | |
2098 | ||
2099 | /* While we compute the hash table we also compute a bit array of which | |
2100 | registers are set in which blocks. | |
2101 | We also compute which blocks set memory, in the absence of aliasing | |
2102 | support [which is TODO]. | |
2103 | ??? This isn't needed during const/copy propagation, but it's cheap to | |
2104 | compute. Later. */ | |
2105 | sbitmap_vector_zero (reg_set_in_block, n_basic_blocks); | |
2106 | bzero ((char *) mem_set_in_block, n_basic_blocks); | |
2107 | ||
2108 | /* Some working arrays used to track first and last set in each block. */ | |
2109 | /* ??? One could use alloca here, but at some size a threshold is crossed | |
2110 | beyond which one should use malloc. Are we at that threshold here? */ | |
2111 | reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int)); | |
2112 | reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int)); | |
2113 | ||
2114 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2115 | { | |
2116 | rtx insn; | |
2117 | int regno; | |
2118 | int in_libcall_block; | |
2119 | int i; | |
2120 | |
2121 | /* First pass over the instructions records information used to | |
2122 | determine when registers and memory are first and last set. | |
2123 | ??? The mem_set_in_block and hard-reg reg_set_in_block computation | |
2124 | could be moved to compute_sets since they currently don't change. */ | |
2125 | ||
2126 | for (i = 0; i < max_gcse_regno; i++) |
2127 | reg_first_set[i] = reg_last_set[i] = NEVER_SET; | |
2128 | mem_first_set = NEVER_SET; | |
2129 | mem_last_set = NEVER_SET; | |
2130 | ||
2131 | for (insn = BLOCK_HEAD (bb); | |
2132 | insn && insn != NEXT_INSN (BLOCK_END (bb)); | |
2133 | insn = NEXT_INSN (insn)) |
2134 | { | |
2135 | #ifdef NON_SAVING_SETJMP | |
2136 | if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE | |
2137 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) | |
2138 | { | |
2139 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
2140 | record_last_reg_set_info (insn, regno); | |
2141 | continue; | |
2142 | } | |
2143 | #endif | |
2144 | ||
2145 | if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') | |
2146 | continue; | |
2147 | ||
2148 | if (GET_CODE (insn) == CALL_INSN) | |
2149 | { | |
2150 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
2151 | if ((call_used_regs[regno] |
2152 | && regno != STACK_POINTER_REGNUM | |
2153 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2154 | && regno != HARD_FRAME_POINTER_REGNUM | |
2155 | #endif | |
2156 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2157 | && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
2158 | #endif | |
2159 | #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) | |
2160 | && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic) | |
2161 | #endif | |
2162 | ||
2163 | && regno != FRAME_POINTER_REGNUM) | |
2164 | || global_regs[regno]) | |
2165 | record_last_reg_set_info (insn, regno); |
2166 | if (! CONST_CALL_P (insn)) | |
2167 | record_last_mem_set_info (insn); | |
2168 | } | |
2169 | ||
2170 | last_set_insn = insn; | |
2171 | note_stores (PATTERN (insn), record_last_set_info); | |
2172 | } | |
2173 | ||
2174 | /* The next pass builds the hash table. */ | |
2175 | ||
2176 | for (insn = BLOCK_HEAD (bb), in_libcall_block = 0; | |
2177 | insn && insn != NEXT_INSN (BLOCK_END (bb)); | |
2178 | insn = NEXT_INSN (insn)) |
2179 | { | |
2180 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
2181 | { |
2182 | if (find_reg_note (insn, REG_LIBCALL, NULL_RTX)) | |
2183 | in_libcall_block = 1; | |
2184 | else if (find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
2185 | in_libcall_block = 0; | |
2186 | hash_scan_insn (insn, set_p, in_libcall_block); | |
2187 | } | |
2188 | } |
2189 | } | |
2190 | ||
2191 | free (reg_first_set); | |
2192 | free (reg_last_set); | |
2193 | /* Catch bugs early. */ | |
2194 | reg_first_set = reg_last_set = 0; | |
2195 | } | |
2196 | ||
2197 | /* Allocate space for the set hash table. | |
2198 | N_INSNS is the number of instructions in the function. | |
2199 | It is used to determine the number of buckets to use. */ | |
2200 | ||
2201 | static void | |
2202 | alloc_set_hash_table (n_insns) | |
2203 | int n_insns; | |
2204 | { | |
2205 | int n; | |
2206 | ||
2207 | set_hash_table_size = n_insns / 4; | |
2208 | if (set_hash_table_size < 11) | |
2209 | set_hash_table_size = 11; | |
2210 | /* Attempt to maintain efficient use of hash table. | |
2211 | Making it an odd number is simplest for now. | |
2212 | ??? Later take some measurements. */ | |
2213 | set_hash_table_size |= 1; | |
2214 | n = set_hash_table_size * sizeof (struct expr *); | |
2215 | set_hash_table = (struct expr **) gmalloc (n); | |
2216 | } | |
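/* Sketch of the sizing rule above (nothing assumed beyond what the code
   shows: one bucket per four insns, floor of 11, forced odd).  */
#if 0
static int
toy_set_table_size (int n_insns)
{
  int size = n_insns / 4;
  if (size < 11)
    size = 11;
  return size | 1;   /* an odd size spreads "hash % size" more evenly */
}
/* E.g. toy_set_table_size (100) == 25, toy_set_table_size (8) == 11.  */
#endif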
2217 | ||
2218 | /* Free things allocated by alloc_set_hash_table. */ | |
2219 | ||
2220 | static void | |
2221 | free_set_hash_table () | |
2222 | { | |
2223 | free (set_hash_table); | |
2224 | } | |
2225 | ||
2226 | /* Compute the hash table for doing copy/const propagation. */ | |
2227 | ||
2228 | static void | |
2229 | compute_set_hash_table () | |
2230 | { |
2231 | /* Initialize count of number of entries in hash table. */ | |
2232 | n_sets = 0; | |
2233 | bzero ((char *) set_hash_table, set_hash_table_size * sizeof (struct expr *)); | |
2234 | ||
2235 | compute_hash_table (1); | |
2236 | } |
2237 | ||
2238 | /* Allocate space for the expression hash table. | |
2239 | N_INSNS is the number of instructions in the function. | |
2240 | It is used to determine the number of buckets to use. */ | |
2241 | ||
2242 | static void | |
2243 | alloc_expr_hash_table (n_insns) | |
2244 | int n_insns; | |
2245 | { | |
2246 | int n; | |
2247 | ||
2248 | expr_hash_table_size = n_insns / 2; | |
2249 | /* Make sure the amount is usable. */ | |
2250 | if (expr_hash_table_size < 11) | |
2251 | expr_hash_table_size = 11; | |
2252 | /* Attempt to maintain efficient use of hash table. | |
2253 | Making it an odd number is simplest for now. | |
2254 | ??? Later take some measurements. */ | |
2255 | expr_hash_table_size |= 1; | |
2256 | n = expr_hash_table_size * sizeof (struct expr *); | |
2257 | expr_hash_table = (struct expr **) gmalloc (n); | |
2258 | } | |
2259 | ||
2260 | /* Free things allocated by alloc_expr_hash_table. */ | |
2261 | ||
2262 | static void | |
2263 | free_expr_hash_table () | |
2264 | { | |
2265 | free (expr_hash_table); | |
2266 | } | |
2267 | ||
2268 | /* Compute the hash table for doing GCSE. */ | |
2269 | ||
2270 | static void | |
2271 | compute_expr_hash_table () | |
2272 | { |
2273 | /* Initialize count of number of entries in hash table. */ | |
2274 | n_exprs = 0; | |
2275 | bzero ((char *) expr_hash_table, expr_hash_table_size * sizeof (struct expr *)); | |
2276 | ||
2277 | compute_hash_table (0); | |
2278 | } |
2279 | \f | |
2280 | /* Expression tracking support. */ | |
2281 | ||
2282 | /* Lookup pattern PAT in the expression table. | |
2283 | The result is a pointer to the table entry, or NULL if not found. */ | |
2284 | ||
2285 | static struct expr * | |
2286 | lookup_expr (pat) | |
2287 | rtx pat; | |
2288 | { | |
2289 | int do_not_record_p; | |
2290 | unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p, | |
2291 | expr_hash_table_size); | |
2292 | struct expr *expr; | |
2293 | ||
2294 | if (do_not_record_p) | |
2295 | return NULL; | |
2296 | ||
2297 | expr = expr_hash_table[hash]; | |
2298 | ||
2299 | while (expr && ! expr_equiv_p (expr->expr, pat)) | |
2300 | expr = expr->next_same_hash; | |
2301 | ||
2302 | return expr; | |
2303 | } | |
2304 | ||
2305 | /* Lookup REGNO in the set table. | |
2306 | If PAT is non-NULL look for the entry that matches it, otherwise return | |
2307 | the first entry for REGNO. | |
2308 | The result is a pointer to the table entry, or NULL if not found. */ | |
2309 | ||
2310 | static struct expr * | |
2311 | lookup_set (regno, pat) | |
2312 | int regno; | |
2313 | rtx pat; | |
2314 | { | |
2315 | unsigned int hash = hash_set (regno, set_hash_table_size); | |
2316 | struct expr *expr; | |
2317 | ||
2318 | expr = set_hash_table[hash]; | |
2319 | ||
2320 | if (pat) | |
2321 | { | |
2322 | while (expr && ! expr_equiv_p (expr->expr, pat)) | |
2323 | expr = expr->next_same_hash; | |
2324 | } | |
2325 | else | |
2326 | { | |
2327 | while (expr && REGNO (SET_DEST (expr->expr)) != regno) | |
2328 | expr = expr->next_same_hash; | |
2329 | } | |
2330 | ||
2331 | return expr; | |
2332 | } | |
2333 | ||
2334 | /* Return the next entry for REGNO in list EXPR. */ | |
2335 | ||
2336 | static struct expr * | |
2337 | next_set (regno, expr) | |
2338 | int regno; | |
2339 | struct expr *expr; | |
2340 | { | |
2341 | do | |
2342 | expr = expr->next_same_hash; | |
2343 | while (expr && REGNO (SET_DEST (expr->expr)) != regno); | |
2344 | return expr; | |
2345 | } | |
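/* Usage sketch (hypothetical caller, #if 0'd out): lookup_set and
   next_set together enumerate every recorded SET of a register, since
   sets are hashed purely on the register number.  */
#if 0
static void
toy_walk_sets (int regno)
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX);
       set != NULL;
       set = next_set (regno, set))
    {
      /* SET_SRC (set->expr) is a candidate constant or copy source
         for REGNO here.  */
    }
}
#endif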
2346 | ||
2347 | /* Reset tables used to keep track of what's still available [since the | |
2348 | start of the block]. */ | |
2349 | ||
2350 | static void | |
2351 | reset_opr_set_tables () | |
2352 | { | |
2353 | /* Maintain a bitmap of which regs have been set since beginning of | |
2354 | the block. */ | |
2355 | sbitmap_zero (reg_set_bitmap); | |
2356 | /* Also keep a record of the last instruction to modify memory. | |
2357 | For now this is very trivial, we only record whether any memory | |
2358 | location has been modified. */ | |
2359 | mem_last_set = 0; | |
2360 | } | |
2361 | ||
2362 | /* Return non-zero if the operands of X are not set before INSN in | |
2363 | INSN's basic block. */ | |
2364 | ||
2365 | static int | |
2366 | oprs_not_set_p (x, insn) | |
2367 | rtx x, insn; | |
2368 | { | |
2369 | int i; | |
2370 | enum rtx_code code; | |
2371 | const char *fmt; | |
2372 | |
2373 | /* repeat is used to turn tail-recursion into iteration. */ | |
2374 | repeat: | |
2375 | ||
2376 | if (x == 0) | |
2377 | return 1; | |
2378 | ||
2379 | code = GET_CODE (x); | |
2380 | switch (code) | |
2381 | { | |
2382 | case PC: | |
2383 | case CC0: | |
2384 | case CONST: | |
2385 | case CONST_INT: | |
2386 | case CONST_DOUBLE: | |
2387 | case SYMBOL_REF: | |
2388 | case LABEL_REF: | |
2389 | case ADDR_VEC: | |
2390 | case ADDR_DIFF_VEC: | |
2391 | return 1; | |
2392 | ||
2393 | case MEM: | |
2394 | if (mem_last_set != 0) | |
2395 | return 0; | |
2396 | x = XEXP (x, 0); | |
2397 | goto repeat; | |
2398 | ||
2399 | case REG: | |
2400 | return ! TEST_BIT (reg_set_bitmap, REGNO (x)); | |
2401 | ||
2402 | default: | |
2403 | break; | |
2404 | } | |
2405 | ||
2406 | fmt = GET_RTX_FORMAT (code); | |
2407 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2408 | { | |
2409 | if (fmt[i] == 'e') | |
2410 | { | |
2411 | int not_set_p; | |
2412 | /* If we are about to do the last recursive call | |
2413 | needed at this level, change it into iteration. | |
2414 | This function is called enough to be worth it. */ | |
2415 | if (i == 0) | |
2416 | { | |
2417 | x = XEXP (x, 0); | |
2418 | goto repeat; | |
2419 | } | |
2420 | not_set_p = oprs_not_set_p (XEXP (x, i), insn); | |
2421 | if (! not_set_p) | |
2422 | return 0; | |
2423 | } | |
2424 | else if (fmt[i] == 'E') | |
2425 | { | |
2426 | int j; | |
2427 | for (j = 0; j < XVECLEN (x, i); j++) | |
2428 | { | |
2429 | int not_set_p = oprs_not_set_p (XVECEXP (x, i, j), insn); | |
2430 | if (! not_set_p) | |
2431 | return 0; | |
2432 | } | |
2433 | } | |
2434 | } | |
2435 | ||
2436 | return 1; | |
2437 | } | |
2438 | ||
2439 | /* Mark things set by a CALL. */ | |
2440 | ||
2441 | static void | |
2442 | mark_call (insn) | |
2443 | rtx insn; | |
2444 | { |
2445 | mem_last_set = INSN_CUID (insn); | |
2446 | } | |
2447 | ||
2448 | /* Mark things set by a SET. */ | |
2449 | ||
2450 | static void | |
2451 | mark_set (pat, insn) | |
2452 | rtx pat, insn; | |
2453 | { | |
2454 | rtx dest = SET_DEST (pat); | |
2455 | ||
2456 | while (GET_CODE (dest) == SUBREG | |
2457 | || GET_CODE (dest) == ZERO_EXTRACT | |
2458 | || GET_CODE (dest) == SIGN_EXTRACT | |
2459 | || GET_CODE (dest) == STRICT_LOW_PART) | |
2460 | dest = XEXP (dest, 0); | |
2461 | ||
2462 | if (GET_CODE (dest) == REG) | |
2463 | SET_BIT (reg_set_bitmap, REGNO (dest)); | |
2464 | else if (GET_CODE (dest) == MEM) | |
2465 | mem_last_set = INSN_CUID (insn); | |
2466 | ||
2467 | if (GET_CODE (SET_SRC (pat)) == CALL) | |
2468 | mark_call (insn); | |
2469 | } |
2470 | ||
2471 | /* Record things set by a CLOBBER. */ | |
2472 | ||
2473 | static void | |
2474 | mark_clobber (pat, insn) | |
2475 | rtx pat, insn; | |
2476 | { | |
2477 | rtx clob = XEXP (pat, 0); | |
2478 | ||
2479 | while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART) | |
2480 | clob = XEXP (clob, 0); | |
2481 | ||
2482 | if (GET_CODE (clob) == REG) | |
2483 | SET_BIT (reg_set_bitmap, REGNO (clob)); | |
2484 | else | |
2485 | mem_last_set = INSN_CUID (insn); | |
2486 | } | |
2487 | ||
2488 | /* Record things set by INSN. | |
2489 | This data is used by oprs_not_set_p. */ | |
2490 | ||
2491 | static void | |
2492 | mark_oprs_set (insn) | |
2493 | rtx insn; | |
2494 | { | |
2495 | rtx pat = PATTERN (insn); | |
2496 | ||
2497 | if (GET_CODE (pat) == SET) | |
2498 | mark_set (pat, insn); | |
2499 | else if (GET_CODE (pat) == PARALLEL) | |
2500 | { | |
2501 | int i; | |
2502 | ||
2503 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
2504 | { | |
2505 | rtx x = XVECEXP (pat, 0, i); | |
2506 | ||
2507 | if (GET_CODE (x) == SET) | |
2508 | mark_set (x, insn); | |
2509 | else if (GET_CODE (x) == CLOBBER) | |
2510 | mark_clobber (x, insn); | |
2511 | else if (GET_CODE (x) == CALL) | |
2512 | mark_call (insn); | |
2513 | } |
2514 | } | |
2515 | else if (GET_CODE (pat) == CLOBBER) | |
2516 | mark_clobber (pat, insn); | |
2517 | else if (GET_CODE (pat) == CALL) | |
2518 | mark_call (insn); | |
2519 | } | |
2520 | ||
2521 | \f |
2522 | /* Classic GCSE reaching definition support. */ | |
2523 | ||
2524 | /* Allocate reaching def variables. */ | |
2525 | ||
2526 | static void | |
2527 | alloc_rd_mem (n_blocks, n_insns) | |
2528 | int n_blocks, n_insns; | |
2529 | { | |
2530 | rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2531 | sbitmap_vector_zero (rd_kill, n_basic_blocks); | |
2532 | ||
2533 | rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2534 | sbitmap_vector_zero (rd_gen, n_basic_blocks); | |
2535 | ||
2536 | reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2537 | sbitmap_vector_zero (reaching_defs, n_basic_blocks); | |
2538 | ||
2539 | rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns); | |
2540 | sbitmap_vector_zero (rd_out, n_basic_blocks); | |
2541 | } | |
2542 | ||
2543 | /* Free reaching def variables. */ | |
2544 | ||
2545 | static void | |
2546 | free_rd_mem () | |
2547 | { | |
2548 | free (rd_kill); | |
2549 | free (rd_gen); | |
2550 | free (reaching_defs); | |
2551 | free (rd_out); | |
2552 | } | |
2553 | ||
2554 | /* Add INSN to the kills of BB. | |
2555 | REGNO, set in BB, is killed by INSN. */ | |
2556 | ||
2557 | static void | |
2558 | handle_rd_kill_set (insn, regno, bb) | |
2559 | rtx insn; | |
2560 | int regno, bb; | |
2561 | { | |
2562 | struct reg_set *this_reg = reg_set_table[regno]; | |
2563 | ||
2564 | while (this_reg) | |
2565 | { | |
2566 | if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn)) | |
2567 | SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn)); | |
2568 | this_reg = this_reg->next; | |
2569 | } | |
2570 | } | |
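/* Worked example (illustrative): if r5 is set in blocks 1, 3 and 4, then
   for a set of r5 in block 3, handle_rd_kill_set raises the rd_kill[3]
   bit for the setters in blocks 1 and 4 -- every setting of r5 outside
   block 3 is killed there.  */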
2571 | ||
2572 | /* Compute the set of kills for reaching definitions. */ | |
2573 | ||
2574 | static void | |
2575 | compute_kill_rd () | |
2576 | { | |
2577 | int bb,cuid; | |
2578 | ||
2579 | /* For each block | |
2580 | For each set bit in `gen' of the block (i.e. each insn which | |
2581 | generates a definition in the block) | |
2582 | Call the reg set by the insn corresponding to that bit regx | |
2583 | Look at the linked list starting at reg_set_table[regx] | |
2584 | For each setting of regx in the linked list, which is not in | |
2585 | this block | |
2586 | Set the bit in `kill' corresponding to that insn | |
2587 | */ |
2588 | ||
2589 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2590 | { | |
2591 | for (cuid = 0; cuid < max_cuid; cuid++) | |
2592 | { | |
2593 | if (TEST_BIT (rd_gen[bb], cuid)) | |
2594 | { | |
2595 | rtx insn = CUID_INSN (cuid); |
2596 | rtx pat = PATTERN (insn); | |
2597 | ||
2598 | if (GET_CODE (insn) == CALL_INSN) | |
2599 | { | |
2600 | int regno; |
2601 | ||
2602 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
2603 | { | |
2604 | if ((call_used_regs[regno] |
2605 | && regno != STACK_POINTER_REGNUM | |
2606 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2607 | && regno != HARD_FRAME_POINTER_REGNUM | |
2608 | #endif | |
2609 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2610 | && ! (regno == ARG_POINTER_REGNUM | |
2611 | && fixed_regs[regno]) | |
2612 | #endif | |
2613 | #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED) | |
2614 | && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic) | |
2615 | #endif | |
2616 | && regno != FRAME_POINTER_REGNUM) | |
2617 | || global_regs[regno]) | |
2618 | handle_rd_kill_set (insn, regno, bb); | |
2619 | } | |
2620 | } | |
2621 | |
2622 | if (GET_CODE (pat) == PARALLEL) | |
2623 | { | |
2624 | int i; | |
2625 | ||
2626 | /* We work backwards because ... */ | |
2627 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) | |
2628 | { | |
2629 | enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i)); | |
2630 | if ((code == SET || code == CLOBBER) | |
2631 | && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG) | |
2632 | handle_rd_kill_set (insn, | |
2633 | REGNO (XEXP (XVECEXP (pat, 0, i), 0)), | |
2634 | bb); | |
2635 | } | |
2636 | } | |
2637 | else if (GET_CODE (pat) == SET) | |
2638 | { | |
2639 | if (GET_CODE (SET_DEST (pat)) == REG) | |
2640 | { | |
2641 | /* Each setting of this register outside of this block | |
2642 | must be marked in the set of kills in this block. */ | |
2643 | handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb); | |
2644 | } | |
2645 | } | |
2646 | /* FIXME: CLOBBER? */ | |
2647 | } | |
2648 | } |
2649 | } | |
2650 | } | |
2651 | ||
2652 | /* Compute the reaching definitions as in | |
2653 | Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman, | |
2654 | Chapter 10. It is the same algorithm as used for computing available | |
2655 | expressions but applied to the gens and kills of reaching definitions. */ | |
2656 | ||
2657 | static void | |
2658 | compute_rd () | |
2659 | { | |
2660 | int bb, changed, passes; | |
2661 | ||
2662 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2663 | sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/); | |
2664 | ||
2665 | passes = 0; | |
2666 | changed = 1; | |
2667 | while (changed) | |
2668 | { | |
2669 | changed = 0; | |
2670 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2671 | { | |
2672 | sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb); | |
2673 | changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb], | |
2674 | reaching_defs[bb], rd_kill[bb]); | |
2675 | } | |
2676 | passes++; |
2677 | } | |
2678 | ||
2679 | if (gcse_file) | |
2680 | fprintf (gcse_file, "reaching def computation: %d passes\n", passes); | |
2681 | } | |
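/* Illustrative sketch (hypothetical, #if 0'd out) of the fixed point
   computed by compute_rd, using word-sized bitmasks instead of sbitmaps
   and at most one predecessor per block for brevity:

	out[bb] = gen[bb] | (in[bb] & ~kill[bb]),  in[bb] = U out[preds]  */
#if 0
#define TOY_N_BLOCKS 4
static unsigned toy_gen[TOY_N_BLOCKS], toy_kill[TOY_N_BLOCKS];
static unsigned toy_in[TOY_N_BLOCKS], toy_out[TOY_N_BLOCKS];
static int toy_pred[TOY_N_BLOCKS];   /* single pred per block, -1 = none */

static void
toy_compute_rd (void)
{
  int bb, changed = 1;

  for (bb = 0; bb < TOY_N_BLOCKS; bb++)
    toy_out[bb] = toy_gen[bb];

  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < TOY_N_BLOCKS; bb++)
        {
          unsigned new_out;

          toy_in[bb] = toy_pred[bb] >= 0 ? toy_out[toy_pred[bb]] : 0;
          new_out = toy_gen[bb] | (toy_in[bb] & ~toy_kill[bb]);
          if (new_out != toy_out[bb])
            toy_out[bb] = new_out, changed = 1;
        }
    }
}
#endif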
2682 | \f | |
2683 | /* Classic GCSE available expression support. */ | |
2684 | ||
2685 | /* Allocate memory for available expression computation. */ | |
2686 | ||
2687 | static void | |
2688 | alloc_avail_expr_mem (n_blocks, n_exprs) | |
2689 | int n_blocks, n_exprs; | |
2690 | { | |
2691 | ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
2692 | sbitmap_vector_zero (ae_kill, n_basic_blocks); | |
2693 | ||
2694 | ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
2695 | sbitmap_vector_zero (ae_gen, n_basic_blocks); | |
2696 | ||
2697 | ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
2698 | sbitmap_vector_zero (ae_in, n_basic_blocks); | |
2699 | ||
2700 | ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs); | |
2701 | sbitmap_vector_zero (ae_out, n_basic_blocks); | |
2702 | ||
2703 | u_bitmap = (sbitmap) sbitmap_alloc (n_exprs); | |
2704 | sbitmap_ones (u_bitmap); | |
2705 | } | |
2706 | ||
2707 | static void | |
2708 | free_avail_expr_mem () | |
2709 | { | |
2710 | free (ae_kill); | |
2711 | free (ae_gen); | |
2712 | free (ae_in); | |
2713 | free (ae_out); | |
2714 | free (u_bitmap); | |
2715 | } | |
2716 | ||
2717 | /* Compute the set of available expressions generated in each basic block. */ | |
2718 | ||
2719 | static void | |
2720 | compute_ae_gen () | |
2721 | { | |
2722 | int i; | |
2723 | ||
2724 | /* For each recorded occurrence of each expression, set ae_gen[bb][expr]. | |
2725 | This is all we have to do because an expression is not recorded if it | |
2726 | is not available, and the only expressions we want to work with are the | |
2727 | ones that are recorded. */ | |
2728 | ||
2729 | for (i = 0; i < expr_hash_table_size; i++) | |
2730 | { | |
2731 | struct expr *expr = expr_hash_table[i]; | |
2732 | while (expr != NULL) | |
2733 | { | |
2734 | struct occr *occr = expr->avail_occr; | |
2735 | while (occr != NULL) | |
2736 | { | |
2737 | SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index); | |
2738 | occr = occr->next; | |
2739 | } | |
2740 | expr = expr->next_same_hash; | |
2741 | } | |
2742 | } | |
2743 | } | |
2744 | ||
2745 | /* Return non-zero if expression X is killed in BB. */ | |
2746 | ||
2747 | static int | |
2748 | expr_killed_p (x, bb) | |
2749 | rtx x; | |
2750 | int bb; | |
2751 | { | |
2752 | int i; | |
2753 | enum rtx_code code; | |
2754 | const char *fmt; | |
2755 | |
2756 | /* repeat is used to turn tail-recursion into iteration. */ | |
2757 | repeat: | |
2758 | ||
2759 | if (x == 0) | |
2760 | return 1; | |
2761 | ||
2762 | code = GET_CODE (x); | |
2763 | switch (code) | |
2764 | { | |
2765 | case REG: | |
2766 | return TEST_BIT (reg_set_in_block[bb], REGNO (x)); | |
2767 | ||
2768 | case MEM: | |
2769 | if (mem_set_in_block[bb]) | |
2770 | return 1; | |
2771 | x = XEXP (x, 0); | |
2772 | goto repeat; | |
2773 | ||
2774 | case PC: | |
2775 | case CC0: /*FIXME*/ | |
2776 | case CONST: | |
2777 | case CONST_INT: | |
2778 | case CONST_DOUBLE: | |
2779 | case SYMBOL_REF: | |
2780 | case LABEL_REF: | |
2781 | case ADDR_VEC: | |
2782 | case ADDR_DIFF_VEC: | |
2783 | return 0; | |
2784 | ||
2785 | default: | |
2786 | break; | |
2787 | } | |
2788 | ||
2789 | i = GET_RTX_LENGTH (code) - 1; | |
2790 | fmt = GET_RTX_FORMAT (code); | |
2791 | for (; i >= 0; i--) | |
2792 | { | |
2793 | if (fmt[i] == 'e') | |
2794 | { | |
2795 | rtx tem = XEXP (x, i); | |
2796 | ||
2797 | /* If we are about to do the last recursive call | |
2798 | needed at this level, change it into iteration. | |
2799 | This function is called enough to be worth it. */ | |
2800 | if (i == 0) | |
2801 | { | |
2802 | x = tem; | |
2803 | goto repeat; | |
2804 | } | |
2805 | if (expr_killed_p (tem, bb)) | |
2806 | return 1; | |
2807 | } | |
2808 | else if (fmt[i] == 'E') | |
2809 | { | |
2810 | int j; | |
2811 | for (j = 0; j < XVECLEN (x, i); j++) | |
2812 | { | |
2813 | if (expr_killed_p (XVECEXP (x, i, j), bb)) | |
2814 | return 1; | |
2815 | } | |
2816 | } | |
2817 | } | |
2818 | ||
2819 | return 0; | |
2820 | } | |
2821 | ||
2822 | /* Compute the set of available expressions killed in each basic block. */ | |
2823 | ||
2824 | static void | |
2825 | compute_ae_kill () | |
2826 | { | |
2827 | int bb,i; | |
2828 | ||
2829 | for (bb = 0; bb < n_basic_blocks; bb++) | |
2830 | { | |
2831 | for (i = 0; i < expr_hash_table_size; i++) | |
2832 | { | |
2833 | struct expr *expr = expr_hash_table[i]; | |
2834 | ||
2835 | for ( ; expr != NULL; expr = expr->next_same_hash) | |
2836 | { | |
2837 | /* Skip EXPR if generated in this block. */ | |
2838 | if (TEST_BIT (ae_gen[bb], expr->bitmap_index)) | |
2839 | continue; | |
2840 | ||
2841 | if (expr_killed_p (expr->expr, bb)) | |
2842 | SET_BIT (ae_kill[bb], expr->bitmap_index); | |
2843 | } | |
2844 | } | |
2845 | } | |
2846 | } | |
2847 | ||
2848 | /* Compute available expressions. | |
2849 | ||
2850 | Implement the algorithm to find available expressions | |
2851 | as given in the Aho Sethi Ullman book, pages 627-631. */ | |
2852 | ||
2853 | static void | |
2854 | compute_available () | |
2855 | { | |
2856 | int bb, changed, passes; | |
2857 | ||
2858 | sbitmap_zero (ae_in[0]); | |
2859 | ||
2860 | sbitmap_copy (ae_out[0] /*dst*/, ae_gen[0] /*src*/); | |
2861 | ||
2862 | for (bb = 1; bb < n_basic_blocks; bb++) | |
2863 | sbitmap_difference (ae_out[bb], u_bitmap, ae_kill[bb]); | |
2864 | ||
2865 | passes = 0; | |
2866 | changed = 1; | |
2867 | while (changed) | |
2868 | { | |
2869 | changed = 0; | |
2870 | for (bb = 1; bb < n_basic_blocks; bb++) | |
2871 | { | |
2872 | sbitmap_intersection_of_preds (ae_in[bb], ae_out, bb); | |
2873 | changed |= sbitmap_union_of_diff (ae_out[bb], ae_gen[bb], |
2874 | ae_in[bb], ae_kill[bb]); | |
2875 | } | |
2876 | passes++; | |
2877 | } | |
2878 | ||
2879 | if (gcse_file) | |
2880 | fprintf (gcse_file, "avail expr computation: %d passes\n", passes); | |
2881 | } | |
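/* The equations iterated above, in dataflow form:

	ae_in[bb]  = intersection of ae_out[p] over all predecessors p
		     (ae_in of the entry block is empty)
	ae_out[bb] = ae_gen[bb] U (ae_in[bb] - ae_kill[bb])

   starting from the optimistic ae_out[bb] = U - ae_kill[bb] for every
   block but the entry, per the Aho/Sethi/Ullman reference cited at the
   top of this file (pp. 627-631).  */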
2882 | \f | |
2883 | /* Actually perform the Classic GCSE optimizations. */ | |
2884 | ||
2885 | /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB. | |
2886 | ||
2887 | CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself | |
2888 | as a positive reach. We want to do this when there are two computations | |
2889 | of the expression in the block. | |
2890 | ||
2891 | VISITED is a pointer to a working buffer for tracking which BB's have | |
2892 | been visited. It is NULL for the top-level call. | |
2893 | ||
2894 | We treat reaching expressions that go through blocks containing the same | |
2895 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
2896 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
2897 | 2 as not reaching. The intent is to improve the probability of finding | |
2898 | only one reaching expression and to reduce register lifetimes by picking | |
2899 | the closest such expression. */ | |
2900 | ||
2901 | static int | |
2902 | expr_reaches_here_p (occr, expr, bb, check_self_loop, visited) | |
2903 | struct occr *occr; | |
2904 | struct expr *expr; | |
2905 | int bb; | |
2906 | int check_self_loop; | |
2907 | char *visited; | |
2908 | { | |
2909 | edge pred; | |
2910 | |
2911 | if (visited == NULL) | |
2912 | { | |
2913 | visited = (char *) alloca (n_basic_blocks); | |
2914 | bzero (visited, n_basic_blocks); | |
2915 | } | |
2916 | ||
2917 | for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next) | |
2918 | { | |
2919 | int pred_bb = pred->src->index; | |
2920 | |
2921 | if (visited[pred_bb]) | |
2922 | { | |
2923 | /* This predecessor has already been visited. |
2924 | Nothing to do. */ | |
2925 | ; | |
2926 | } | |
2927 | else if (pred_bb == bb) | |
2928 | { | |
2929 | /* BB loops on itself. */ |
2930 | if (check_self_loop | |
2931 | && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index) | |
2932 | && BLOCK_NUM (occr->insn) == pred_bb) | |
2933 | return 1; | |
2934 | visited[pred_bb] = 1; | |
2935 | } | |
2936 | /* Ignore this predecessor if it kills the expression. */ |
2937 | else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index)) | |
2938 | visited[pred_bb] = 1; | |
2939 | /* Does this predecessor generate this expression? */ | |
2940 | else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)) | |
2941 | { | |
2942 | /* Is this the occurrence we're looking for? | |
2943 | Note that there's only one generating occurrence per block | |
2944 | so we just need to check the block number. */ | |
2945 | if (BLOCK_NUM (occr->insn) == pred_bb) | |
2946 | return 1; | |
2947 | visited[pred_bb] = 1; | |
2948 | } | |
2949 | /* Neither gen nor kill. */ | |
2950 | else | |
2951 | { | |
2952 | visited[pred_bb] = 1; |
2953 | if (expr_reaches_here_p (occr, expr, pred_bb, check_self_loop, visited)) | |
2954 | return 1; | |
2955 | } | |
2956 | } |
2957 | ||
2958 | /* All paths have been checked. */ | |
2959 | return 0; | |
2960 | } | |
2961 | ||
2962 | /* Return the instruction that computes EXPR that reaches INSN's basic block. | |
2963 | If there is more than one such instruction, return NULL. | |
2964 | ||
2965 | Called only by handle_avail_expr. */ | |
2966 | ||
2967 | static rtx | |
2968 | computing_insn (expr, insn) | |
2969 | struct expr *expr; | |
2970 | rtx insn; | |
2971 | { | |
2972 | int bb = BLOCK_NUM (insn); | |
2973 | ||
2974 | if (expr->avail_occr->next == NULL) | |
2975 | { | |
2976 | if (BLOCK_NUM (expr->avail_occr->insn) == bb) | |
2977 | { | |
2978 | /* The available expression is actually itself | |
2979 | (i.e. a loop in the flow graph) so do nothing. */ | |
2980 | return NULL; | |
2981 | } | |
2982 | /* (FIXME) Case where we found a pattern that was created by | |
2983 | an earlier substitution. */ | |
2984 | return expr->avail_occr->insn; | |
2985 | } | |
2986 | else | |
2987 | { | |
2988 | /* Pattern is computed more than once. | |
2989 | Search backwards from this insn to see how many of these | |
2990 | computations actually reach this insn. */ | |
2991 | struct occr *occr; | |
2992 | rtx insn_computes_expr = NULL; | |
2993 | int can_reach = 0; | |
2994 | ||
2995 | for (occr = expr->avail_occr; occr != NULL; occr = occr->next) | |
2996 | { | |
2997 | if (BLOCK_NUM (occr->insn) == bb) | |
2998 | { | |
2999 | /* The expression is generated in this block. | |
3000 | The only time we care about this is when the expression | |
3001 | is generated later in the block [and thus there's a loop]. | |
3002 | We let the normal cse pass handle the other cases. */ | |
3003 | if (INSN_CUID (insn) < INSN_CUID (occr->insn)) | |
3004 | { | |
3005 | if (expr_reaches_here_p (occr, expr, bb, 1, NULL)) | |
3006 | { | |
3007 | can_reach++; | |
3008 | if (can_reach > 1) | |
3009 | return NULL; | |
3010 | insn_computes_expr = occr->insn; | |
3011 | } | |
3012 | } | |
3013 | } | |
3014 | else /* Computation of the pattern outside this block. */ | |
3015 | { | |
3016 | if (expr_reaches_here_p (occr, expr, bb, 0, NULL)) | |
3017 | { | |
3018 | can_reach++; | |
3019 | if (can_reach > 1) | |
3020 | return NULL; | |
3021 | insn_computes_expr = occr->insn; | |
3022 | } | |
3023 | } | |
3024 | } | |
3025 | ||
3026 | if (insn_computes_expr == NULL) | |
3027 | abort (); | |
3028 | return insn_computes_expr; | |
3029 | } | |
3030 | } | |
3031 | ||
3032 | /* Return non-zero if the definition in DEF_INSN can reach INSN. | |
3033 | Only called by can_disregard_other_sets. */ | |
3034 | ||
3035 | static int | |
3036 | def_reaches_here_p (insn, def_insn) | |
3037 | rtx insn, def_insn; | |
3038 | { | |
3039 | rtx reg; | |
3040 | ||
3041 | if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn))) | |
3042 | return 1; | |
3043 | ||
3044 | if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn)) | |
3045 | { | |
3046 | if (INSN_CUID (def_insn) < INSN_CUID (insn)) | |
3047 | { | |
3048 | if (GET_CODE (PATTERN (def_insn)) == PARALLEL) |
3049 | return 1; | |
3050 | if (GET_CODE (PATTERN (def_insn)) == CLOBBER) | |
3051 | reg = XEXP (PATTERN (def_insn), 0); | |
3052 | else if (GET_CODE (PATTERN (def_insn)) == SET) | |
3053 | reg = SET_DEST (PATTERN (def_insn)); | |
3054 | else | |
3055 | abort (); | |
3056 | return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn); | |
3057 | } | |
3058 | else | |
3059 | return 0; | |
3060 | } | |
3061 | ||
3062 | return 0; | |
3063 | } | |
3064 | ||
3065 | /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. | |
3066 | The value returned is the number of definitions that reach INSN. | |
3067 | Returning a value of zero means that [maybe] more than one definition | |
3068 | reaches INSN and the caller can't perform whatever optimization it is | |
3069 | trying, i.e. it is always safe to return zero. */ | |
3070 | ||
3071 | static int | |
3072 | can_disregard_other_sets (addr_this_reg, insn, for_combine) | |
3073 | struct reg_set **addr_this_reg; | |
3074 | rtx insn; | |
3075 | int for_combine; | |
3076 | { | |
3077 | int number_of_reaching_defs = 0; | |
3078 | struct reg_set *this_reg = *addr_this_reg; | |
3079 | ||
3080 | while (this_reg) | |
3081 | { | |
3082 | if (def_reaches_here_p (insn, this_reg->insn)) | |
3083 | { | |
3084 | number_of_reaching_defs++; | |
3085 | /* Ignore parallels for now. */ | |
3086 | if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL) | |
3087 | return 0; | |
3088 | if (!for_combine | |
3089 | && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER | |
3090 | || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), | |
3091 | SET_SRC (PATTERN (insn))))) | |
3092 | { | |
3093 | /* A setting of the reg to a different value reaches INSN. */ | |
3094 | return 0; | |
3095 | } | |
3096 | if (number_of_reaching_defs > 1) | |
3097 | { | |
3098 | /* If the value the register is being set to here is | |
3099 | equal to the value it was set to previously, and this | |
3100 | setting reaches the insn we are trying to do the | |
3101 | substitution on, then we are ok. */ | |
3102 | ||
3103 | if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER) | |
3104 | return 0; | |
3105 | if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), | |
3106 | SET_SRC (PATTERN (insn)))) | |
3107 | return 0; | |
3108 | } | |
3109 | *addr_this_reg = this_reg; | |
3110 | } | |
3111 | ||
3112 | /* prev_this_reg = this_reg; */ | |
3113 | this_reg = this_reg->next; | |
3114 | } | |
3115 | ||
3116 | return number_of_reaching_defs; | |
3117 | } | |
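
/* For example, if INSN is (set (reg 70) (plus (reg 71) (reg 72)))
   and the definitions of reg 65 that reach it are

       insn 4: (set (reg 65) (plus (reg 71) (reg 72)))
       insn 9: (set (reg 65) (plus (reg 71) (reg 72)))

   then 2 is returned: every reaching definition assigns reg 65 the
   same value as INSN's source, so reg 65 can stand in for the
   expression.  Had insn 9 set reg 65 to anything else (or been a
   CLOBBER or PARALLEL), zero would be returned.  Insn and register
   numbers are illustrative only.  */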
3118 | ||
3119 | /* Expression computed by insn is available and the substitution is legal, | |
3120 | so try to perform the substitution. | |
3121 | ||
3122 | The result is non-zero if any changes were made. */ | |
3123 | ||
3124 | static int | |
3125 | handle_avail_expr (insn, expr) | |
3126 | rtx insn; | |
3127 | struct expr *expr; | |
3128 | { | |
3129 | rtx pat, insn_computes_expr; | |
3130 | rtx to; | |
3131 | struct reg_set *this_reg; | |
3132 | int found_setting, use_src; | |
3133 | int changed = 0; | |
3134 | ||
3135 | /* We only handle the case where one computation of the expression | |
3136 | reaches this instruction. */ | |
3137 | insn_computes_expr = computing_insn (expr, insn); | |
3138 | if (insn_computes_expr == NULL) | |
3139 | return 0; | |
3140 | ||
3141 | found_setting = 0; | |
3142 | use_src = 0; | |
3143 | ||
3144 | /* At this point we know only one computation of EXPR outside of this | |
3145 | block reaches this insn. Now try to find a register that the | |
3146 | expression is computed into. */ | |
3147 | ||
3148 | if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG) | |
3149 | { | |
3150 | /* This is the case when the available expression that reaches | |
3151 | here has already been handled as an available expression. */ | |
3152 | int regnum_for_replacing = REGNO (SET_SRC (PATTERN (insn_computes_expr))); | |
3153 | /* If the register was created by GCSE we can't use `reg_set_table', | |
3154 | but we know it's set only once. */ | |
3155 | if (regnum_for_replacing >= max_gcse_regno | |
3156 | /* If the register the expression is computed into is set only once, | |
3157 | or only one set reaches this insn, we can use it. */ | |
3158 | || (((this_reg = reg_set_table[regnum_for_replacing]), | |
3159 | this_reg->next == NULL) | |
3160 | || can_disregard_other_sets (&this_reg, insn, 0))) | |
3161 | { | |
3162 | use_src = 1; | |
3163 | found_setting = 1; | |
3164 | } | |
3165 | } | |
3166 | ||
3167 | if (!found_setting) | |
3168 | { | |
3169 | int regnum_for_replacing = REGNO (SET_DEST (PATTERN (insn_computes_expr))); | |
3170 | /* This shouldn't happen. */ | |
3171 | if (regnum_for_replacing >= max_gcse_regno) | |
3172 | abort (); | |
3173 | this_reg = reg_set_table[regnum_for_replacing]; | |
3174 | /* If the register the expression is computed into is set only once, | |
3175 | or only one set reaches this insn, use it. */ | |
3176 | if (this_reg->next == NULL | |
3177 | || can_disregard_other_sets (&this_reg, insn, 0)) | |
3178 | found_setting = 1; | |
3179 | } | |
3180 | ||
3181 | if (found_setting) | |
3182 | { | |
3183 | pat = PATTERN (insn); | |
3184 | if (use_src) | |
3185 | to = SET_SRC (PATTERN (insn_computes_expr)); | |
3186 | else | |
3187 | to = SET_DEST (PATTERN (insn_computes_expr)); | |
3188 | changed = validate_change (insn, &SET_SRC (pat), to, 0); | |
3189 | ||
3190 | /* We should be able to ignore the return code from validate_change but | |
3191 | to play it safe we check. */ | |
3192 | if (changed) | |
3193 | { | |
3194 | gcse_subst_count++; | |
3195 | if (gcse_file != NULL) | |
3196 | { | |
3197 | fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d %s insn %d\n", | |
3198 | INSN_UID (insn), REGNO (to), | |
3199 | use_src ? "from" : "set in", | |
3200 | INSN_UID (insn_computes_expr)); | |
3201 | } | |
3202 | ||
3203 | } | |
3204 | } | |
3205 | /* The register that the expr is computed into is set more than once. */ | |
3206 | else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse*/) | |
3207 | { | |
3208 | /* Insert an insn after the insn computing the expression that | |
3209 | copies the reg set there (call it REGB) into a new pseudo | |
3210 | register (call it REGN). From there until the end of the basic | |
3211 | block, or until REGB is set again, replace all uses of REGB with REGN. */ | |
3212 | rtx new_insn; | |
3213 | ||
3214 | to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr)))); | |
3215 | ||
3216 | /* Generate the new insn. */ | |
3217 | /* ??? If the change fails, we return 0, even though we created | |
3218 | an insn. I think this is ok. */ | |
9e6a5703 JC |
3219 | new_insn |
3220 | = emit_insn_after (gen_rtx_SET (VOIDmode, to, | |
3221 | SET_DEST (PATTERN (insn_computes_expr))), | |
7506f491 DE |
3222 | insn_computes_expr); |
3223 | /* Keep block number table up to date. */ | |
3224 | set_block_num (new_insn, BLOCK_NUM (insn_computes_expr)); | |
3225 | /* Keep register set table up to date. */ | |
3226 | record_one_set (REGNO (to), new_insn); | |
3227 | ||
3228 | gcse_create_count++; | |
3229 | if (gcse_file != NULL) | |
ac7c5af5 | 3230 | { |
7506f491 DE |
3231 | fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d, computed in insn %d,\n", |
3232 | INSN_UID (NEXT_INSN (insn_computes_expr)), | |
3233 | REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))), | |
3234 | INSN_UID (insn_computes_expr)); | |
3235 | fprintf (gcse_file, " into newly allocated reg %d\n", REGNO (to)); | |
ac7c5af5 | 3236 | } |
7506f491 DE |
3237 | |
3238 | pat = PATTERN (insn); | |
3239 | ||
3240 | /* Do register replacement for INSN. */ | |
3241 | changed = validate_change (insn, &SET_SRC (pat), | |
3242 | SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr))), | |
3243 | 0); | |
3244 | ||
3245 | /* We should be able to ignore the return code from validate_change but | |
3246 | to play it safe we check. */ | |
3247 | if (changed) | |
3248 | { | |
3249 | gcse_subst_count++; | |
3250 | if (gcse_file != NULL) | |
3251 | { | |
3252 | fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d set in insn %d\n", | |
3253 | INSN_UID (insn), | |
3254 | REGNO (SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr)))), | |
3255 | INSN_UID (insn_computes_expr)); | |
3256 | } | |
3257 | ||
3258 | } | |
3259 | } | |
3260 | ||
3261 | return changed; | |
3262 | } | |
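
/* For example, given

       insn 5, block 2:  (set (reg 65) (plus (reg 66) (reg 67)))
       insn 12, block 4: (set (reg 70) (plus (reg 66) (reg 67)))

   where insn 5 holds the only computation of the expression that
   reaches insn 12, handle_avail_expr rewrites insn 12 into

       (set (reg 70) (reg 65))

   and leaves it to later passes to propagate or delete the copy.
   When reg 65 is set more than once, a new pseudo is created right
   after insn 5 to carry the value, and that pseudo is used instead.
   Insn, block and register numbers above are illustrative only.  */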
3263 | ||
3264 | /* Perform classic GCSE. | |
3265 | This is called by one_classic_gcse_pass after all the dataflow analysis | |
3266 | has been done. | |
3267 | ||
3268 | The result is non-zero if a change was made. */ | |
3269 | ||
3270 | static int | |
3271 | classic_gcse () | |
3272 | { | |
3273 | int bb, changed; | |
3274 | rtx insn; | |
3275 | ||
3276 | /* Note we start at block 1. */ | |
3277 | ||
3278 | changed = 0; | |
3279 | for (bb = 1; bb < n_basic_blocks; bb++) | |
3280 | { | |
3281 | /* Reset tables used to keep track of what's still valid [since the | |
3282 | start of the block]. */ | |
3283 | reset_opr_set_tables (); | |
3284 | ||
3b413743 RH |
3285 | for (insn = BLOCK_HEAD (bb); |
3286 | insn != NULL && insn != NEXT_INSN (BLOCK_END (bb)); | |
7506f491 DE |
3287 | insn = NEXT_INSN (insn)) |
3288 | { | |
3289 | /* Is insn of form (set (pseudo-reg) ...)? */ | |
3290 | ||
3291 | if (GET_CODE (insn) == INSN | |
3292 | && GET_CODE (PATTERN (insn)) == SET | |
3293 | && GET_CODE (SET_DEST (PATTERN (insn))) == REG | |
3294 | && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER) | |
3295 | { | |
3296 | rtx pat = PATTERN (insn); | |
3297 | rtx src = SET_SRC (pat); | |
3298 | struct expr *expr; | |
3299 | ||
3300 | if (want_to_gcse_p (src) | |
3301 | /* Is the expression recorded? */ | |
3302 | && ((expr = lookup_expr (src)) != NULL) | |
3303 | /* Is the expression available [at the start of the | |
3304 | block]? */ | |
3305 | && TEST_BIT (ae_in[bb], expr->bitmap_index) | |
3306 | /* Are the operands unchanged since the start of the | |
3307 | block? */ | |
3308 | && oprs_not_set_p (src, insn)) | |
3309 | changed |= handle_avail_expr (insn, expr); | |
3310 | } | |
3311 | ||
3312 | /* Keep track of everything modified by this insn. */ | |
3313 | /* ??? Need to be careful w.r.t. mods done to INSN. */ | |
3314 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
3315 | mark_oprs_set (insn); | |
ac7c5af5 | 3316 | } |
7506f491 DE |
3317 | } |
3318 | ||
3319 | return changed; | |
3320 | } | |
3321 | ||
3322 | /* Top level routine to perform one classic GCSE pass. | |
3323 | ||
3324 | Return non-zero if a change was made. */ | |
3325 | ||
3326 | static int | |
b5ce41ff | 3327 | one_classic_gcse_pass (pass) |
7506f491 DE |
3328 | int pass; |
3329 | { | |
3330 | int changed = 0; | |
3331 | ||
3332 | gcse_subst_count = 0; | |
3333 | gcse_create_count = 0; | |
3334 | ||
3335 | alloc_expr_hash_table (max_cuid); | |
3336 | alloc_rd_mem (n_basic_blocks, max_cuid); | |
b5ce41ff | 3337 | compute_expr_hash_table (); |
7506f491 DE |
3338 | if (gcse_file) |
3339 | dump_hash_table (gcse_file, "Expression", expr_hash_table, | |
3340 | expr_hash_table_size, n_exprs); | |
3341 | if (n_exprs > 0) | |
3342 | { | |
3343 | compute_kill_rd (); | |
3344 | compute_rd (); | |
3345 | alloc_avail_expr_mem (n_basic_blocks, n_exprs); | |
3346 | compute_ae_gen (); | |
3347 | compute_ae_kill (); | |
3348 | compute_available (); | |
3349 | changed = classic_gcse (); | |
3350 | free_avail_expr_mem (); | |
3351 | } | |
3352 | free_rd_mem (); | |
3353 | free_expr_hash_table (); | |
3354 | ||
3355 | if (gcse_file) | |
3356 | { | |
3357 | fprintf (gcse_file, "\n"); | |
3358 | fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n", | |
3359 | current_function_name, pass, | |
3360 | bytes_used, gcse_subst_count, gcse_create_count); | |
3361 | } | |
3362 | ||
3363 | return changed; | |
3364 | } | |
3365 | \f | |
3366 | /* Compute copy/constant propagation working variables. */ | |
3367 | ||
3368 | /* Local properties of assignments. */ | |
3369 | ||
3370 | static sbitmap *cprop_pavloc; | |
3371 | static sbitmap *cprop_absaltered; | |
3372 | ||
3373 | /* Global properties of assignments (computed from the local properties). */ | |
3374 | ||
3375 | static sbitmap *cprop_avin; | |
3376 | static sbitmap *cprop_avout; | |
3377 | ||
3378 | /* Allocate vars used for copy/const propagation. | |
3379 | N_BLOCKS is the number of basic blocks. | |
3380 | N_SETS is the number of sets. */ | |
3381 | ||
3382 | static void | |
3383 | alloc_cprop_mem (n_blocks, n_sets) | |
3384 | int n_blocks, n_sets; | |
3385 | { | |
3386 | cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets); | |
3387 | cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets); | |
3388 | ||
3389 | cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets); | |
3390 | cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets); | |
3391 | } | |
3392 | ||
3393 | /* Free vars used by copy/const propagation. */ | |
3394 | ||
3395 | static void | |
3396 | free_cprop_mem () | |
3397 | { | |
3398 | free (cprop_pavloc); | |
3399 | free (cprop_absaltered); | |
3400 | free (cprop_avin); | |
3401 | free (cprop_avout); | |
3402 | } | |
3403 | ||
7506f491 DE |
3404 | /* For each block, compute whether X is transparent. |
3405 | X is either an expression or an assignment [though we don't care which, | |
3406 | for this context an assignment is treated as an expression]. | |
3407 | For each block where an element of X is modified, set (SET_P == 1) or reset | |
3408 | (SET_P == 0) the INDX bit in BMAP. */ | |
3409 | ||
3410 | static void | |
3411 | compute_transp (x, indx, bmap, set_p) | |
3412 | rtx x; | |
3413 | int indx; | |
3414 | sbitmap *bmap; | |
3415 | int set_p; | |
3416 | { | |
3417 | int bb, i; | |
3418 | enum rtx_code code; | |
6f7d635c | 3419 | const char *fmt; |
7506f491 DE |
3420 | |
3421 | /* repeat is used to turn tail-recursion into iteration. */ | |
3422 | repeat: | |
3423 | ||
3424 | if (x == 0) | |
3425 | return; | |
3426 | ||
3427 | code = GET_CODE (x); | |
3428 | switch (code) | |
3429 | { | |
3430 | case REG: | |
3431 | { | |
3432 | reg_set *r; | |
3433 | int regno = REGNO (x); | |
3434 | ||
3435 | if (set_p) | |
3436 | { | |
3437 | if (regno < FIRST_PSEUDO_REGISTER) | |
3438 | { | |
3439 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3440 | if (TEST_BIT (reg_set_in_block[bb], regno)) | |
3441 | SET_BIT (bmap[bb], indx); | |
3442 | } | |
3443 | else | |
3444 | { | |
3445 | for (r = reg_set_table[regno]; r != NULL; r = r->next) | |
3446 | { | |
3447 | bb = BLOCK_NUM (r->insn); | |
3448 | SET_BIT (bmap[bb], indx); | |
3449 | } | |
3450 | } | |
3451 | } | |
3452 | else | |
3453 | { | |
3454 | if (regno < FIRST_PSEUDO_REGISTER) | |
3455 | { | |
3456 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3457 | if (TEST_BIT (reg_set_in_block[bb], regno)) | |
3458 | RESET_BIT (bmap[bb], indx); | |
3459 | } | |
3460 | else | |
3461 | { | |
3462 | for (r = reg_set_table[regno]; r != NULL; r = r->next) | |
3463 | { | |
3464 | bb = BLOCK_NUM (r->insn); | |
3465 | RESET_BIT (bmap[bb], indx); | |
3466 | } | |
3467 | } | |
3468 | } | |
3469 | return; | |
3470 | } | |
3471 | ||
3472 | case MEM: | |
3473 | if (set_p) | |
3474 | { | |
3475 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3476 | if (mem_set_in_block[bb]) | |
3477 | SET_BIT (bmap[bb], indx); | |
3478 | } | |
3479 | else | |
3480 | { | |
3481 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3482 | if (mem_set_in_block[bb]) | |
3483 | RESET_BIT (bmap[bb], indx); | |
3484 | } | |
3485 | x = XEXP (x, 0); | |
3486 | goto repeat; | |
3487 | ||
3488 | case PC: | |
3489 | case CC0: /*FIXME*/ | |
3490 | case CONST: | |
3491 | case CONST_INT: | |
3492 | case CONST_DOUBLE: | |
3493 | case SYMBOL_REF: | |
3494 | case LABEL_REF: | |
3495 | case ADDR_VEC: | |
3496 | case ADDR_DIFF_VEC: | |
3497 | return; | |
3498 | ||
3499 | default: | |
3500 | break; | |
3501 | } | |
3502 | ||
3503 | i = GET_RTX_LENGTH (code) - 1; | |
3504 | fmt = GET_RTX_FORMAT (code); | |
3505 | for (; i >= 0; i--) | |
3506 | { | |
3507 | if (fmt[i] == 'e') | |
3508 | { | |
3509 | rtx tem = XEXP (x, i); | |
3510 | ||
3511 | /* If we are about to do the last recursive call | |
3512 | needed at this level, change it into iteration. | |
3513 | This function is called enough to be worth it. */ | |
3514 | if (i == 0) | |
3515 | { | |
3516 | x = tem; | |
3517 | goto repeat; | |
3518 | } | |
3519 | compute_transp (tem, indx, bmap, set_p); | |
3520 | } | |
3521 | else if (fmt[i] == 'E') | |
3522 | { | |
3523 | int j; | |
3524 | for (j = 0; j < XVECLEN (x, i); j++) | |
3525 | compute_transp (XVECEXP (x, i, j), indx, bmap, set_p); | |
3526 | } | |
3527 | } | |
3528 | } | |
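
/* For example, for the expression (plus (reg 68) (mem (reg 69))),
   the walk above visits reg 68, the MEM, and then reg 69: every
   block that sets reg 68 or reg 69, and every block that stores to
   memory at all (no alias analysis is attempted here), has its INDX
   bit set (or, with SET_P == 0, reset) in BMAP.  In the remaining
   blocks the expression is transparent.  The register numbers are
   illustrative only.  */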
3529 | ||
b5ce41ff JL |
3530 | /* Compute the available expressions at the start and end of each |
3531 | basic block for cprop. This particular dataflow equation is | |
3532 | used often enough that we might want to generalize it and make it | |
3533 | a subroutine for other global optimizations that need available | |
3534 | in/out information. */ | |
7506f491 DE |
3535 | static void |
3536 | compute_cprop_avinout () | |
3537 | { | |
3538 | int bb, changed, passes; | |
3539 | ||
3540 | sbitmap_zero (cprop_avin[0]); | |
3541 | sbitmap_vector_ones (cprop_avout, n_basic_blocks); | |
3542 | ||
3543 | passes = 0; | |
3544 | changed = 1; | |
3545 | while (changed) | |
3546 | { | |
3547 | changed = 0; | |
3548 | for (bb = 0; bb < n_basic_blocks; bb++) | |
ac7c5af5 | 3549 | { |
7506f491 | 3550 | if (bb != 0) |
36349f8b | 3551 | sbitmap_intersection_of_preds (cprop_avin[bb], cprop_avout, bb); |
7506f491 DE |
3552 | changed |= sbitmap_union_of_diff (cprop_avout[bb], |
3553 | cprop_pavloc[bb], | |
3554 | cprop_avin[bb], | |
3555 | cprop_absaltered[bb]); | |
3556 | } | |
3557 | passes++; | |
3558 | } | |
3559 | ||
3560 | if (gcse_file) | |
3561 | fprintf (gcse_file, "cprop avail expr computation: %d passes\n", passes); | |
3562 | } | |
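
/* The equations iterated to a fixed point above are, in terms of the
   local properties:

       AVIN(entry) = {}
       AVIN(b)     = intersection over p in pred(b) of AVOUT(p)
       AVOUT(b)    = PAVLOC(b) union (AVIN(b) - ABSALTERED(b))

   starting from AVOUT = all ones.  The sketch below restates the
   same iteration with plain bit masks instead of sbitmaps; it is
   illustrative only (hence the #if 0) and assumes at most 4
   predecessors per block and one bit per tracked set.  */
#if 0
static void
avinout_sketch (n_blocks, n_preds, preds, pavloc, absaltered, avin, avout)
     int n_blocks;
     int *n_preds;
     int (*preds)[4];
     unsigned long *pavloc, *absaltered, *avin, *avout;
{
  int bb, i, changed = 1;

  /* Nothing is available on entry; start everything else at all ones.  */
  avin[0] = 0;
  for (bb = 0; bb < n_blocks; bb++)
    avout[bb] = ~0UL;

  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_blocks; bb++)
	{
	  unsigned long out;

	  /* AVIN(b) = intersection of AVOUT over all predecessors.  */
	  if (bb != 0)
	    {
	      avin[bb] = ~0UL;
	      for (i = 0; i < n_preds[bb]; i++)
		avin[bb] &= avout[preds[bb][i]];
	    }

	  /* AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b)).  */
	  out = pavloc[bb] | (avin[bb] & ~absaltered[bb]);
	  if (out != avout[bb])
	    changed = 1;
	  avout[bb] = out;
	}
    }
}
#endif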
3563 | ||
3564 | /* Top level routine to do the dataflow analysis needed by copy/const | |
3565 | propagation. */ | |
3566 | ||
3567 | static void | |
3568 | compute_cprop_data () | |
3569 | { | |
b5ce41ff | 3570 | compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1); |
7506f491 DE |
3571 | compute_cprop_avinout (); |
3572 | } | |
3573 | \f | |
3574 | /* Copy/constant propagation. */ | |
3575 | ||
7506f491 DE |
3576 | /* Maximum number of register uses in an insn that we handle. */ |
3577 | #define MAX_USES 8 | |
3578 | ||
3579 | /* Table of uses found in an insn. | |
3580 | Allocated statically to avoid alloc/free complexity and overhead. */ | |
3581 | static struct reg_use reg_use_table[MAX_USES]; | |
3582 | ||
3583 | /* Index into `reg_use_table' while building it. */ | |
3584 | static int reg_use_count; | |
3585 | ||
3586 | /* Set up a list of register numbers used in INSN. | |
3587 | The found uses are stored in `reg_use_table'. | |
3588 | `reg_use_count' is initialized to zero before entry, and | |
3589 | contains the number of uses in the table upon exit. | |
3590 | ||
3591 | ??? If a register appears multiple times we will record it multiple | |
3592 | times. This doesn't hurt anything but it will slow things down. */ | |
3593 | ||
3594 | static void | |
3595 | find_used_regs (x) | |
3596 | rtx x; | |
3597 | { | |
3598 | int i; | |
3599 | enum rtx_code code; | |
6f7d635c | 3600 | const char *fmt; |
7506f491 DE |
3601 | |
3602 | /* repeat is used to turn tail-recursion into iteration. */ | |
3603 | repeat: | |
3604 | ||
3605 | if (x == 0) | |
3606 | return; | |
3607 | ||
3608 | code = GET_CODE (x); | |
3609 | switch (code) | |
3610 | { | |
3611 | case REG: | |
3612 | if (reg_use_count == MAX_USES) | |
3613 | return; | |
3614 | reg_use_table[reg_use_count].reg_rtx = x; | |
3615 | reg_use_count++; | |
3616 | return; | |
3617 | ||
3618 | case MEM: | |
3619 | x = XEXP (x, 0); | |
3620 | goto repeat; | |
3621 | ||
3622 | case PC: | |
3623 | case CC0: | |
3624 | case CONST: | |
3625 | case CONST_INT: | |
3626 | case CONST_DOUBLE: | |
3627 | case SYMBOL_REF: | |
3628 | case LABEL_REF: | |
3629 | case CLOBBER: | |
3630 | case ADDR_VEC: | |
3631 | case ADDR_DIFF_VEC: | |
3632 | case ASM_INPUT: /*FIXME*/ | |
3633 | return; | |
3634 | ||
3635 | case SET: | |
3636 | if (GET_CODE (SET_DEST (x)) == MEM) | |
3637 | find_used_regs (SET_DEST (x)); | |
3638 | x = SET_SRC (x); | |
3639 | goto repeat; | |
3640 | ||
3641 | default: | |
3642 | break; | |
3643 | } | |
3644 | ||
3645 | /* Recursively scan the operands of this expression. */ | |
3646 | ||
3647 | fmt = GET_RTX_FORMAT (code); | |
3648 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3649 | { | |
3650 | if (fmt[i] == 'e') | |
3651 | { | |
3652 | /* If we are about to do the last recursive call | |
3653 | needed at this level, change it into iteration. | |
3654 | This function is called enough to be worth it. */ | |
3655 | if (i == 0) | |
3656 | { | |
3657 | x = XEXP (x, 0); | |
3658 | goto repeat; | |
3659 | } | |
3660 | find_used_regs (XEXP (x, i)); | |
3661 | } | |
3662 | else if (fmt[i] == 'E') | |
3663 | { | |
3664 | int j; | |
3665 | for (j = 0; j < XVECLEN (x, i); j++) | |
3666 | find_used_regs (XVECEXP (x, i, j)); | |
3667 | } | |
3668 | } | |
3669 | } | |
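
/* For example, scanning the insn

       (set (mem (reg 80)) (plus (reg 81) (reg 82)))

   records reg 80, reg 82 and reg 81 in reg_use_table: the SET case
   scans the destination only when it is a MEM (the address is a use,
   not a set), then scans the source.  Register numbers here are
   illustrative only.  */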
3670 | ||
3671 | /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO. | |
3672 | Returns non-zero if successful. */ | |
3673 | ||
3674 | static int | |
3675 | try_replace_reg (from, to, insn) | |
3676 | rtx from, to, insn; | |
3677 | { | |
e78d9500 JL |
3678 | /* If this fails we could try to simplify the result of the |
3679 | replacement and attempt to recognize the simplified insn. | |
3680 | ||
3681 | But we need a general simplify_rtx that doesn't have pass | |
3682 | specific state variables. I'm not aware of one at the moment. */ | |
7506f491 DE |
3683 | return validate_replace_src (from, to, insn); |
3684 | } | |
3685 | ||
3686 | /* Find a set of REGNO that is available on entry to INSN's block. | |
3687 | Returns NULL if not found. */ | |
3688 | ||
3689 | static struct expr * | |
3690 | find_avail_set (regno, insn) | |
3691 | int regno; | |
3692 | rtx insn; | |
3693 | { | |
cafba495 BS |
3694 | /* SET1 contains the last set found that can be returned to the caller for |
3695 | use in a substitution. */ | |
3696 | struct expr *set1 = 0; | |
3697 | ||
3698 | /* Loops are not possible here. To get a loop we would need two sets | |
3699 | available at the start of the block containing INSN. I.e. we would | |
3700 | need two sets like this available at the start of the block: | |
3701 | ||
3702 | (set (reg X) (reg Y)) | |
3703 | (set (reg Y) (reg X)) | |
3704 | ||
3705 | This cannot happen since the set of (reg Y) would have killed the | |
3706 | set of (reg X) making it unavailable at the start of this block. */ | |
3707 | while (1) | |
3708 | { | |
3709 | rtx src; | |
3710 | struct expr *set = lookup_set (regno, NULL_RTX); | |
3711 | ||
3712 | /* Find a set that is available at the start of the block | |
3713 | which contains INSN. */ | |
3714 | while (set) | |
3715 | { | |
3716 | if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index)) | |
3717 | break; | |
3718 | set = next_set (regno, set); | |
3719 | } | |
7506f491 | 3720 | |
cafba495 BS |
3721 | /* If no available set was found we've reached the end of the |
3722 | (possibly empty) copy chain. */ | |
3723 | if (set == 0) | |
3724 | break; | |
3725 | ||
3726 | if (GET_CODE (set->expr) != SET) | |
3727 | abort (); | |
3728 | ||
3729 | src = SET_SRC (set->expr); | |
3730 | ||
3731 | /* We know the set is available. | |
3732 | Now check that SRC is ANTLOC (i.e. none of the source operands | |
3733 | have changed since the start of the block). | |
3734 | ||
3735 | If the source operand changed, we may still use it for the next | |
3736 | iteration of this loop, but we may not use it for substitutions. */ | |
3737 | if (CONSTANT_P (src) || oprs_not_set_p (src, insn)) | |
3738 | set1 = set; | |
3739 | ||
3740 | /* If the source of the set is anything except a register, then | |
3741 | we have reached the end of the copy chain. */ | |
3742 | if (GET_CODE (src) != REG) | |
7506f491 | 3743 | break; |
7506f491 | 3744 | |
cafba495 BS |
3745 | /* Follow the copy chain, i.e. start another iteration of the loop | |
3746 | and see if we have an available copy into SRC. */ | |
3747 | regno = REGNO (src); | |
3748 | } | |
3749 | ||
3750 | /* SET1 holds the last set that was available and anticipatable at | |
3751 | INSN. */ | |
3752 | return set1; | |
7506f491 DE |
3753 | } |
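
/* For example, if both

       (set (reg 66) (reg 67))
       (set (reg 67) (const_int 4))

   are available at the start of INSN's block, a lookup of reg 66
   finds the copy from reg 67 and then follows the chain to reg 67's
   set; its source is a constant, so the chain ends there, and that
   set is the last one recorded in SET1 (assuming it is also
   anticipatable at INSN).  Propagation will then replace reg 66 in
   INSN with (const_int 4) rather than with reg 67.  Register numbers
   are illustrative only.  */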
3754 | ||
abd535b6 BS |
3755 | /* Subroutine of cprop_insn that tries to propagate constants into |
3756 | JUMP_INSNS. INSN must be a conditional jump; COPY is a copy of it | |
3757 | that we can use for substitutions. | |
3758 | REG_USED is the use we will try to replace, SRC is the constant we | |
3759 | will try to substitute for it. | |
3760 | Returns nonzero if a change was made. */ | |
3761 | static int | |
3762 | cprop_jump (insn, copy, reg_used, src) | |
3763 | rtx insn, copy; | |
3764 | struct reg_use *reg_used; | |
3765 | rtx src; | |
3766 | { | |
3767 | rtx set = PATTERN (copy); | |
3768 | rtx temp; | |
3769 | ||
3770 | /* Replace the register with the appropriate constant. */ | |
3771 | replace_rtx (SET_SRC (set), reg_used->reg_rtx, src); | |
3772 | ||
3773 | temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)), | |
3774 | GET_MODE (SET_SRC (set)), | |
3775 | GET_MODE (XEXP (SET_SRC (set), 0)), | |
3776 | XEXP (SET_SRC (set), 0), | |
3777 | XEXP (SET_SRC (set), 1), | |
3778 | XEXP (SET_SRC (set), 2)); | |
3779 | ||
3780 | /* If no simplification can be made, then try the next | |
3781 | register. */ | |
3782 | if (temp == 0) | |
3783 | return 0; | |
3784 | ||
3785 | SET_SRC (set) = temp; | |
3786 | ||
3787 | /* That may have changed the structure of TEMP, so | |
3788 | force it to be rerecognized if it has not turned | |
3789 | into a nop or unconditional jump. */ | |
3790 | ||
3791 | INSN_CODE (copy) = -1; | |
3792 | if ((SET_DEST (set) == pc_rtx | |
3793 | && (SET_SRC (set) == pc_rtx | |
3794 | || GET_CODE (SET_SRC (set)) == LABEL_REF)) | |
3795 | || recog (PATTERN (copy), copy, NULL) >= 0) | |
3796 | { | |
3797 | /* This has either become an unconditional jump | |
3798 | or a nop-jump. We'd like to delete nop jumps | |
3799 | here, but doing so confuses gcse. So we just | |
3800 | make the replacement and let later passes | |
3801 | sort things out. */ | |
3802 | PATTERN (insn) = set; | |
3803 | INSN_CODE (insn) = -1; | |
3804 | ||
3805 | /* One less use of the label this insn used to jump to | |
3806 | if we turned this into a NOP jump. */ | |
3807 | if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0) | |
3808 | --LABEL_NUSES (JUMP_LABEL (insn)); | |
3809 | ||
3810 | /* If this has turned into an unconditional jump, | |
3811 | then put a barrier after it so that the unreachable | |
3812 | code will be deleted. */ | |
3813 | if (GET_CODE (SET_SRC (set)) == LABEL_REF) | |
3814 | emit_barrier_after (insn); | |
3815 | ||
3816 | run_jump_opt_after_gcse = 1; | |
3817 | ||
3818 | const_prop_count++; | |
3819 | if (gcse_file != NULL) | |
3820 | { | |
3821 | int regno = REGNO (reg_used->reg_rtx); | |
3822 | fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ", | |
3823 | regno, INSN_UID (insn)); | |
3824 | print_rtl (gcse_file, src); | |
3825 | fprintf (gcse_file, "\n"); | |
3826 | } | |
3827 | return 1; | |
3828 | } | |
3829 | return 0; | |
3830 | } | |
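
/* For example, after substituting (const_int 0) for reg 73 in

       (set (pc) (if_then_else (eq (reg 73) (const_int 0))
                               (label_ref 24) (pc)))

   the IF_THEN_ELSE may simplify to (label_ref 24), turning the copy
   into an unconditional jump (after which a barrier is emitted), or,
   had the comparison gone the other way, to (pc), a nop-jump that is
   left for later passes to delete.  Register and label numbers are
   illustrative only.  */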
3831 | ||
3832 | #ifdef HAVE_cc0 | |
3833 | /* Subroutine of cprop_insn that tries to propagate constants into | |
3834 | JUMP_INSNS for machines that have CC0. INSN is a single set that | |
3835 | stores into CC0; the insn following it is a conditional jump. | |
3836 | REG_USED is the use we will try to replace, SRC is the constant we | |
3837 | will try to substitute for it. | |
3838 | Returns nonzero if a change was made. */ | |
3839 | static int | |
3840 | cprop_cc0_jump (insn, reg_used, src) | |
3841 | rtx insn; | |
3842 | struct reg_use *reg_used; | |
3843 | rtx src; | |
3844 | { | |
3845 | rtx jump = NEXT_INSN (insn); | |
3846 | rtx copy = copy_rtx (jump); | |
3847 | rtx set = PATTERN (copy); | |
3848 | ||
3849 | /* We need to copy the source of the cc0 setter, as cprop_jump is going to | |
3850 | substitute into it. */ | |
3851 | replace_rtx (SET_SRC (set), cc0_rtx, copy_rtx (SET_SRC (PATTERN (insn)))); | |
3852 | if (! cprop_jump (jump, copy, reg_used, src)) | |
3853 | return 0; | |
3854 | ||
3855 | /* If we succeeded, delete the cc0 setter. */ | |
3856 | PUT_CODE (insn, NOTE); | |
3857 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
3858 | NOTE_SOURCE_FILE (insn) = 0; | |
3859 | return 1; | |
3860 | } | |
3861 | #endif | |
3862 | ||
7506f491 DE |
3863 | /* Perform constant and copy propagation on INSN. |
3864 | The result is non-zero if a change was made. */ | |
3865 | ||
3866 | static int | |
b5ce41ff | 3867 | cprop_insn (insn, alter_jumps) |
7506f491 | 3868 | rtx insn; |
b5ce41ff | 3869 | int alter_jumps; |
7506f491 DE |
3870 | { |
3871 | struct reg_use *reg_used; | |
3872 | int changed = 0; | |
3873 | ||
e78d9500 JL |
3874 | /* Only propagate into SETs. Note that a conditional jump is a |
3875 | SET with pc_rtx as the destination. */ | |
3876 | if ((GET_CODE (insn) != INSN | |
3877 | && GET_CODE (insn) != JUMP_INSN) | |
7506f491 DE |
3878 | || GET_CODE (PATTERN (insn)) != SET) |
3879 | return 0; | |
3880 | ||
3881 | reg_use_count = 0; | |
3882 | find_used_regs (PATTERN (insn)); | |
3883 | ||
3884 | reg_used = ®_use_table[0]; | |
3885 | for ( ; reg_use_count > 0; reg_used++, reg_use_count--) | |
3886 | { | |
3887 | rtx pat, src; | |
3888 | struct expr *set; | |
3889 | int regno = REGNO (reg_used->reg_rtx); | |
3890 | ||
3891 | /* Ignore registers created by GCSE. | |
3892 | We do this because ... */ | |
3893 | if (regno >= max_gcse_regno) | |
3894 | continue; | |
3895 | ||
3896 | /* If the register has already been set in this block, there's | |
3897 | nothing we can do. */ | |
3898 | if (! oprs_not_set_p (reg_used->reg_rtx, insn)) | |
3899 | continue; | |
3900 | ||
3901 | /* Find an assignment that sets reg_used and is available | |
3902 | at the start of the block. */ | |
3903 | set = find_avail_set (regno, insn); | |
3904 | if (! set) | |
3905 | continue; | |
3906 | ||
3907 | pat = set->expr; | |
3908 | /* ??? We might be able to handle PARALLELs. Later. */ | |
3909 | if (GET_CODE (pat) != SET) | |
3910 | abort (); | |
3911 | src = SET_SRC (pat); | |
3912 | ||
e78d9500 | 3913 | /* Constant propagation. */ |
05f6f07c BS |
3914 | if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE |
3915 | || GET_CODE (src) == SYMBOL_REF) | |
7506f491 | 3916 | { |
e78d9500 JL |
3917 | /* Handle normal insns first. */ |
3918 | if (GET_CODE (insn) == INSN | |
3919 | && try_replace_reg (reg_used->reg_rtx, src, insn)) | |
7506f491 DE |
3920 | { |
3921 | changed = 1; | |
3922 | const_prop_count++; | |
3923 | if (gcse_file != NULL) | |
3924 | { | |
3925 | fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ", | |
3926 | regno, INSN_UID (insn)); | |
e78d9500 | 3927 | print_rtl (gcse_file, src); |
7506f491 DE |
3928 | fprintf (gcse_file, "\n"); |
3929 | } | |
3930 | ||
3931 | /* The original insn setting reg_used may or may not now be | |
3932 | deletable. We leave the deletion to flow. */ | |
3933 | } | |
e78d9500 JL |
3934 | |
3935 | /* Try to propagate a CONST_INT into a conditional jump. | |
3936 | We're pretty specific about what we will handle in this | |
3937 | code, we can extend this as necessary over time. | |
3938 | ||
3939 | Right now the insn in question must look like | |
abd535b6 | 3940 | (set (pc) (if_then_else ...)) */ |
b5ce41ff | 3941 | else if (alter_jumps |
6e9a3c38 JL |
3942 | && GET_CODE (insn) == JUMP_INSN |
3943 | && condjump_p (insn) | |
3944 | && ! simplejump_p (insn)) | |
abd535b6 BS |
3945 | changed |= cprop_jump (insn, copy_rtx (insn), reg_used, src); |
3946 | #ifdef HAVE_cc0 | |
3947 | /* Similar code for machines that use a pair of CC0 setter and | |
3948 | conditional jump insn. */ | |
3949 | else if (alter_jumps | |
3950 | && GET_CODE (PATTERN (insn)) == SET | |
3951 | && SET_DEST (PATTERN (insn)) == cc0_rtx | |
3952 | && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN | |
3953 | && condjump_p (NEXT_INSN (insn)) | |
3954 | && ! simplejump_p (NEXT_INSN (insn))) | |
3955 | changed |= cprop_cc0_jump (insn, reg_used, src); | |
3956 | #endif | |
7506f491 DE |
3957 | } |
3958 | else if (GET_CODE (src) == REG | |
3959 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
3960 | && REGNO (src) != regno) | |
3961 | { | |
cafba495 | 3962 | if (try_replace_reg (reg_used->reg_rtx, src, insn)) |
7506f491 | 3963 | { |
cafba495 BS |
3964 | changed = 1; |
3965 | copy_prop_count++; | |
3966 | if (gcse_file != NULL) | |
7506f491 | 3967 | { |
cafba495 BS |
3968 | fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d with reg %d\n", |
3969 | regno, INSN_UID (insn), REGNO (src)); | |
7506f491 | 3970 | } |
cafba495 BS |
3971 | |
3972 | /* The original insn setting reg_used may or may not now be | |
3973 | deletable. We leave the deletion to flow. */ | |
3974 | /* FIXME: If it turns out that the insn isn't deletable, | |
3975 | then we may have unnecessarily extended register lifetimes | |
3976 | and made things worse. */ | |
7506f491 DE |
3977 | } |
3978 | } | |
3979 | } | |
3980 | ||
3981 | return changed; | |
3982 | } | |
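
/* For example, if (set (reg 81) (const_int 5)) is available at the
   start of INSN's block and reg 81 has not been set again before
   INSN, then an INSN such as

       (set (reg 82) (plus (reg 81) (reg 83)))

   is rewritten as (set (reg 82) (plus (const_int 5) (reg 83))),
   provided the result still matches some insn pattern.  Copies from
   one pseudo to another are propagated the same way.  Register
   numbers are illustrative only.  */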
3983 | ||
3984 | /* Forward propagate copies. | |
3985 | This includes copies and constants. | |
3986 | Return non-zero if a change was made. */ | |
3987 | ||
3988 | static int | |
b5ce41ff JL |
3989 | cprop (alter_jumps) |
3990 | int alter_jumps; | |
7506f491 DE |
3991 | { |
3992 | int bb, changed; | |
3993 | rtx insn; | |
3994 | ||
3995 | /* Note we start at block 1. */ | |
3996 | ||
3997 | changed = 0; | |
3998 | for (bb = 1; bb < n_basic_blocks; bb++) | |
3999 | { | |
4000 | /* Reset tables used to keep track of what's still valid [since the | |
4001 | start of the block]. */ | |
4002 | reset_opr_set_tables (); | |
4003 | ||
3b413743 RH |
4004 | for (insn = BLOCK_HEAD (bb); |
4005 | insn != NULL && insn != NEXT_INSN (BLOCK_END (bb)); | |
7506f491 DE |
4006 | insn = NEXT_INSN (insn)) |
4007 | { | |
4008 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
4009 | { | |
b5ce41ff | 4010 | changed |= cprop_insn (insn, alter_jumps); |
7506f491 DE |
4011 | |
4012 | /* Keep track of everything modified by this insn. */ | |
abd535b6 BS |
4013 | /* ??? Need to be careful w.r.t. mods done to INSN. Don't |
4014 | call mark_oprs_set if we turned the insn into a NOTE. */ | |
4015 | if (GET_CODE (insn) != NOTE) | |
4016 | mark_oprs_set (insn); | |
7506f491 | 4017 | } |
ac7c5af5 | 4018 | } |
7506f491 DE |
4019 | } |
4020 | ||
4021 | if (gcse_file != NULL) | |
4022 | fprintf (gcse_file, "\n"); | |
4023 | ||
4024 | return changed; | |
4025 | } | |
4026 | ||
4027 | /* Perform one copy/constant propagation pass. | |
4028 | PASS is the pass count. ALTER_JUMPS is non-zero if we should | |
4029 | also try to propagate constants into conditional jumps. */ | |
4030 | ||
4031 | static int | |
b5ce41ff | 4032 | one_cprop_pass (pass, alter_jumps) |
7506f491 | 4033 | int pass; |
b5ce41ff | 4034 | int alter_jumps; |
7506f491 DE |
4035 | { |
4036 | int changed = 0; | |
4037 | ||
4038 | const_prop_count = 0; | |
4039 | copy_prop_count = 0; | |
4040 | ||
4041 | alloc_set_hash_table (max_cuid); | |
b5ce41ff | 4042 | compute_set_hash_table (); |
7506f491 DE |
4043 | if (gcse_file) |
4044 | dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size, | |
4045 | n_sets); | |
4046 | if (n_sets > 0) | |
4047 | { | |
4048 | alloc_cprop_mem (n_basic_blocks, n_sets); | |
4049 | compute_cprop_data (); | |
b5ce41ff | 4050 | changed = cprop (alter_jumps); |
7506f491 DE |
4051 | free_cprop_mem (); |
4052 | } | |
4053 | free_set_hash_table (); | |
4054 | ||
4055 | if (gcse_file) | |
4056 | { | |
4057 | fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, %d const props, %d copy props\n", | |
4058 | current_function_name, pass, | |
4059 | bytes_used, const_prop_count, copy_prop_count); | |
4060 | fprintf (gcse_file, "\n"); | |
4061 | } | |
4062 | ||
4063 | return changed; | |
4064 | } | |
4065 | \f | |
a65f3558 | 4066 | /* Compute PRE+LCM working variables. */ |
7506f491 DE |
4067 | |
4068 | /* Local properties of expressions. */ | |
4069 | /* Nonzero for expressions that are transparent in the block. */ | |
a65f3558 | 4070 | static sbitmap *transp; |
7506f491 | 4071 | |
5c35539b RH |
4072 | /* Nonzero for expressions that are transparent at the end of the block. |
4073 | This is only zero for expressions killed by an abnormal critical edge | |
4074 | created by a call. */ | |
a65f3558 | 4075 | static sbitmap *transpout; |
5c35539b | 4076 | |
a65f3558 JL |
4077 | /* Nonzero for expressions that are computed (available) in the block. */ |
4078 | static sbitmap *comp; | |
7506f491 | 4079 | |
a65f3558 JL |
4080 | /* Nonzero for expressions that are locally anticipatable in the block. */ |
4081 | static sbitmap *antloc; | |
7506f491 | 4082 | |
a65f3558 JL |
4083 | /* Nonzero for expressions where this block is an optimal computation |
4084 | point. */ | |
4085 | static sbitmap *pre_optimal; | |
5c35539b | 4086 | |
a65f3558 JL |
4087 | /* Nonzero for expressions which are redundant in a particular block. */ |
4088 | static sbitmap *pre_redundant; | |
7506f491 | 4089 | |
a65f3558 | 4090 | static sbitmap *temp_bitmap; |
7506f491 | 4091 | |
a65f3558 JL |
4092 | /* Redundant insns. */ |
4093 | static sbitmap pre_redundant_insns; | |
7506f491 | 4094 | |
a65f3558 | 4095 | /* Allocate vars used for PRE analysis. */ |
7506f491 DE |
4096 | |
4097 | static void | |
a65f3558 JL |
4098 | alloc_pre_mem (n_blocks, n_exprs) |
4099 | int n_blocks, n_exprs; | |
7506f491 | 4100 | { |
a65f3558 JL |
4101 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); |
4102 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4103 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4104 | ||
4105 | temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4106 | pre_optimal = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4107 | pre_redundant = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4108 | transpout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
7506f491 DE |
4109 | } |
4110 | ||
a65f3558 | 4111 | /* Free vars used for PRE analysis. */ |
7506f491 DE |
4112 | |
4113 | static void | |
a65f3558 | 4114 | free_pre_mem () |
7506f491 | 4115 | { |
a65f3558 JL |
4116 | free (transp); |
4117 | free (comp); | |
4118 | free (antloc); | |
7506f491 | 4119 | |
76095e2f | 4120 | free (temp_bitmap); |
a65f3558 JL |
4121 | free (pre_optimal); |
4122 | free (pre_redundant); | |
4123 | free (transpout); | |
7506f491 DE |
4124 | } |
4125 | ||
4126 | /* Top level routine to do the dataflow analysis needed by PRE. */ | |
4127 | ||
4128 | static void | |
4129 | compute_pre_data () | |
4130 | { | |
a65f3558 JL |
4131 | compute_local_properties (transp, comp, antloc, 0); |
4132 | compute_transpout (); | |
4133 | pre_lcm (n_basic_blocks, n_exprs, s_preds, s_succs, transp, | |
4134 | antloc, pre_redundant, pre_optimal); | |
7506f491 | 4135 | } |
a65f3558 | 4136 | |
7506f491 DE |
4137 | \f |
4138 | /* PRE utilities */ | |
4139 | ||
a65f3558 JL |
4140 | /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach |
4141 | block BB. | |
7506f491 DE |
4142 | |
4143 | VISITED is a pointer to a working buffer for tracking which BB's have | |
4144 | been visited. It is NULL for the top-level call. | |
4145 | ||
a65f3558 JL |
4146 | CHECK_PRE_COMP controls whether or not we check for a computation of |
4147 | EXPR in OCCR_BB. | |
4148 | ||
7506f491 DE |
4149 | We treat reaching expressions that go through blocks containing the same |
4150 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
4151 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
4152 | 2 as not reaching. The intent is to improve the probability of finding | |
4153 | only one reaching expression and to reduce register lifetimes by picking | |
4154 | the closest such expression. */ | |
4155 | ||
4156 | static int | |
a65f3558 JL |
4157 | pre_expr_reaches_here_p (occr_bb, expr, bb, check_pre_comp, visited) |
4158 | int occr_bb; | |
7506f491 DE |
4159 | struct expr *expr; |
4160 | int bb; | |
a65f3558 | 4161 | int check_pre_comp; |
7506f491 DE |
4162 | char *visited; |
4163 | { | |
36349f8b | 4164 | edge pred; |
7506f491 DE |
4165 | |
4166 | if (visited == NULL) | |
4167 | { | |
4168 | visited = (char *) alloca (n_basic_blocks); | |
4169 | bzero (visited, n_basic_blocks); | |
4170 | } | |
4171 | ||
36349f8b | 4172 | for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next) |
7506f491 | 4173 | { |
36349f8b | 4174 | int pred_bb = pred->src->index; |
7506f491 | 4175 | |
36349f8b | 4176 | if (pred->src == ENTRY_BLOCK_PTR |
7506f491 DE |
4177 | /* Has this predecessor already been visited? */ | |
4178 | || visited[pred_bb]) | |
ac7c5af5 | 4179 | { |
7506f491 DE |
4180 | /* Nothing to do. */ |
4181 | } | |
4182 | /* Does this predecessor generate this expression? */ | |
a65f3558 JL |
4183 | else if ((!check_pre_comp && occr_bb == pred_bb) |
4184 | || TEST_BIT (comp[pred_bb], expr->bitmap_index)) | |
7506f491 DE |
4185 | { |
4186 | /* Is this the occurrence we're looking for? | |
4187 | Note that there's only one generating occurrence per block | |
4188 | so we just need to check the block number. */ | |
a65f3558 | 4189 | if (occr_bb == pred_bb) |
7506f491 DE |
4190 | return 1; |
4191 | visited[pred_bb] = 1; | |
4192 | } | |
4193 | /* Ignore this predecessor if it kills the expression. */ | |
a65f3558 | 4194 | else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index)) |
7506f491 DE |
4195 | visited[pred_bb] = 1; |
4196 | /* Neither gen nor kill. */ | |
4197 | else | |
ac7c5af5 | 4198 | { |
7506f491 | 4199 | visited[pred_bb] = 1; |
a65f3558 JL |
4200 | if (pre_expr_reaches_here_p (occr_bb, expr, pred_bb, |
4201 | check_pre_comp, visited)) | |
7506f491 | 4202 | return 1; |
ac7c5af5 | 4203 | } |
7506f491 DE |
4204 | } |
4205 | ||
4206 | /* All paths have been checked. */ | |
4207 | return 0; | |
4208 | } | |
4209 | \f | |
a65f3558 JL |
4210 | /* Add EXPR to the end of basic block BB. |
4211 | ||
4212 | This is used by both PRE and code hoisting. | |
4213 | ||
4214 | For PRE, we want to verify that the expr is either transparent | |
4215 | or locally anticipatable in the target block. This check makes | |
4216 | no sense for code hoisting. */ | |
7506f491 DE |
4217 | |
4218 | static void | |
a65f3558 | 4219 | insert_insn_end_bb (expr, bb, pre) |
7506f491 DE |
4220 | struct expr *expr; |
4221 | int bb; | |
a65f3558 | 4222 | int pre; |
7506f491 DE |
4223 | { |
4224 | rtx insn = BLOCK_END (bb); | |
4225 | rtx new_insn; | |
4226 | rtx reg = expr->reaching_reg; | |
4227 | int regno = REGNO (reg); | |
a65f3558 JL |
4228 | rtx pat, copied_expr; |
4229 | rtx first_new_insn; | |
7506f491 | 4230 | |
a65f3558 JL |
4231 | start_sequence (); |
4232 | copied_expr = copy_rtx (expr->expr); | |
4233 | emit_move_insn (reg, copied_expr); | |
4234 | first_new_insn = get_insns (); | |
4235 | pat = gen_sequence (); | |
4236 | end_sequence (); | |
7506f491 DE |
4237 | |
4238 | /* If the last insn is a jump, insert EXPR in front [taking care to | |
4239 | handle cc0, etc. properly]. */ | |
4240 | ||
4241 | if (GET_CODE (insn) == JUMP_INSN) | |
4242 | { | |
50b2596f | 4243 | #ifdef HAVE_cc0 |
7506f491 | 4244 | rtx note; |
50b2596f | 4245 | #endif |
7506f491 DE |
4246 | |
4247 | /* If this is a jump table, then we can't insert stuff here. Since | |
4248 | we know the previous real insn must be the tablejump, we insert | |
4249 | the new instruction just before the tablejump. */ | |
4250 | if (GET_CODE (PATTERN (insn)) == ADDR_VEC | |
4251 | || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC) | |
4252 | insn = prev_real_insn (insn); | |
4253 | ||
4254 | #ifdef HAVE_cc0 | |
4255 | /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts | |
4256 | if cc0 isn't set. */ | |
4257 | note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); | |
4258 | if (note) | |
4259 | insn = XEXP (note, 0); | |
4260 | else | |
4261 | { | |
4262 | rtx maybe_cc0_setter = prev_nonnote_insn (insn); | |
4263 | if (maybe_cc0_setter | |
4264 | && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i' | |
4265 | && sets_cc0_p (PATTERN (maybe_cc0_setter))) | |
4266 | insn = maybe_cc0_setter; | |
4267 | } | |
4268 | #endif | |
4269 | /* FIXME: What if something in cc0/jump uses value set in new insn? */ | |
4270 | new_insn = emit_insn_before (pat, insn); | |
4271 | if (BLOCK_HEAD (bb) == insn) | |
4272 | BLOCK_HEAD (bb) = new_insn; | |
3947e2f9 RH |
4273 | } |
4274 | /* Likewise if the last insn is a call, as will happen in the presence | |
4275 | of exception handling. */ | |
5c35539b | 4276 | else if (GET_CODE (insn) == CALL_INSN) |
3947e2f9 RH |
4277 | { |
4278 | HARD_REG_SET parm_regs; | |
4279 | int nparm_regs; | |
4280 | rtx p; | |
4281 | ||
4282 | /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers, | |
4283 | we search backward and place the instructions before the first | |
4284 | parameter is loaded. Do this for everyone for consistency and on the | |
4285 | presumption that we'll get better code elsewhere as well. */ | |
4286 | ||
4287 | /* It should always be the case that we can put these instructions | |
a65f3558 JL |
4288 | anywhere in the basic block when performing PRE optimizations. | |
4289 | Check this. */ | |
4290 | if (pre | |
4291 | && !TEST_BIT (antloc[bb], expr->bitmap_index) | |
4292 | && !TEST_BIT (transp[bb], expr->bitmap_index)) | |
3947e2f9 RH |
4293 | abort (); |
4294 | ||
4295 | /* Since different machines initialize their parameter registers | |
4296 | in different orders, assume nothing. Collect the set of all | |
4297 | parameter registers. */ | |
4298 | CLEAR_HARD_REG_SET (parm_regs); | |
4299 | nparm_regs = 0; | |
4300 | for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1)) | |
4301 | if (GET_CODE (XEXP (p, 0)) == USE | |
4302 | && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG) | |
4303 | { | |
4304 | int regno = REGNO (XEXP (XEXP (p, 0), 0)); | |
4305 | if (regno >= FIRST_PSEUDO_REGISTER) | |
5c35539b | 4306 | abort (); |
3947e2f9 RH |
4307 | SET_HARD_REG_BIT (parm_regs, regno); |
4308 | nparm_regs++; | |
4309 | } | |
4310 | ||
4311 | /* Search backward for the first set of a register in this set. */ | |
4312 | while (nparm_regs && BLOCK_HEAD (bb) != insn) | |
4313 | { | |
4314 | insn = PREV_INSN (insn); | |
4315 | p = single_set (insn); | |
4316 | if (p && GET_CODE (SET_DEST (p)) == REG | |
4317 | && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER | |
4318 | && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)))) | |
4319 | { | |
4320 | CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))); | |
4321 | nparm_regs--; | |
4322 | } | |
4323 | } | |
4324 | ||
b1d26727 JL |
4325 | /* If we found all the parameter loads, then we want to insert |
4326 | before the first parameter load. | |
4327 | ||
4328 | If we did not find all the parameter loads, then we might have | |
4329 | stopped on the head of the block, which could be a CODE_LABEL. | |
4330 | If we inserted before the CODE_LABEL, then we would be putting | |
4331 | the insn in the wrong basic block. In that case, put the insn | |
4332 | after the CODE_LABEL. | |
4333 | ||
4334 | ?!? Do we need to account for NOTE_INSN_BASIC_BLOCK here? */ | |
4335 | if (GET_CODE (insn) != CODE_LABEL) | |
4336 | { | |
4337 | new_insn = emit_insn_before (pat, insn); | |
4338 | if (BLOCK_HEAD (bb) == insn) | |
4339 | BLOCK_HEAD (bb) = new_insn; | |
4340 | } | |
4341 | else | |
4342 | { | |
4343 | new_insn = emit_insn_after (pat, insn); | |
4344 | } | |
7506f491 DE |
4345 | } |
4346 | else | |
4347 | { | |
4348 | new_insn = emit_insn_after (pat, insn); | |
4349 | BLOCK_END (bb) = new_insn; | |
7506f491 DE |
4350 | } |
4351 | ||
a65f3558 JL |
4352 | /* Keep block number table up to date. |
4353 | Note, PAT could be a multiple insn sequence; we have to make | |
4354 | sure that each insn in the sequence is handled. */ | |
4355 | if (GET_CODE (pat) == SEQUENCE) | |
4356 | { | |
4357 | int i; | |
4358 | ||
4359 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
4360 | { | |
4361 | rtx insn = XVECEXP (pat, 0, i); | |
4362 | set_block_num (insn, bb); | |
4363 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
4364 | add_label_notes (PATTERN (insn), new_insn); | |
4365 | record_set_insn = insn; | |
4366 | note_stores (PATTERN (insn), record_set_info); | |
4367 | } | |
4368 | } | |
4369 | else | |
4370 | { | |
4371 | add_label_notes (SET_SRC (pat), new_insn); | |
4372 | set_block_num (new_insn, bb); | |
4373 | /* Keep register set table up to date. */ | |
4374 | record_one_set (regno, new_insn); | |
4375 | } | |
3947e2f9 | 4376 | |
7506f491 DE |
4377 | gcse_create_count++; |
4378 | ||
4379 | if (gcse_file) | |
4380 | { | |
a65f3558 | 4381 | fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, copying expression %d to reg %d\n", |
7506f491 DE |
4382 | bb, INSN_UID (new_insn), expr->bitmap_index, regno); |
4383 | } | |
4384 | } | |
4385 | ||
4386 | /* Insert partially redundant expressions at the ends of appropriate basic | |
a65f3558 | 4387 | blocks making them fully redundant. */ |
7506f491 DE |
4388 | |
4389 | static void | |
4390 | pre_insert (index_map) | |
4391 | struct expr **index_map; | |
4392 | { | |
a65f3558 JL |
4393 | int bb, i, set_size; |
4394 | sbitmap *inserted; | |
4395 | ||
4396 | /* Compute INSERT = PRE_OPTIMAL & ~PRE_REDUNDANT. | |
4397 | Where INSERT is nonzero, we add the expression at the end of the basic | |
4398 | block if it reaches any of the deleted expressions. */ | |
7506f491 | 4399 | |
a65f3558 JL |
4400 | set_size = pre_optimal[0]->size; |
4401 | inserted = sbitmap_vector_alloc (n_basic_blocks, n_exprs); | |
4402 | sbitmap_vector_zero (inserted, n_basic_blocks); | |
7506f491 | 4403 | |
7506f491 DE |
4404 | for (bb = 0; bb < n_basic_blocks; bb++) |
4405 | { | |
4406 | int indx; | |
a65f3558 JL |
4407 | |
4408 | /* This computes the set of potential insertions we need. */ | |
4409 | sbitmap_not (temp_bitmap[bb], pre_redundant[bb]); | |
4410 | sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_optimal[bb]); | |
4411 | ||
4412 | /* TEMP_BITMAP[bb] now contains a bitmap of the expressions that we need | |
4413 | to insert at the end of this basic block. */ | |
4414 | for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS) | |
7506f491 | 4415 | { |
a65f3558 | 4416 | SBITMAP_ELT_TYPE insert = temp_bitmap[bb]->elms[i]; |
7506f491 | 4417 | int j; |
7506f491 | 4418 | |
a65f3558 | 4419 | for (j = indx; insert && j < n_exprs; j++, insert >>= 1) |
7506f491 | 4420 | { |
a65f3558 JL |
4421 | if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX) |
4422 | { | |
4423 | struct expr *expr = index_map[j]; | |
4424 | struct occr *occr; | |
4425 | ||
4426 | /* Now look at each deleted occurrence of this expression. */ | |
4427 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) | |
4428 | { | |
4429 | if (! occr->deleted_p) | |
4430 | continue; | |
4431 | ||
4432 | /* Insert this expression at the end of BB if it would | |
4433 | reach the deleted occurrence. */ | |
4434 | if (!TEST_BIT (inserted[bb], j) | |
4435 | && pre_expr_reaches_here_p (bb, expr, | |
4436 | BLOCK_NUM (occr->insn), 0, | |
4437 | NULL)) | |
4438 | { | |
4439 | SET_BIT (inserted[bb], j); | |
4440 | insert_insn_end_bb (index_map[j], bb, 1); | |
4441 | } | |
4442 | } | |
4443 | } | |
7506f491 DE |
4444 | } |
4445 | } | |
4446 | } | |
76095e2f RH |
4447 | |
4448 | sbitmap_vector_free (inserted); | |
7506f491 DE |
4449 | } |
4450 | ||
4451 | /* Copy the result of INSN to REG. | |
4452 | INDX is the expression number. */ | |
4453 | ||
4454 | static void | |
4455 | pre_insert_copy_insn (expr, insn) | |
4456 | struct expr *expr; | |
4457 | rtx insn; | |
4458 | { | |
4459 | rtx reg = expr->reaching_reg; | |
4460 | int regno = REGNO (reg); | |
4461 | int indx = expr->bitmap_index; | |
4462 | rtx set = single_set (insn); | |
4463 | rtx new_insn; | |
4464 | ||
4465 | if (!set) | |
4466 | abort (); | |
9e6a5703 | 4467 | new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)), |
7506f491 DE |
4468 | insn); |
4469 | /* Keep block number table up to date. */ | |
4470 | set_block_num (new_insn, BLOCK_NUM (insn)); | |
4471 | /* Keep register set table up to date. */ | |
4472 | record_one_set (regno, new_insn); | |
4473 | ||
4474 | gcse_create_count++; | |
4475 | ||
4476 | if (gcse_file) | |
4477 | { | |
4478 | fprintf (gcse_file, "PRE: bb %d, insn %d, copying expression %d in insn %d to reg %d\n", | |
4479 | BLOCK_NUM (insn), INSN_UID (new_insn), indx, INSN_UID (insn), regno); | |
4480 | } | |
4481 | } | |
4482 | ||
4483 | /* Copy available expressions that reach the redundant expression | |
4484 | to `reaching_reg'. */ | |
4485 | ||
4486 | static void | |
4487 | pre_insert_copies () | |
4488 | { | |
a65f3558 JL |
4489 | int i, bb; |
4490 | ||
4491 | for (bb = 0; bb < n_basic_blocks; bb++) | |
4492 | { | |
4493 | sbitmap_a_and_b (temp_bitmap[bb], pre_optimal[bb], pre_redundant[bb]); | |
4494 | } | |
7506f491 DE |
4495 | |
4496 | /* For each available expression in the table, copy the result to | |
4497 | `reaching_reg' if the expression reaches a deleted one. | |
4498 | ||
4499 | ??? The current algorithm is rather brute force. | |
4500 | Need to do some profiling. */ | |
4501 | ||
4502 | for (i = 0; i < expr_hash_table_size; i++) | |
4503 | { | |
4504 | struct expr *expr; | |
4505 | ||
4506 | for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash) | |
4507 | { | |
4508 | struct occr *occr; | |
4509 | ||
4510 | /* If the basic block isn't reachable, PPOUT will be TRUE. | |
4511 | However, we don't want to insert a copy here because the | |
4512 | expression may not really be redundant. So only insert | |
4513 | an insn if the expression was deleted. | |
4514 | This test also avoids further processing if the expression | |
4515 | wasn't deleted anywhere. */ | |
4516 | if (expr->reaching_reg == NULL) | |
4517 | continue; | |
4518 | ||
4519 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) | |
4520 | { | |
4521 | struct occr *avail; | |
4522 | ||
4523 | if (! occr->deleted_p) | |
4524 | continue; | |
4525 | ||
4526 | for (avail = expr->avail_occr; avail != NULL; avail = avail->next) | |
4527 | { | |
4528 | rtx insn = avail->insn; | |
a65f3558 JL |
4529 | int bb = BLOCK_NUM (insn); |
4530 | ||
4531 | if (!TEST_BIT (temp_bitmap[bb], expr->bitmap_index)) | |
4532 | continue; | |
7506f491 DE |
4533 | |
4534 | /* No need to handle this one if handled already. */ | |
4535 | if (avail->copied_p) | |
4536 | continue; | |
4537 | /* Don't handle this one if it's a redundant one. */ | |
a65f3558 | 4538 | if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn))) |
7506f491 DE |
4539 | continue; |
4540 | /* Or if the expression doesn't reach the deleted one. */ | |
a65f3558 | 4541 | if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr, |
7506f491 | 4542 | BLOCK_NUM (occr->insn), |
a65f3558 | 4543 | 1, NULL)) |
7506f491 DE |
4544 | continue; |
4545 | ||
4546 | /* Copy the result of avail to reaching_reg. */ | |
4547 | pre_insert_copy_insn (expr, insn); | |
4548 | avail->copied_p = 1; | |
4549 | } | |
4550 | } | |
4551 | } | |
4552 | } | |
4553 | } | |
4554 | ||
4555 | /* Delete redundant computations. | |
7506f491 DE |
4556 | Deletion is done by changing the insn to copy the `reaching_reg' of |
4557 | the expression into the result of the SET. It is left to later passes | |
4558 | (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it. | |
4559 | ||
4560 | Returns non-zero if a change is made. */ | |
4561 | ||
4562 | static int | |
4563 | pre_delete () | |
4564 | { | |
a65f3558 JL |
4565 | int i, bb, changed; |
4566 | ||
4567 | /* Compute the expressions which are redundant and need to be replaced by | |
4568 | copies from the reaching reg to the target reg. */ | |
4569 | for (bb = 0; bb < n_basic_blocks; bb++) | |
4570 | { | |
4571 | sbitmap_not (temp_bitmap[bb], pre_optimal[bb]); | |
4572 | sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_redundant[bb]); | |
4573 | } | |
7506f491 DE |
4574 | |
4575 | changed = 0; | |
4576 | for (i = 0; i < expr_hash_table_size; i++) | |
4577 | { | |
4578 | struct expr *expr; | |
4579 | ||
4580 | for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash) | |
4581 | { | |
4582 | struct occr *occr; | |
4583 | int indx = expr->bitmap_index; | |
4584 | ||
4585 | /* We only need to search antic_occr since we require | |
4586 | ANTLOC != 0. */ | |
4587 | ||
4588 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) | |
4589 | { | |
4590 | rtx insn = occr->insn; | |
4591 | rtx set; | |
4592 | int bb = BLOCK_NUM (insn); | |
7506f491 | 4593 | |
a65f3558 | 4594 | if (TEST_BIT (temp_bitmap[bb], indx)) |
7506f491 | 4595 | { |
7506f491 DE |
4596 | set = single_set (insn); |
4597 | if (! set) | |
4598 | abort (); | |
4599 | ||
d3903c22 JL |
4600 | /* Create a pseudo-reg to store the result of reaching |
4601 | expressions into. Get the mode for the new pseudo | |
4602 | from the mode of the original destination pseudo. */ | |
4603 | if (expr->reaching_reg == NULL) | |
4604 | expr->reaching_reg | |
4605 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
4606 | ||
7506f491 DE |
4607 | /* In theory this should never fail since we're creating |
4608 | a reg->reg copy. | |
4609 | ||
4610 | However, on the x86 some of the movXX patterns actually | |
4611 | contain clobbers of scratch regs. This may cause the | |
db35306d | 4612 | insn created by validate_change to not match any pattern |
7506f491 DE |
4613 | and thus cause validate_change to fail. */ |
4614 | if (validate_change (insn, &SET_SRC (set), | |
4615 | expr->reaching_reg, 0)) | |
4616 | { | |
4617 | occr->deleted_p = 1; | |
a65f3558 | 4618 | SET_BIT (pre_redundant_insns, INSN_CUID (insn)); |
7506f491 DE |
4619 | changed = 1; |
4620 | gcse_subst_count++; | |
4621 | } | |
4622 | ||
4623 | if (gcse_file) | |
4624 | { | |
a65f3558 JL |
4625 | fprintf (gcse_file, |
4626 | "PRE: redundant insn %d (expression %d) in bb %d, reaching reg is %d\n", | |
7506f491 DE |
4627 | INSN_UID (insn), indx, bb, REGNO (expr->reaching_reg)); |
4628 | } | |
4629 | } | |
4630 | } | |
4631 | } | |
4632 | } | |
4633 | ||
4634 | return changed; | |
4635 | } | |
4636 | ||
4637 | /* Perform GCSE optimizations using PRE. | |
4638 | This is called by one_pre_gcse_pass after all the dataflow analysis | |
4639 | has been done. | |
4640 | ||
a65f3558 JL |
4641 | This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
4642 | and lazy code motion from Knoop, Ruthing and Steffen as described in | |
4643 | Advanced Compiler Design and Implementation. | |
7506f491 DE |
4644 | |
4645 | ??? A new pseudo reg is created to hold the reaching expression. | |
4646 | The nice thing about the classical approach is that it would try to | |
4647 | use an existing reg. If the register can't be adequately optimized | |
4648 | [i.e. we introduce reload problems], one could add a pass here to | |
4649 | propagate the new register through the block. | |
4650 | ||
4651 | ??? We don't handle single sets in PARALLELs because we're [currently] | |
4652 | not able to copy the rest of the parallel when we insert copies to create | |
4653 | full redundancies from partial redundancies. However, there's no reason | |
4654 | why we can't handle PARALLELs in the cases where there are no partial | |
4655 | redundancies. */ | |
4656 | ||
4657 | static int | |
4658 | pre_gcse () | |
4659 | { | |
4660 | int i; | |
4661 | int changed; | |
4662 | struct expr **index_map; | |
4663 | ||
4664 | /* Compute a mapping from expression number (`bitmap_index') to | |
4665 | hash table entry. */ | |
4666 | ||
4667 | index_map = (struct expr **) alloca (n_exprs * sizeof (struct expr *)); | |
4668 | bzero ((char *) index_map, n_exprs * sizeof (struct expr *)); | |
4669 | for (i = 0; i < expr_hash_table_size; i++) | |
4670 | { | |
4671 | struct expr *expr; | |
4672 | ||
4673 | for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash) | |
4674 | index_map[expr->bitmap_index] = expr; | |
4675 | } | |
4676 | ||
4677 | /* Reset bitmap used to track which insns are redundant. */ | |
a65f3558 JL |
4678 | pre_redundant_insns = sbitmap_alloc (max_cuid); |
4679 | sbitmap_zero (pre_redundant_insns); | |
7506f491 DE |
4680 | |
4681 | /* Delete the redundant insns first so that | |
4682 | - we know what register to use for the new insns and for the other | |
4683 | ones with reaching expressions | |
4684 | - we know which insns are redundant when we go to create copies */ | |
4685 | changed = pre_delete (); | |
4686 | ||
4687 | /* Insert insns in places that make partially redundant expressions | |
4688 | fully redundant. */ | |
4689 | pre_insert (index_map); | |
4690 | ||
4691 | /* In other places with reaching expressions, copy the expression to the | |
4692 | specially allocated pseudo-reg that reaches the redundant expression. */ | |
4693 | pre_insert_copies (); | |
4694 | ||
a65f3558 | 4695 | free (pre_redundant_insns); |
7506f491 DE |
4696 | |
4697 | return changed; | |
4698 | } | |
4699 | ||
4700 | /* Top level routine to perform one PRE GCSE pass. | |
4701 | ||
4702 | Return non-zero if a change was made. */ | |
4703 | ||
4704 | static int | |
b5ce41ff | 4705 | one_pre_gcse_pass (pass) |
7506f491 DE |
4706 | int pass; |
4707 | { | |
4708 | int changed = 0; | |
4709 | ||
4710 | gcse_subst_count = 0; | |
4711 | gcse_create_count = 0; | |
4712 | ||
4713 | alloc_expr_hash_table (max_cuid); | |
b5ce41ff | 4714 | compute_expr_hash_table (); |
7506f491 DE |
4715 | if (gcse_file) |
4716 | dump_hash_table (gcse_file, "Expression", expr_hash_table, | |
4717 | expr_hash_table_size, n_exprs); | |
4718 | if (n_exprs > 0) | |
4719 | { | |
4720 | alloc_pre_mem (n_basic_blocks, n_exprs); | |
4721 | compute_pre_data (); | |
4722 | changed |= pre_gcse (); | |
4723 | free_pre_mem (); | |
4724 | } | |
4725 | free_expr_hash_table (); | |
4726 | ||
4727 | if (gcse_file) | |
4728 | { | |
4729 | fprintf (gcse_file, "\n"); | |
4730 | fprintf (gcse_file, "PRE GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n", | |
4731 | current_function_name, pass, | |
4732 | bytes_used, gcse_subst_count, gcse_create_count); | |
4733 | } | |
4734 | ||
4735 | return changed; | |
4736 | } | |
aeb2f500 JW |
4737 | \f |
4738 | /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN. | |
4739 | We have to add REG_LABEL notes, because the following loop optimization | |
4740 | pass requires them. */ | |
4741 | ||
4742 | /* ??? This is very similar to the loop.c add_label_notes function. We | |
4743 | could probably share code here. */ | |
4744 | ||
4745 | /* ??? If there was a jump optimization pass after gcse and before loop, | |
4746 | then we would not need to do this here, because jump would add the | |
4747 | necessary REG_LABEL notes. */ | |
4748 | ||
4749 | static void | |
4750 | add_label_notes (x, insn) | |
4751 | rtx x; | |
4752 | rtx insn; | |
4753 | { | |
4754 | enum rtx_code code = GET_CODE (x); | |
4755 | int i, j; | |
6f7d635c | 4756 | const char *fmt; |
aeb2f500 JW |
4757 | |
4758 | if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x)) | |
4759 | { | |
6b3603c2 | 4760 | /* This code used to ignore labels that referred to dispatch tables to |
ac7c5af5 | 4761 | avoid flow generating (slightly) worse code.
6b3603c2 | 4762 | |
ac7c5af5 JL |
4763 | We no longer ignore such label references (see LABEL_REF handling in |
4764 | mark_jump_label for additional information). */ | |
6b3603c2 JL |
4765 | REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0), |
4766 | REG_NOTES (insn)); | |
aeb2f500 JW |
4767 | return; |
4768 | } | |
4769 | ||
4770 | fmt = GET_RTX_FORMAT (code); | |
4771 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
4772 | { | |
4773 | if (fmt[i] == 'e') | |
4774 | add_label_notes (XEXP (x, i), insn); | |
4775 | else if (fmt[i] == 'E') | |
4776 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
4777 | add_label_notes (XVECEXP (x, i, j), insn); | |
4778 | } | |
4779 | } | |
a65f3558 JL |
4780 | |
4781 | /* Compute transparent outgoing information for each block. | |
4782 | ||
4783 | An expression is transparent to an edge unless it is killed by | |
4784 | the edge itself. This can only happen with abnormal control flow, | |
4785 | when the edge is traversed through a call. This happens with | |
4786 | non-local labels and exceptions. | |
4787 | ||
4788 | This would not be necessary if we split the edge. While this is | |
4789 | normally impossible for abnormal critical edges, with some effort | |
4790 | it should be possible with exception handling, since we still have | |
4791 | control over which handler should be invoked. But due to increased | |
4792 | EH table sizes, this may not be worthwhile. */ | |
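/* For example (illustrative): when a block ends in a call that may
   throw or return through a non-local goto, a MEM such as
   (mem (reg 65)) available at the call cannot be assumed valid along
   the abnormal edge, since the callee may have stored to that memory
   before control left it.  Constant pool references are exempted
   below because their contents never change.  */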
4793 | ||
4794 | static void | |
4795 | compute_transpout () | |
4796 | { | |
4797 | int bb; | |
4798 | ||
4799 | sbitmap_vector_ones (transpout, n_basic_blocks); | |
4800 | ||
4801 | for (bb = 0; bb < n_basic_blocks; ++bb) | |
4802 | { | |
4803 | int i; | |
4804 | ||
4805 | /* Note that flow inserted a nop at the end of basic blocks that
4806 | end in call instructions for reasons other than abnormal | |
4807 | control flow. */ | |
4808 | if (GET_CODE (BLOCK_END (bb)) != CALL_INSN) | |
4809 | continue; | |
4810 | ||
4811 | for (i = 0; i < expr_hash_table_size; i++) | |
4812 | { | |
4813 | struct expr *expr; | |
4814 | for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash) | |
4815 | if (GET_CODE (expr->expr) == MEM) | |
4816 | { | |
4817 | rtx addr = XEXP (expr->expr, 0); | |
4818 | ||
4819 | if (GET_CODE (addr) == SYMBOL_REF | |
4820 | && CONSTANT_POOL_ADDRESS_P (addr)) | |
4821 | continue; | |
4822 | ||
4823 | /* ??? Optimally, we would use interprocedural alias | |
4824 | analysis to determine if this mem is actually killed | |
4825 | by this call. */ | |
4826 | RESET_BIT (transpout[bb], expr->bitmap_index); | |
4827 | } | |
4828 | } | |
4829 | } | |
4830 | } | |
dfdb644f JL |
4831 | |
4832 | /* Removal of useless null pointer checks */ | |
4833 | ||
4834 | /* These need to be file static for communication between | |
4835 | invalidate_nonnull_info and delete_null_pointer_checks. */ | |
4836 | static int current_block; | |
4837 | static sbitmap *nonnull_local; | |
4838 | static sbitmap *nonnull_killed; | |
4839 | ||
4840 | /* Called via note_stores. X is set by SETTER. If X is a register we must | |
4841 | invalidate nonnull_local and set nonnull_killed. | |
4842 | ||
4843 | We ignore hard registers. */ | |
4844 | static void | |
4845 | invalidate_nonnull_info (x, setter) | |
4846 | rtx x; | |
4847 | rtx setter ATTRIBUTE_UNUSED; | |
4848 | { | |
4849 | int regno;
4850 |
4852 | while (GET_CODE (x) == SUBREG) | |
4853 | x = SUBREG_REG (x); | |
4854 | ||
4855 | /* Ignore anything that is not a register or is a hard register. */ | |
4856 | if (GET_CODE (x) != REG | |
4857 | || REGNO (x) < FIRST_PSEUDO_REGISTER) | |
4858 | return; | |
4859 | ||
4860 | regno = REGNO (x); | |
4861 | ||
4862 | RESET_BIT (nonnull_local[current_block], regno); | |
4863 | SET_BIT (nonnull_killed[current_block], regno); | |
4864 | ||
4865 | } | |
4866 | ||
4867 | /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated | |
4868 | at compile time. | |
4869 | ||
4870 | This is conceptually similar to global constant/copy propagation and | |
4871 | classic global CSE (it even uses the same dataflow equations as cprop). | |
4872 | ||
4873 | If a register is used as memory address with the form (mem (reg)), then we | |
4874 | know that REG can not be zero at that point in the program. Any instruction | |
4875 | which sets REG "kills" this property. | |
4876 | ||
4877 | So, if every path leading to a conditional branch has an available memory | |
4878 | reference of that form, then we know the register can not have the value | |
4879 | zero at the conditional branch. | |
4880 | ||
4881 | So we merely need to compute the local properties and propagate that data
4882 | around the cfg, then optimize where possible. | |
4883 | ||
4884 | We run this pass twice: once before CSE, then again after CSE. This
4885 | has proven to be the most profitable approach. It is rare for new | |
4886 | optimization opportunities of this nature to appear after the first CSE | |
4887 | pass. | |
4888 | ||
4889 | This could probably be integrated with global cprop with a little work. */ | |
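/* For example (an illustrative source-level sketch):

     p->count++;     dereference, i.e. (mem (reg P)); P is nonnull here
     ...             P is not assigned on any path to the test
     if (p == 0)     this comparison is known to be false,
       abort ();     so the conditional branch can be deleted

   Every path to the test contains the dereference, so P is known
   nonnull when the EQ/NE comparison against zero is reached.  */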
4890 | ||
4891 | void | |
4892 | delete_null_pointer_checks (f) | |
4893 | rtx f; | |
4894 | { | |
4895 | int_list_ptr *s_preds, *s_succs; | |
4896 | int *num_preds, *num_succs; | |
4897 | int changed, bb; | |
4898 | sbitmap *nonnull_avin, *nonnull_avout; | |
4899 | ||
4900 | /* First break the program into basic blocks. */ | |
4901 | find_basic_blocks (f, max_reg_num (), NULL, 1); | |
4902 | ||
4903 | /* If we have only a single block, then there's nothing to do. */ | |
4904 | if (n_basic_blocks <= 1) | |
4905 | { | |
4906 | /* Free storage allocated by find_basic_blocks. */ | |
4907 | free_basic_block_vars (0); | |
4908 | return; | |
4909 | } | |
4910 | ||
4911 | /* We need predecessor/successor lists as well as pred/succ counts for | |
4912 | each basic block. */ | |
4913 | s_preds = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr)); | |
4914 | s_succs = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr)); | |
4915 | num_preds = (int *) alloca (n_basic_blocks * sizeof (int)); | |
4916 | num_succs = (int *) alloca (n_basic_blocks * sizeof (int)); | |
4917 | compute_preds_succs (s_preds, s_succs, num_preds, num_succs); | |
4918 | ||
4919 | /* Allocate bitmaps to hold local and global properties. */ | |
4920 | nonnull_local = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ()); | |
4921 | nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ()); | |
4922 | nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ()); | |
4923 | nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ()); | |
4924 | ||
4925 | /* Compute local properties, nonnull and killed. A register will have | |
4926 | the nonnull property if at the end of the current block its value is | |
4927 | known to be nonnull. The killed property indicates that somewhere in | |
4928 | the block any information we had about the register is killed. | |
4929 | ||
4930 | Note that a register can have both properties in a single block. That | |
4931 | indicates that it's killed, then later in the block a new value is | |
4932 | computed. */ | |
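/* For example (illustrative register numbers): within one block

     (set (reg 70) (reg 71))           kills what was known about reg 70
     (set (reg 72) (mem (reg 70)))     reg 70 is nonnull from here on

   reg 70 ends the block with both bits set: nonnull_killed because the
   old information died, and nonnull_local because a nonnull value was
   established afterwards.  */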
4933 | sbitmap_vector_zero (nonnull_local, n_basic_blocks); | |
4934 | sbitmap_vector_zero (nonnull_killed, n_basic_blocks); | |
4935 | for (current_block = 0; current_block < n_basic_blocks; current_block++) | |
4936 | { | |
4937 | rtx insn, stop_insn; | |
4938 | ||
4939 | /* Scan each insn in the basic block looking for memory references and | |
4940 | register sets. */ | |
4941 | stop_insn = NEXT_INSN (BLOCK_END (current_block)); | |
4942 | for (insn = BLOCK_HEAD (current_block); | |
4943 | insn != stop_insn; | |
4944 | insn = NEXT_INSN (insn)) | |
4945 | { | |
4946 | rtx set; | |
4947 | ||
4948 | /* Ignore anything that is not a normal insn. */ | |
4949 | if (GET_RTX_CLASS (GET_CODE (insn)) != 'i') | |
4950 | continue; | |
4951 | ||
4952 | /* Basically ignore anything that is not a simple SET. We do have | |
4953 | to make sure to invalidate nonnull_local and set nonnull_killed | |
4954 | for such insns though. */ | |
4955 | set = single_set (insn); | |
4956 | if (!set) | |
4957 | { | |
4958 | note_stores (PATTERN (insn), invalidate_nonnull_info); | |
4959 | continue; | |
4960 | } | |
4961 | ||
4962 | /* See if we've got a usable memory load. We handle it first
4963 | in case it uses its address register as a dest (which kills | |
4964 | the nonnull property). */ | |
4965 | if (GET_CODE (SET_SRC (set)) == MEM | |
4966 | && GET_CODE (XEXP (SET_SRC (set), 0)) == REG | |
4967 | && REGNO (XEXP (SET_SRC (set), 0)) >= FIRST_PSEUDO_REGISTER) | |
4968 | SET_BIT (nonnull_local[current_block], | |
4969 | REGNO (XEXP (SET_SRC (set), 0))); | |
4970 | ||
4971 | /* Now invalidate stuff clobbered by this insn. */ | |
4972 | note_stores (PATTERN (insn), invalidate_nonnull_info); | |
4973 | ||
4974 | /* And handle stores; we do these last, since any sets in INSN can
4975 | not kill the nonnull property if it is derived from a MEM | |
4976 | appearing in a SET_DEST. */ | |
4977 | if (GET_CODE (SET_DEST (set)) == MEM | |
4978 | && GET_CODE (XEXP (SET_DEST (set), 0)) == REG) | |
4979 | SET_BIT (nonnull_local[current_block], | |
4980 | REGNO (XEXP (SET_DEST (set), 0))); | |
4981 | } | |
4982 | } | |
4983 | ||
4984 | /* Now compute global properties based on the local properties. This | |
4985 | is a classic global availability algorithm. */
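/* Restated as the usual availability equations (bb 0 is the entry
   block, so AVIN[0] is empty):

     AVIN[bb]  = intersection of AVOUT[p] over all predecessors p
     AVOUT[bb] = NONNULL_LOCAL[bb] | (AVIN[bb] & ~NONNULL_KILLED[bb])

   sbitmap_union_of_diff computes the AVOUT equation directly.  */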
4986 | sbitmap_zero (nonnull_avin[0]); | |
4987 | sbitmap_vector_ones (nonnull_avout, n_basic_blocks); | |
4988 | changed = 1; | |
4989 | while (changed) | |
4990 | { | |
4991 | changed = 0; | |
4992 | ||
4993 | for (bb = 0; bb < n_basic_blocks; bb++) | |
4994 | { | |
4995 | if (bb != 0) | |
4996 | sbitmap_intersect_of_predecessors (nonnull_avin[bb], | |
4997 | nonnull_avout, bb, s_preds); | |
4998 | ||
4999 | changed |= sbitmap_union_of_diff (nonnull_avout[bb], | |
5000 | nonnull_local[bb], | |
5001 | nonnull_avin[bb], | |
5002 | nonnull_killed[bb]); | |
5003 | } | |
5004 | } | |
5005 | ||
5006 | /* Now look at each bb and see if it ends with a compare of a value | |
5007 | against zero. */ | |
5008 | for (bb = 0; bb < n_basic_blocks; bb++) | |
5009 | { | |
5010 | rtx last_insn = BLOCK_END (bb); | |
5011 | rtx condition, earliest, reg; | |
5012 | int compare_and_branch; | |
5013 | ||
5014 | /* We only want conditional branches. */ | |
5015 | if (GET_CODE (last_insn) != JUMP_INSN | |
5016 | || !condjump_p (last_insn) | |
5017 | || simplejump_p (last_insn)) | |
5018 | continue; | |
5019 | ||
5020 | /* LAST_INSN is a conditional jump. Get its condition. */ | |
5021 | condition = get_condition (last_insn, &earliest); | |
5022 | ||
5023 | /* If we were unable to get the condition, or it is not an equality
5024 | comparison against zero, then there's nothing we can do. */
5025 | if (!condition | |
5026 | || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ) | |
5027 | || GET_CODE (XEXP (condition, 1)) != CONST_INT | |
5028 | || XEXP (condition, 1) != CONST0_RTX (GET_MODE (XEXP (condition, 0)))) | |
5029 | continue; | |
5030 | ||
5031 | /* We must be checking a register against zero. */ | |
5032 | reg = XEXP (condition, 0); | |
5033 | if (GET_CODE (reg) != REG) | |
5034 | continue; | |
5035 | ||
5036 | /* Is the register known to have a nonzero value? */ | |
5037 | if (!TEST_BIT (nonnull_avout[bb], REGNO (reg))) | |
5038 | continue; | |
5039 | ||
5040 | /* Try to compute whether the compare/branch at the end of this block
5041 | is one or two instructions. */
5042 | if (earliest == last_insn) | |
5043 | compare_and_branch = 1; | |
5044 | else if (earliest == prev_nonnote_insn (last_insn)) | |
5045 | compare_and_branch = 2; | |
5046 | else | |
5047 | continue; | |
5048 | ||
5049 | /* We know the register in this comparison is nonnull at exit from | |
5050 | this block. We can optimize this comparison. */ | |
5051 | if (GET_CODE (condition) == NE) | |
5052 | { | |
5053 | rtx new_jump; | |
5054 | ||
5055 | new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)), | |
5056 | last_insn); | |
5057 | JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn); | |
5058 | LABEL_NUSES (JUMP_LABEL (new_jump))++; | |
5059 | emit_barrier_after (new_jump); | |
5060 | } | |
5061 | delete_insn (last_insn); | |
5062 | if (compare_and_branch == 2) | |
5063 | delete_insn (earliest); | |
5064 | } | |
5065 | ||
5066 | /* Free storage allocated by find_basic_blocks. */ | |
5067 | free_basic_block_vars (0); | |
5068 | ||
5069 | /* Free bitmaps. */ | |
5070 | free (nonnull_local); | |
5071 | free (nonnull_killed); | |
5072 | free (nonnull_avin); | |
5073 | free (nonnull_avout); | |
5074 | } | |
bb457bd9 JL |
5075 | |
5076 | /* Code Hoisting variables and subroutines. */ | |
5077 | ||
5078 | /* Very busy expressions. */ | |
5079 | static sbitmap *hoist_vbein; | |
5080 | static sbitmap *hoist_vbeout; | |
5081 | ||
5082 | /* Hoistable expressions. */ | |
5083 | static sbitmap *hoist_exprs; | |
5084 | ||
5085 | /* Dominator bitmaps. */ | |
5086 | static sbitmap *dominators; | |
5087 | static sbitmap *post_dominators; | |
5088 | ||
5089 | /* ??? We could compute post dominators and run this algorithm in | |
5090 | reverse to perform tail merging; doing so would probably be
5091 | more effective than the tail merging code in jump.c. | |
5092 | ||
5093 | It's unclear if tail merging could be run in parallel with | |
5094 | code hoisting. It would be nice. */ | |
5095 | ||
5096 | /* Allocate vars used for code hoisting analysis. */ | |
5097 | ||
5098 | static void | |
5099 | alloc_code_hoist_mem (n_blocks, n_exprs) | |
5100 | int n_blocks, n_exprs; | |
5101 | { | |
5102 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5103 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5104 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5105 | ||
5106 | hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5107 | hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5108 | hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5109 | transpout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5110 | ||
5111 | dominators = sbitmap_vector_alloc (n_blocks, n_blocks); | |
5112 | post_dominators = sbitmap_vector_alloc (n_blocks, n_blocks); | |
5113 | } | |
5114 | ||
5115 | /* Free vars used for code hoisting analysis. */ | |
5116 | ||
5117 | static void | |
5118 | free_code_hoist_mem () | |
5119 | { | |
5120 | free (antloc); | |
5121 | free (transp); | |
5122 | free (comp); | |
5123 | ||
5124 | free (hoist_vbein); | |
5125 | free (hoist_vbeout); | |
5126 | free (hoist_exprs); | |
5127 | free (transpout); | |
5128 | ||
5129 | free (dominators); | |
5130 | free (post_dominators); | |
5131 | } | |
5132 | ||
5133 | /* Compute the very busy expressions at entry/exit from each block. | |
5134 | ||
5135 | An expression is very busy if all paths from a given point | |
5136 | compute the expression. */ | |
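/* The backwards dataflow equations solved by the loop below are:

     VBEIN[bb]  = ANTLOC[bb] | (VBEOUT[bb] & TRANSP[bb])
     VBEOUT[bb] = intersection of VBEIN[s] over all successors s
                  (VBEOUT of the last block starts empty)

   sbitmap_a_or_b_and_c computes the VBEIN equation directly.  */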
5137 | ||
5138 | static void | |
5139 | compute_code_hoist_vbeinout () | |
5140 | { | |
5141 | int bb, changed, passes; | |
5142 | ||
5143 | sbitmap_vector_zero (hoist_vbeout, n_basic_blocks); | |
5144 | sbitmap_vector_zero (hoist_vbein, n_basic_blocks); | |
5145 | ||
5146 | passes = 0; | |
5147 | changed = 1; | |
5148 | while (changed) | |
5149 | { | |
5150 | changed = 0; | |
5151 | /* We scan the blocks in reverse order to speed up
5152 | convergence. */
5153 | for (bb = n_basic_blocks - 1; bb >= 0; bb--) | |
5154 | { | |
5155 | changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb], | |
5156 | hoist_vbeout[bb], transp[bb]); | |
5157 | if (bb != n_basic_blocks - 1) | |
5158 | sbitmap_intersect_of_successors (hoist_vbeout[bb], hoist_vbein, | |
5159 | bb, s_succs); | |
5160 | } | |
5161 | passes++; | |
5162 | } | |
5163 | ||
5164 | if (gcse_file) | |
5165 | fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes); | |
5166 | } | |
5167 | ||
5168 | /* Top level routine to do the dataflow analysis needed by code hoisting. */ | |
5169 | ||
5170 | static void | |
5171 | compute_code_hoist_data () | |
5172 | { | |
5173 | compute_local_properties (transp, comp, antloc, 0); | |
5174 | compute_transpout (); | |
5175 | compute_code_hoist_vbeinout (); | |
5176 | compute_flow_dominators (dominators, post_dominators); | |
5177 | if (gcse_file) | |
5178 | fprintf (gcse_file, "\n"); | |
5179 | } | |
5180 | ||
5181 | /* Determine if the expression identified by EXPR_INDEX would | |
5182 | reach BB unimpaired if it were placed at the end of EXPR_BB.
5183 | ||
5184 | It's unclear exactly what Muchnick meant by "unimpaired". It seems
5185 | to me that the expression must either be computed or transparent in | |
5186 | *every* block in the path(s) from EXPR_BB to BB. Any other definition | |
5187 | would allow the expression to be hoisted out of loops, even if | |
5188 | the expression wasn't a loop invariant. | |
5189 | ||
5190 | Contrast this with reachability for PRE, where an expression is
5191 | considered reachable if it reaches along *any* path rather than
5192 | along *all* paths. */
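/* For example (illustrative): if EXPR_BB dominates BB but one path
   between them passes through a block where an operand of the
   expression is modified (not transparent) and the expression is not
   recomputed, then the expression does not reach BB, and it will not
   be hoisted across that kill.  Accepting *any* clean path here would
   let an expression migrate out of a loop whose body modifies its
   operands.  */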
5193 | ||
5194 | static int | |
5195 | hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited) | |
5196 | int expr_bb; | |
5197 | int expr_index; | |
5198 | int bb; | |
5199 | char *visited; | |
5200 | { | |
5201 | edge pred; | |
5202 | ||
5203 | if (visited == NULL) | |
5204 | { | |
5205 | visited = (char *) alloca (n_basic_blocks); | |
5206 | bzero (visited, n_basic_blocks); | |
5207 | } | |
5208 | ||
5209 | visited[expr_bb] = 1; | |
5210 | for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next) | |
5211 | { | |
5212 | int pred_bb = pred->src->index; | |
5213 | ||
5214 | if (pred->src == ENTRY_BLOCK_PTR) | |
5215 | break; | |
5216 | else if (visited[pred_bb]) | |
5217 | continue; | |
5218 | /* Does this predecessor generate this expression? */ | |
5219 | else if (TEST_BIT (comp[pred_bb], expr_index)) | |
5220 | break; | |
5221 | else if (! TEST_BIT (transp[pred_bb], expr_index)) | |
5222 | break; | |
5223 | /* Not killed. */ | |
5224 | else | |
5225 | { | |
5226 | visited[pred_bb] = 1; | |
5227 | if (! hoist_expr_reaches_here_p (expr_bb, expr_index, | |
5228 | pred_bb, visited)) | |
5229 | break; | |
5230 | } | |
5231 | } | |
5232 | ||
5233 | return (pred == NULL); | |
5234 | } | |
5235 | \f | |
5236 | /* Actually perform code hoisting. */ | |
5237 | static void | |
5238 | hoist_code () | |
5239 | { | |
5240 | int bb, dominated, i; | |
5241 | struct expr **index_map; | |
5242 | ||
5243 | sbitmap_vector_zero (hoist_exprs, n_basic_blocks); | |
5244 | ||
5245 | /* Compute a mapping from expression number (`bitmap_index') to | |
5246 | hash table entry. */ | |
5247 | ||
5248 | index_map = (struct expr **) alloca (n_exprs * sizeof (struct expr *)); | |
5249 | bzero ((char *) index_map, n_exprs * sizeof (struct expr *)); | |
5250 | for (i = 0; i < expr_hash_table_size; i++) | |
5251 | { | |
5252 | struct expr *expr; | |
5253 | ||
5254 | for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash) | |
5255 | index_map[expr->bitmap_index] = expr; | |
5256 | } | |
5257 | ||
5258 | /* Walk over each basic block looking for potentially hoistable | |
5259 | expressions; nothing gets hoisted from the entry block.
5260 | for (bb = 0; bb < n_basic_blocks; bb++) | |
5261 | { | |
5262 | int found = 0; | |
5263 | int insn_inserted_p; | |
5264 | ||
5265 | /* Examine each expression that is very busy at the exit of this | |
5266 | block. These are the potentially hoistable expressions. */ | |
5267 | for (i = 0; i < hoist_vbeout[bb]->n_bits; i++) | |
5268 | { | |
5269 | int hoistable = 0; | |
5270 | if (TEST_BIT (hoist_vbeout[bb], i) | |
5271 | && TEST_BIT (transpout[bb], i)) | |
5272 | { | |
5273 | /* We've found a potentially hoistable expression; now
5274 | we look at every block BB dominates to see if it | |
5275 | computes the expression. */ | |
5276 | for (dominated = 0; dominated < n_basic_blocks; dominated++) | |
5277 | { | |
5278 | /* Ignore self dominance. */ | |
5279 | if (bb == dominated | |
5280 | || ! TEST_BIT (dominators[dominated], bb)) | |
5281 | continue; | |
5282 | ||
5283 | /* We've found a dominated block; now see if it computes
5284 | the busy expression and whether or not moving that | |
5285 | expression to the "beginning" of that block is safe. */ | |
5286 | if (!TEST_BIT (antloc[dominated], i)) | |
5287 | continue; | |
5288 | ||
5289 | /* Note if the expression would reach the dominated block | |
5290 | unimpaired if it were placed at the end of BB.
5291 | ||
5292 | Keep track of how many times this expression is hoistable | |
5293 | from a dominated block into BB. */ | |
5294 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) | |
5295 | hoistable++; | |
5296 | } | |
5297 | ||
5298 | /* If we found more than one hoistable occurrence of this
5299 | expression, then note it in the bitmap of expressions to | |
5300 | hoist. It makes no sense to hoist things which are computed | |
5301 | in only one BB, and doing so tends to pessimize register | |
5302 | allocation. One could increase this value to try harder | |
5303 | to avoid any possible code expansion due to register | |
5304 | allocation issues; however, experiments have shown that
5305 | the vast majority of hoistable expressions are only movable | |
5306 | from two successors, so raising this threshold is likely
5307 | to nullify any benefit we get from code hoisting. */ | |
5308 | if (hoistable > 1) | |
5309 | { | |
5310 | SET_BIT (hoist_exprs[bb], i); | |
5311 | found = 1; | |
5312 | } | |
5313 | } | |
5314 | } | |
5315 | ||
5316 | /* If we found nothing to hoist, then quit now. */ | |
5317 | if (! found) | |
5318 | continue; | |
5319 | ||
5320 | /* Loop over all the hoistable expressions. */ | |
5321 | for (i = 0; i < hoist_exprs[bb]->n_bits; i++) | |
5322 | { | |
5323 | /* We want to insert the expression into BB only once, so | |
5324 | note when we've inserted it. */ | |
5325 | insn_inserted_p = 0; | |
5326 | ||
5327 | /* These tests should be the same as the tests above. */ | |
5328 | if (TEST_BIT (hoist_vbeout[bb], i)) | |
5329 | { | |
5330 | /* We've found a potentially hoistable expression; now
5331 | we look at every block BB dominates to see if it | |
5332 | computes the expression. */ | |
5333 | for (dominated = 0; dominated < n_basic_blocks; dominated++) | |
5334 | { | |
5335 | /* Ignore self dominance. */ | |
5336 | if (bb == dominated | |
5337 | || ! TEST_BIT (dominators[dominated], bb)) | |
5338 | continue; | |
5339 | ||
5340 | /* We've found a dominated block; now see if it computes
5341 | the busy expression and whether or not moving that | |
5342 | expression to the "beginning" of that block is safe. */ | |
5343 | if (!TEST_BIT (antloc[dominated], i)) | |
5344 | continue; | |
5345 | ||
5346 | /* The expression is computed in the dominated block and | |
5347 | it would be safe to compute it at the start of the | |
5348 | dominated block. Now we have to determine if the | |
5349 | expression would reach the dominated block if it were
5350 | placed at the end of BB. */ | |
5351 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) | |
5352 | { | |
5353 | struct expr *expr = index_map[i]; | |
5354 | struct occr *occr = expr->antic_occr; | |
5355 | rtx insn; | |
5356 | rtx set; | |
5357 | ||
5358 | ||
5359 | /* Find the right occurrence of this expression. */
5360 | while (occr && BLOCK_NUM (occr->insn) != dominated)
5361 | occr = occr->next; | |
5362 | ||
5363 | /* Should never happen. */ | |
5364 | if (!occr) | |
5365 | abort (); | |
5366 | ||
5367 | insn = occr->insn; | |
5368 | ||
5369 | set = single_set (insn); | |
5370 | if (! set) | |
5371 | abort (); | |
5372 | ||
5373 | /* Create a pseudo-reg to store the result of reaching | |
5374 | expressions into. Get the mode for the new pseudo | |
5375 | from the mode of the original destination pseudo. */ | |
5376 | if (expr->reaching_reg == NULL) | |
5377 | expr->reaching_reg | |
5378 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
5379 | ||
5380 | /* In theory this should never fail since we're creating | |
5381 | a reg->reg copy. | |
5382 | ||
5383 | However, on the x86 some of the movXX patterns actually | |
5384 | contain clobbers of scratch regs. This may cause the | |
5385 | insn created by validate_change to not match any | |
5386 | pattern and thus cause validate_change to fail. */ | |
5387 | if (validate_change (insn, &SET_SRC (set), | |
5388 | expr->reaching_reg, 0)) | |
5389 | { | |
5390 | occr->deleted_p = 1; | |
5391 | if (!insn_inserted_p) | |
5392 | { | |
5393 | insert_insn_end_bb (index_map[i], bb, 0); | |
5394 | insn_inserted_p = 1; | |
5395 | } | |
5396 | } | |
5397 | } | |
5398 | } | |
5399 | } | |
5400 | } | |
5401 | } | |
5402 | } | |
5403 | ||
5404 | /* Top level routine to perform one code hoisting (aka unification) pass.
5405 | ||
5406 | Return non-zero if a change was made. */ | |
5407 | ||
5408 | static int | |
5409 | one_code_hoisting_pass () | |
5410 | { | |
5411 | int changed = 0; | |
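/* ??? hoist_code does not report whether it modified anything, so
   CHANGED is never set and this pass currently always reports that
   no change was made.  */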
5412 | ||
5413 | alloc_expr_hash_table (max_cuid); | |
5414 | compute_expr_hash_table (); | |
5415 | if (gcse_file) | |
5416 | dump_hash_table (gcse_file, "Code Hosting Expressions", expr_hash_table, | |
5417 | expr_hash_table_size, n_exprs); | |
5418 | if (n_exprs > 0) | |
5419 | { | |
5420 | alloc_code_hoist_mem (n_basic_blocks, n_exprs); | |
5421 | compute_code_hoist_data (); | |
5422 | hoist_code (); | |
5423 | free_code_hoist_mem (); | |
5424 | } | |
5425 | free_expr_hash_table (); | |
5426 | ||
5427 | return changed; | |
5428 | } |