/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
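/* Editor's illustration (not part of the original sources): a typical
   candidate expression is the SET_SRC of an insn such as

       (set (reg:SI 105) (plus:SI (reg:SI 100) (const_int 4)))

   Here the (plus ...) is the expression GCSE tracks and pseudo 105 holds
   its value; want_to_gcse_p rejects bare REGs, SUBREGs, constants and
   CALLs as not worth GCSE-ing.  */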
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static unsigned int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

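/* Editor's example (not part of the original sources): if the only real
   insns in the function have UIDs 3, 7 and 12, in that order, they are
   given CUIDs 0, 1 and 2; thus uid_cuid[7] == 1 and CUID_INSN (1) is the
   insn whose UID is 7.  */
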
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

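/* Editor's usage sketch (hypothetical, not part of gcse.c): to visit every
   insn that sets pseudo REGNO one walks the chain

       struct reg_set *r;
       for (r = reg_set_table[regno]; r != NULL; r = r->next)
         ... r->insn is one setter of the pseudo ...

   which is why compute_transp can iterate over the sets of a pseudo rather
   than over every basic block.  */
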
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself. (ie, loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance. (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
                                          int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
                                     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
                                              int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((rtx, rtx, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
#endif
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
                                            basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
                                              char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
                                        basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((varray_type *, unsigned int *,
                                                  sbitmap *, sbitmap *,
                                                  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
                                             basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
                                                 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *,
                                  basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* Insertion of instructions on edges can create new basic blocks; we
     need the original basic block count so that we can properly deallocate
     arrays sized on the number of basic blocks originally in the cfg.  */
  int orig_bb_count;
  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  orig_bb_count = n_basic_blocks;
  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              int i;

              for (i = 0; i < orig_bb_count; i++)
                {
                  if (modify_mem_list[i])
                    free_INSN_LIST_list (modify_mem_list + i);
                  if (canon_modify_mem_list[i])
                    free_INSN_LIST_list (canon_modify_mem_list + i);
                }
              modify_mem_list
                = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
              canon_modify_mem_list
                = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
              memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
              memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
              orig_bb_count = n_basic_blocks;
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg,insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy_p[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i,n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
                                                       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
  memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free (mem_set_in_block);
  /* re-Cache any INSN_LIST nodes we have allocated.  */
  {
    int i;

    for (i = 0; i < n_basic_blocks; i++)
      {
        if (modify_mem_list[i])
          free_INSN_LIST_list (modify_mem_list + i);
        if (canon_modify_mem_list[i])
          free_INSN_LIST_list (canon_modify_mem_list + i);
      }

    free (modify_mem_list);
    free (canon_modify_mem_list);
    modify_mem_list = 0;
    canon_modify_mem_list = 0;
  }
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
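/* Editor's usage sketch (hypothetical caller, not part of gcse.c): with two
   bitmap vectors to allocate, a caller would chunk the Y dimension like so

       int width = get_bitmap_width (2, n_basic_blocks, n_exprs);
       int first;
       for (first = 0; first < n_exprs; first += width)
         ... solve the equations for expressions [first, first + width) ...

   trading a smaller peak sbitmap allocation for more passes over the
   expressions.  */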
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
        sbitmap_vector_zero (transp, n_basic_blocks);
      else
        sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, setp);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to non-zero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to non-zero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
#define NEVER_SET -1
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.

   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  static rtx test_insn = 0;
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
      ggc_add_rtx_root (&test_insn, 1);
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

1381/* Return non-zero if the operands of expression X are unchanged from the
1382 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1383 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1384
1385static int
1386oprs_unchanged_p (x, insn, avail_p)
1387 rtx x, insn;
1388 int avail_p;
1389{
c4c81601 1390 int i, j;
7506f491 1391 enum rtx_code code;
6f7d635c 1392 const char *fmt;
7506f491 1393
7506f491
DE
1394 if (x == 0)
1395 return 1;
1396
1397 code = GET_CODE (x);
1398 switch (code)
1399 {
1400 case REG:
1401 if (avail_p)
b86ba9c8 1402 return (reg_last_set[REGNO (x)] == NEVER_SET
7506f491
DE
1403 || reg_last_set[REGNO (x)] < INSN_CUID (insn));
1404 else
b86ba9c8 1405 return (reg_first_set[REGNO (x)] == NEVER_SET
7506f491
DE
1406 || reg_first_set[REGNO (x)] >= INSN_CUID (insn));
1407
1408 case MEM:
e2d2ed72 1409 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn), INSN_CUID (insn),
a13d4ebf
AM
1410 x, avail_p))
1411 return 0;
c4c81601
RK
1412 if (avail_p && mem_last_set != NEVER_SET
1413 && mem_last_set >= INSN_CUID (insn))
1414 return 0;
1415 else if (! avail_p && mem_first_set != NEVER_SET
1416 && mem_first_set < INSN_CUID (insn))
1417 return 0;
7506f491 1418 else
c4c81601 1419 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
7506f491
DE
1420
1421 case PRE_DEC:
1422 case PRE_INC:
1423 case POST_DEC:
1424 case POST_INC:
4b983fdc
RH
1425 case PRE_MODIFY:
1426 case POST_MODIFY:
7506f491
DE
1427 return 0;
1428
1429 case PC:
1430 case CC0: /*FIXME*/
1431 case CONST:
1432 case CONST_INT:
1433 case CONST_DOUBLE:
1434 case SYMBOL_REF:
1435 case LABEL_REF:
1436 case ADDR_VEC:
1437 case ADDR_DIFF_VEC:
1438 return 1;
1439
1440 default:
1441 break;
1442 }
1443
c4c81601 1444 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1445 {
1446 if (fmt[i] == 'e')
1447 {
c4c81601
RK
1448 /* If we are about to do the last recursive call needed at this
1449 level, change it into iteration. This function is called enough
1450 to be worth it. */
7506f491 1451 if (i == 0)
c4c81601
RK
1452 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1453
1454 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
7506f491
DE
1455 return 0;
1456 }
1457 else if (fmt[i] == 'E')
c4c81601
RK
1458 for (j = 0; j < XVECLEN (x, i); j++)
1459 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1460 return 0;
7506f491
DE
1461 }
1462
1463 return 1;
1464}
1465
a13d4ebf
AM
1466/* Used for communication between mems_conflict_for_gcse_p and
1467 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1468 conflict between two memory references. */
1469static int gcse_mems_conflict_p;
1470
1471/* Used for communication between mems_conflict_for_gcse_p and
1472 load_killed_in_block_p. A memory reference for a load instruction,
1473 mems_conflict_for_gcse_p will see if a memory store conflicts with
1474 this memory load. */
1475static rtx gcse_mem_operand;
1476
1477/* DEST is the output of an instruction. If it is a memory reference, and
1478 possibly conflicts with the load found in gcse_mem_operand, then set
1479 gcse_mems_conflict_p to a nonzero value. */
1480
1481static void
1482mems_conflict_for_gcse_p (dest, setter, data)
1483 rtx dest, setter ATTRIBUTE_UNUSED;
1484 void *data ATTRIBUTE_UNUSED;
1485{
1486 while (GET_CODE (dest) == SUBREG
1487 || GET_CODE (dest) == ZERO_EXTRACT
1488 || GET_CODE (dest) == SIGN_EXTRACT
1489 || GET_CODE (dest) == STRICT_LOW_PART)
1490 dest = XEXP (dest, 0);
1491
1492 /* If DEST is not a MEM, then it will not conflict with the load. Note
1493 that function calls are assumed to clobber memory, but are handled
1494 elsewhere. */
1495 if (GET_CODE (dest) != MEM)
1496 return;
1497
1498 /* If we are setting a MEM in our list of specially recognized MEMs,
1499 don't mark as killed this time. */
1500
1501 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1502 {
1503 if (!find_rtx_in_ldst (dest))
1504 gcse_mems_conflict_p = 1;
1505 return;
1506 }
1507
1508 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1509 rtx_addr_varies_p))
1510 gcse_mems_conflict_p = 1;
1511}
1512
1513/* Return nonzero if the expression in X (a memory reference) is killed
1514 in block BB before or after the insn with the CUID in UID_LIMIT.
1515 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1516 before UID_LIMIT.
1517
1518 To check the entire block, set UID_LIMIT to max_uid + 1 and
1519 AVAIL_P to 0. */
1520
1521static int
1522load_killed_in_block_p (bb, uid_limit, x, avail_p)
e2d2ed72 1523 basic_block bb;
a13d4ebf
AM
1524 int uid_limit;
1525 rtx x;
1526 int avail_p;
1527{
e2d2ed72 1528 rtx list_entry = modify_mem_list[bb->index];
a13d4ebf
AM
1529 while (list_entry)
1530 {
1531 rtx setter;
1532 /* Ignore entries in the list that do not apply. */
1533 if ((avail_p
1534 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1535 || (! avail_p
1536 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1537 {
1538 list_entry = XEXP (list_entry, 1);
1539 continue;
1540 }
1541
1542 setter = XEXP (list_entry, 0);
1543
1544 /* If SETTER is a call everything is clobbered. Note that calls
1545 to pure functions are never put on the list, so we need not
1546 worry about them. */
1547 if (GET_CODE (setter) == CALL_INSN)
1548 return 1;
1549
1550 /* SETTER must be an INSN of some kind that sets memory. Call
1551 note_stores to examine each hunk of memory that is modified.
1552
1553 The note_stores interface is pretty limited, so we have to
1554 communicate via global variables. Yuk. */
1555 gcse_mem_operand = x;
1556 gcse_mems_conflict_p = 0;
1557 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1558 if (gcse_mems_conflict_p)
1559 return 1;
1560 list_entry = XEXP (list_entry, 1);
1561 }
1562 return 0;
1563}
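/* Illustration of the two calling conventions described above; these mirror
   the actual call sites elsewhere in this file, with INSN, X and BB used as
   placeholders:

     load_killed_in_block_p (BLOCK_FOR_INSN (insn), INSN_CUID (insn), x, 1)
       -- is the load X killed between INSN and the end of INSN's block
          (availability check)?

     load_killed_in_block_p (bb, get_max_uid () + 1, x, 0)
       -- is the load X killed anywhere in block BB (whole-block check)?  */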
1564
7506f491
DE
1565/* Return non-zero if the operands of expression X are unchanged from
1566 the start of INSN's basic block up to but not including INSN. */
1567
1568static int
1569oprs_anticipatable_p (x, insn)
1570 rtx x, insn;
1571{
1572 return oprs_unchanged_p (x, insn, 0);
1573}
1574
1575/* Return non-zero if the operands of expression X are unchanged from
1576 INSN to the end of INSN's basic block. */
1577
1578static int
1579oprs_available_p (x, insn)
1580 rtx x, insn;
1581{
1582 return oprs_unchanged_p (x, insn, 1);
1583}
1584
1585/* Hash expression X.
c4c81601
RK
1586
1587 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1588 indicating if a volatile operand is found or if the expression contains
1589 something we don't want to insert in the table.
7506f491
DE
1590
1591 ??? One might want to merge this with canon_hash. Later. */
1592
1593static unsigned int
1594hash_expr (x, mode, do_not_record_p, hash_table_size)
1595 rtx x;
1596 enum machine_mode mode;
1597 int *do_not_record_p;
1598 int hash_table_size;
1599{
1600 unsigned int hash;
1601
1602 *do_not_record_p = 0;
1603
1604 hash = hash_expr_1 (x, mode, do_not_record_p);
1605 return hash % hash_table_size;
1606}
172890a2 1607
6462bb43 1608/* Hash a string. Just add its bytes up. */
172890a2 1609
6462bb43
AO
1610static inline unsigned
1611hash_string_1 (ps)
1612 const char *ps;
1613{
1614 unsigned hash = 0;
1615 const unsigned char *p = (const unsigned char *)ps;
1616
1617 if (p)
1618 while (*p)
1619 hash += *p++;
1620
1621 return hash;
1622}
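/* A minimal standalone sketch (not compiled into gcse.c; guarded out below)
   illustrating the additive hash above: because it only sums bytes, anagrams
   collide, e.g. "abc" and "cba" hash to the same value.  That is acceptable
   here since the result is only used to spread strings across hash buckets
   and collisions fall back to a full equivalence check.  */
#if 0
#include <stdio.h>

static unsigned
hash_string_sketch (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

int
main (void)
{
  /* Prints the same value twice (294 for ASCII "abc" and "cba").  */
  printf ("%u %u\n", hash_string_sketch ("abc"), hash_string_sketch ("cba"));
  return 0;
}
#endif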
7506f491
DE
1623
1624/* Subroutine of hash_expr to do the actual work. */
1625
1626static unsigned int
1627hash_expr_1 (x, mode, do_not_record_p)
1628 rtx x;
1629 enum machine_mode mode;
1630 int *do_not_record_p;
1631{
1632 int i, j;
1633 unsigned hash = 0;
1634 enum rtx_code code;
6f7d635c 1635 const char *fmt;
7506f491 1636
c4c81601
RK
1637 /* Used to turn recursion into iteration. We can't rely on GCC's
1638 tail-recursion elimination since we need to keep accumulating values
1639 in HASH. */
7506f491
DE
1640
1641 if (x == 0)
1642 return hash;
1643
c4c81601 1644 repeat:
7506f491
DE
1645 code = GET_CODE (x);
1646 switch (code)
1647 {
1648 case REG:
c4c81601
RK
1649 hash += ((unsigned int) REG << 7) + REGNO (x);
1650 return hash;
7506f491
DE
1651
1652 case CONST_INT:
c4c81601
RK
1653 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1654 + (unsigned int) INTVAL (x));
1655 return hash;
7506f491
DE
1656
1657 case CONST_DOUBLE:
1658 /* This is like the general case, except that it only counts
1659 the integers representing the constant. */
c4c81601 1660 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
7506f491
DE
1661 if (GET_MODE (x) != VOIDmode)
1662 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1663 hash += (unsigned int) XWINT (x, i);
7506f491 1664 else
c4c81601
RK
1665 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1666 + (unsigned int) CONST_DOUBLE_HIGH (x));
7506f491
DE
1667 return hash;
1668
1669 /* Assume there is only one rtx object for any given label. */
1670 case LABEL_REF:
1671 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1672 differences and differences between each stage's debugging dumps. */
c4c81601
RK
1673 hash += (((unsigned int) LABEL_REF << 7)
1674 + CODE_LABEL_NUMBER (XEXP (x, 0)));
7506f491
DE
1675 return hash;
1676
1677 case SYMBOL_REF:
1678 {
1679 /* Don't hash on the symbol's address to avoid bootstrap differences.
1680 Different hash values may cause expressions to be recorded in
1681 different orders and thus different registers to be used in the
1682 final assembler. This also avoids differences in the dump files
1683 between various stages. */
1684 unsigned int h = 0;
3cce094d 1685 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1686
7506f491
DE
1687 while (*p)
1688 h += (h << 7) + *p++; /* ??? revisit */
c4c81601
RK
1689
1690 hash += ((unsigned int) SYMBOL_REF << 7) + h;
7506f491
DE
1691 return hash;
1692 }
1693
1694 case MEM:
1695 if (MEM_VOLATILE_P (x))
1696 {
1697 *do_not_record_p = 1;
1698 return 0;
1699 }
c4c81601
RK
1700
1701 hash += (unsigned int) MEM;
297c3335 1702 hash += MEM_ALIAS_SET (x);
7506f491
DE
1703 x = XEXP (x, 0);
1704 goto repeat;
1705
1706 case PRE_DEC:
1707 case PRE_INC:
1708 case POST_DEC:
1709 case POST_INC:
1710 case PC:
1711 case CC0:
1712 case CALL:
1713 case UNSPEC_VOLATILE:
1714 *do_not_record_p = 1;
1715 return 0;
1716
1717 case ASM_OPERANDS:
1718 if (MEM_VOLATILE_P (x))
1719 {
1720 *do_not_record_p = 1;
1721 return 0;
1722 }
6462bb43
AO
1723 else
1724 {
1725 /* We don't want to take the filename and line into account. */
1726 hash += (unsigned) code + (unsigned) GET_MODE (x)
1727 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1728 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1729 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1730
1731 if (ASM_OPERANDS_INPUT_LENGTH (x))
1732 {
1733 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1734 {
1735 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1736 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1737 do_not_record_p)
1738 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1739 (x, i)));
1740 }
1741
1742 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1743 x = ASM_OPERANDS_INPUT (x, 0);
1744 mode = GET_MODE (x);
1745 goto repeat;
1746 }
1747 return hash;
1748 }
7506f491
DE
1749
1750 default:
1751 break;
1752 }
1753
7506f491 1754 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1755 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1756 {
1757 if (fmt[i] == 'e')
1758 {
7506f491
DE
1759 /* If we are about to do the last recursive call
1760 needed at this level, change it into iteration.
1761 This function is called enough to be worth it. */
1762 if (i == 0)
1763 {
c4c81601 1764 x = XEXP (x, i);
7506f491
DE
1765 goto repeat;
1766 }
c4c81601
RK
1767
1768 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
7506f491
DE
1769 if (*do_not_record_p)
1770 return 0;
1771 }
c4c81601 1772
7506f491
DE
1773 else if (fmt[i] == 'E')
1774 for (j = 0; j < XVECLEN (x, i); j++)
1775 {
1776 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1777 if (*do_not_record_p)
1778 return 0;
1779 }
c4c81601 1780
7506f491 1781 else if (fmt[i] == 's')
6462bb43 1782 hash += hash_string_1 (XSTR (x, i));
7506f491 1783 else if (fmt[i] == 'i')
c4c81601 1784 hash += (unsigned int) XINT (x, i);
7506f491
DE
1785 else
1786 abort ();
1787 }
1788
1789 return hash;
1790}
1791
1792/* Hash a set of register REGNO.
1793
c4c81601
RK
1794 Sets are hashed on the register that is set. This simplifies the PRE copy
1795 propagation code.
7506f491
DE
1796
1797 ??? May need to make things more elaborate. Later, as necessary. */
1798
1799static unsigned int
1800hash_set (regno, hash_table_size)
1801 int regno;
1802 int hash_table_size;
1803{
1804 unsigned int hash;
1805
1806 hash = regno;
1807 return hash % hash_table_size;
1808}
1809
1810/* Return non-zero if exp1 is equivalent to exp2.
1811 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1812
1813static int
1814expr_equiv_p (x, y)
1815 rtx x, y;
1816{
1817 register int i, j;
1818 register enum rtx_code code;
6f7d635c 1819 register const char *fmt;
7506f491
DE
1820
1821 if (x == y)
1822 return 1;
c4c81601 1823
7506f491
DE
1824 if (x == 0 || y == 0)
1825 return x == y;
1826
1827 code = GET_CODE (x);
1828 if (code != GET_CODE (y))
1829 return 0;
1830
1831 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1832 if (GET_MODE (x) != GET_MODE (y))
1833 return 0;
1834
1835 switch (code)
1836 {
1837 case PC:
1838 case CC0:
1839 return x == y;
1840
1841 case CONST_INT:
1842 return INTVAL (x) == INTVAL (y);
1843
1844 case LABEL_REF:
1845 return XEXP (x, 0) == XEXP (y, 0);
1846
1847 case SYMBOL_REF:
1848 return XSTR (x, 0) == XSTR (y, 0);
1849
1850 case REG:
1851 return REGNO (x) == REGNO (y);
1852
297c3335
RH
1853 case MEM:
1854 /* Can't merge two expressions in different alias sets, since we might
1855 decide that the expression is transparent in a block when it isn't,
1856 due to it being set with a different alias set. */
1857 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1858 return 0;
1859 break;
1860
7506f491
DE
1861 /* For commutative operations, check both orders. */
1862 case PLUS:
1863 case MULT:
1864 case AND:
1865 case IOR:
1866 case XOR:
1867 case NE:
1868 case EQ:
1869 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1870 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1871 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1872 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1873
6462bb43
AO
1874 case ASM_OPERANDS:
1875 /* We don't use the generic code below because we want to
1876 disregard filename and line numbers. */
1877
1878 /* A volatile asm isn't equivalent to any other. */
1879 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1880 return 0;
1881
1882 if (GET_MODE (x) != GET_MODE (y)
1883 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1884 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1885 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1886 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1887 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1888 return 0;
1889
1890 if (ASM_OPERANDS_INPUT_LENGTH (x))
1891 {
1892 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1893 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1894 ASM_OPERANDS_INPUT (y, i))
1895 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1896 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1897 return 0;
1898 }
1899
1900 return 1;
1901
7506f491
DE
1902 default:
1903 break;
1904 }
1905
1906 /* Compare the elements. If any pair of corresponding elements
1907 fail to match, return 0 for the whole thing. */
1908
1909 fmt = GET_RTX_FORMAT (code);
1910 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1911 {
1912 switch (fmt[i])
1913 {
1914 case 'e':
1915 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1916 return 0;
1917 break;
1918
1919 case 'E':
1920 if (XVECLEN (x, i) != XVECLEN (y, i))
1921 return 0;
1922 for (j = 0; j < XVECLEN (x, i); j++)
1923 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1924 return 0;
1925 break;
1926
1927 case 's':
1928 if (strcmp (XSTR (x, i), XSTR (y, i)))
1929 return 0;
1930 break;
1931
1932 case 'i':
1933 if (XINT (x, i) != XINT (y, i))
1934 return 0;
1935 break;
1936
1937 case 'w':
1938 if (XWINT (x, i) != XWINT (y, i))
1939 return 0;
1940 break;
1941
1942 case '0':
1943 break;
1944
1945 default:
1946 abort ();
1947 }
1948 }
1949
1950 return 1;
1951}
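/* Examples of the rules above (r5 and r6 are placeholder pseudo registers):
   (plus:SI r5 r6) is considered equivalent to (plus:SI r6 r5) because PLUS
   is handled by the commutative case, while (mult:SI r5 r6) and
   (mult:HI r5 r6) are not equivalent because their modes differ, and two
   MEMs in different alias sets are never merged.  */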
1952
1953/* Insert expression X in INSN in the hash table.
1954 If it is already present, record it as the last occurrence in INSN's
1955 basic block.
1956
1957 MODE is the mode of the value X is being stored into.
1958 It is only used if X is a CONST_INT.
1959
1960 ANTIC_P is non-zero if X is an anticipatable expression.
1961 AVAIL_P is non-zero if X is an available expression. */
1962
1963static void
1964insert_expr_in_table (x, mode, insn, antic_p, avail_p)
1965 rtx x;
1966 enum machine_mode mode;
1967 rtx insn;
1968 int antic_p, avail_p;
1969{
1970 int found, do_not_record_p;
1971 unsigned int hash;
1972 struct expr *cur_expr, *last_expr = NULL;
1973 struct occr *antic_occr, *avail_occr;
1974 struct occr *last_occr = NULL;
1975
1976 hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);
1977
1978 /* Do not insert expression in table if it contains volatile operands,
1979 or if hash_expr determines the expression is something we don't want
1980 to or can't handle. */
1981 if (do_not_record_p)
1982 return;
1983
1984 cur_expr = expr_hash_table[hash];
1985 found = 0;
1986
c4c81601 1987 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1988 {
1989 /* If the expression isn't found, save a pointer to the end of
1990 the list. */
1991 last_expr = cur_expr;
1992 cur_expr = cur_expr->next_same_hash;
1993 }
1994
1995 if (! found)
1996 {
1997 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1998 bytes_used += sizeof (struct expr);
1999 if (expr_hash_table[hash] == NULL)
c4c81601
RK
2000 /* This is the first pattern that hashed to this index. */
2001 expr_hash_table[hash] = cur_expr;
7506f491 2002 else
c4c81601
RK
2003 /* Add EXPR to end of this hash chain. */
2004 last_expr->next_same_hash = cur_expr;
2005
7506f491
DE
2006 /* Set the fields of the expr element. */
2007 cur_expr->expr = x;
2008 cur_expr->bitmap_index = n_exprs++;
2009 cur_expr->next_same_hash = NULL;
2010 cur_expr->antic_occr = NULL;
2011 cur_expr->avail_occr = NULL;
2012 }
2013
2014 /* Now record the occurrence(s). */
7506f491
DE
2015 if (antic_p)
2016 {
2017 antic_occr = cur_expr->antic_occr;
2018
2019 /* Search for another occurrence in the same basic block. */
2020 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2021 {
2022 /* If an occurrence isn't found, save a pointer to the end of
2023 the list. */
2024 last_occr = antic_occr;
2025 antic_occr = antic_occr->next;
2026 }
2027
2028 if (antic_occr)
c4c81601
RK
2029 /* Found another instance of the expression in the same basic block.
2030 Prefer the currently recorded one. We want the first one in the
2031 block and the block is scanned from start to end. */
2032 ; /* nothing to do */
7506f491
DE
2033 else
2034 {
2035 /* First occurrence of this expression in this basic block. */
2036 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2037 bytes_used += sizeof (struct occr);
2038 /* First occurrence of this expression in any block? */
2039 if (cur_expr->antic_occr == NULL)
2040 cur_expr->antic_occr = antic_occr;
2041 else
2042 last_occr->next = antic_occr;
c4c81601 2043
7506f491
DE
2044 antic_occr->insn = insn;
2045 antic_occr->next = NULL;
2046 }
2047 }
2048
2049 if (avail_p)
2050 {
2051 avail_occr = cur_expr->avail_occr;
2052
2053 /* Search for another occurrence in the same basic block. */
2054 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2055 {
2056 /* If an occurrence isn't found, save a pointer to the end of
2057 the list. */
2058 last_occr = avail_occr;
2059 avail_occr = avail_occr->next;
2060 }
2061
2062 if (avail_occr)
c4c81601
RK
2063 /* Found another instance of the expression in the same basic block.
2064 Prefer this occurrence to the currently recorded one. We want
2065 the last one in the block and the block is scanned from start
2066 to end. */
2067 avail_occr->insn = insn;
7506f491
DE
2068 else
2069 {
2070 /* First occurrence of this expression in this basic block. */
2071 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2072 bytes_used += sizeof (struct occr);
c4c81601 2073
7506f491
DE
2074 /* First occurrence of this expression in any block? */
2075 if (cur_expr->avail_occr == NULL)
2076 cur_expr->avail_occr = avail_occr;
2077 else
2078 last_occr->next = avail_occr;
c4c81601 2079
7506f491
DE
2080 avail_occr->insn = insn;
2081 avail_occr->next = NULL;
2082 }
2083 }
2084}
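/* Worked example of the occurrence policy above (E, I1 and I2 are
   placeholders): if expression E occurs in insns I1 and I2 of the same
   block, with I1 earlier (the block is scanned from start to end), then
   E's antic_occr chain keeps I1 (the first occurrence in the block) while
   its avail_occr chain keeps I2 (the last occurrence in the block).  */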
2085
2086/* Insert pattern X in INSN in the hash table.
2087 X is a SET of a reg to either another reg or a constant.
2088 If it is already present, record it as the last occurrence in INSN's
2089 basic block. */
2090
2091static void
2092insert_set_in_table (x, insn)
2093 rtx x;
2094 rtx insn;
2095{
2096 int found;
2097 unsigned int hash;
2098 struct expr *cur_expr, *last_expr = NULL;
2099 struct occr *cur_occr, *last_occr = NULL;
2100
2101 if (GET_CODE (x) != SET
2102 || GET_CODE (SET_DEST (x)) != REG)
2103 abort ();
2104
2105 hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);
2106
2107 cur_expr = set_hash_table[hash];
2108 found = 0;
2109
c4c81601 2110 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
2111 {
2112 /* If the expression isn't found, save a pointer to the end of
2113 the list. */
2114 last_expr = cur_expr;
2115 cur_expr = cur_expr->next_same_hash;
2116 }
2117
2118 if (! found)
2119 {
2120 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2121 bytes_used += sizeof (struct expr);
2122 if (set_hash_table[hash] == NULL)
c4c81601
RK
2123 /* This is the first pattern that hashed to this index. */
2124 set_hash_table[hash] = cur_expr;
7506f491 2125 else
c4c81601
RK
2126 /* Add EXPR to end of this hash chain. */
2127 last_expr->next_same_hash = cur_expr;
2128
7506f491
DE
2129 /* Set the fields of the expr element.
2130 We must copy X because it can be modified when copy propagation is
2131 performed on its operands. */
7506f491
DE
2132 cur_expr->expr = copy_rtx (x);
2133 cur_expr->bitmap_index = n_sets++;
2134 cur_expr->next_same_hash = NULL;
2135 cur_expr->antic_occr = NULL;
2136 cur_expr->avail_occr = NULL;
2137 }
2138
2139 /* Now record the occurrence. */
7506f491
DE
2140 cur_occr = cur_expr->avail_occr;
2141
2142 /* Search for another occurrence in the same basic block. */
2143 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2144 {
2145 /* If an occurrence isn't found, save a pointer to the end of
2146 the list. */
2147 last_occr = cur_occr;
2148 cur_occr = cur_occr->next;
2149 }
2150
2151 if (cur_occr)
c4c81601
RK
2152 /* Found another instance of the expression in the same basic block.
2153 Prefer this occurrence to the currently recorded one. We want the
2154 last one in the block and the block is scanned from start to end. */
2155 cur_occr->insn = insn;
7506f491
DE
2156 else
2157 {
2158 /* First occurrence of this expression in this basic block. */
2159 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2160 bytes_used += sizeof (struct occr);
c4c81601 2161
7506f491
DE
2162 /* First occurrence of this expression in any block? */
2163 if (cur_expr->avail_occr == NULL)
2164 cur_expr->avail_occr = cur_occr;
2165 else
2166 last_occr->next = cur_occr;
c4c81601 2167
7506f491
DE
2168 cur_occr->insn = insn;
2169 cur_occr->next = NULL;
2170 }
2171}
2172
c4c81601
RK
2173/* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is
2174 non-zero, this is for the assignment hash table, otherwise it is for the
2175 expression hash table. */
7506f491
DE
2176
2177static void
2178hash_scan_set (pat, insn, set_p)
2179 rtx pat, insn;
2180 int set_p;
2181{
2182 rtx src = SET_SRC (pat);
2183 rtx dest = SET_DEST (pat);
172890a2 2184 rtx note;
7506f491
DE
2185
2186 if (GET_CODE (src) == CALL)
2187 hash_scan_call (src, insn);
2188
172890a2 2189 else if (GET_CODE (dest) == REG)
7506f491 2190 {
172890a2 2191 unsigned int regno = REGNO (dest);
7506f491
DE
2192 rtx tmp;
2193
172890a2
RK
2194 /* If this is a single set and we are doing constant propagation,
2195 see if a REG_NOTE shows this equivalent to a constant. */
2196 if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2197 && CONSTANT_P (XEXP (note, 0)))
2198 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2199
7506f491
DE
2200 /* Only record sets of pseudo-regs in the hash table. */
2201 if (! set_p
2202 && regno >= FIRST_PSEUDO_REGISTER
2203 /* Don't GCSE something if we can't do a reg/reg copy. */
2204 && can_copy_p [GET_MODE (dest)]
2205 /* Is SET_SRC something we want to gcse? */
172890a2
RK
2206 && want_to_gcse_p (src)
2207 /* Don't CSE a nop. */
43e72072
JJ
2208 && ! set_noop_p (pat)
2209 /* Don't GCSE if it has an attached REG_EQUIV note.
2210 At this point only function parameters should have
2211 REG_EQUIV notes, and if the argument slot is used somewhere
2212 explicitly, it means the address of the parameter has been taken,
2213 so we should not extend the lifetime of the pseudo. */
2214 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2215 || GET_CODE (XEXP (note, 0)) != MEM))
7506f491
DE
2216 {
2217 /* An expression is not anticipatable if its operands are
52d76e11
RK
2218 modified before this insn or if this is not the only SET in
2219 this insn. */
2220 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491
DE
2221 /* An expression is not available if its operands are
2222 subsequently modified, including this insn. */
2223 int avail_p = oprs_available_p (src, insn);
c4c81601 2224
7506f491
DE
2225 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
2226 }
c4c81601 2227
7506f491
DE
2228 /* Record sets for constant/copy propagation. */
2229 else if (set_p
2230 && regno >= FIRST_PSEUDO_REGISTER
2231 && ((GET_CODE (src) == REG
2232 && REGNO (src) >= FIRST_PSEUDO_REGISTER
172890a2
RK
2233 && can_copy_p [GET_MODE (dest)]
2234 && REGNO (src) != regno)
e78d9500 2235 || GET_CODE (src) == CONST_INT
05f6f07c 2236 || GET_CODE (src) == SYMBOL_REF
e78d9500 2237 || GET_CODE (src) == CONST_DOUBLE)
7506f491
DE
2238 /* A copy is not available if its src or dest is subsequently
2239 modified. Here we want to search from INSN+1 on, but
2240 oprs_available_p searches from INSN on. */
2241 && (insn == BLOCK_END (BLOCK_NUM (insn))
2242 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2243 && oprs_available_p (pat, tmp))))
2244 insert_set_in_table (pat, insn);
2245 }
7506f491
DE
2246}
2247
2248static void
2249hash_scan_clobber (x, insn)
50b2596f 2250 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
7506f491
DE
2251{
2252 /* Currently nothing to do. */
2253}
2254
2255static void
2256hash_scan_call (x, insn)
50b2596f 2257 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
7506f491
DE
2258{
2259 /* Currently nothing to do. */
2260}
2261
2262/* Process INSN and add hash table entries as appropriate.
2263
2264 Only available expressions that set a single pseudo-reg are recorded.
2265
2266 Single sets in a PARALLEL could be handled, but it's an extra complication
2267 that isn't dealt with right now. The trick is handling the CLOBBERs that
2268 are also in the PARALLEL. Later.
2269
2270 If SET_P is non-zero, this is for the assignment hash table,
ed79bb3d
R
2271 otherwise it is for the expression hash table.
2272 If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
2273 not record any expressions. */
7506f491
DE
2274
2275static void
ed79bb3d 2276hash_scan_insn (insn, set_p, in_libcall_block)
7506f491
DE
2277 rtx insn;
2278 int set_p;
48e87cef 2279 int in_libcall_block;
7506f491
DE
2280{
2281 rtx pat = PATTERN (insn);
c4c81601 2282 int i;
7506f491 2283
172890a2
RK
2284 if (in_libcall_block)
2285 return;
2286
7506f491
DE
2287 /* Pick out the sets of INSN and for other forms of instructions record
2288 what's been modified. */
2289
172890a2
RK
2290 if (GET_CODE (pat) == SET)
2291 hash_scan_set (pat, insn, set_p);
7506f491 2292 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2293 for (i = 0; i < XVECLEN (pat, 0); i++)
2294 {
2295 rtx x = XVECEXP (pat, 0, i);
7506f491 2296
c4c81601 2297 if (GET_CODE (x) == SET)
172890a2 2298 hash_scan_set (x, insn, set_p);
c4c81601
RK
2299 else if (GET_CODE (x) == CLOBBER)
2300 hash_scan_clobber (x, insn);
2301 else if (GET_CODE (x) == CALL)
2302 hash_scan_call (x, insn);
2303 }
7506f491 2304
7506f491
DE
2305 else if (GET_CODE (pat) == CLOBBER)
2306 hash_scan_clobber (pat, insn);
2307 else if (GET_CODE (pat) == CALL)
2308 hash_scan_call (pat, insn);
2309}
2310
2311static void
2312dump_hash_table (file, name, table, table_size, total_size)
2313 FILE *file;
dff01034 2314 const char *name;
7506f491
DE
2315 struct expr **table;
2316 int table_size, total_size;
2317{
2318 int i;
2319 /* Flattened out table, so it's printed in proper order. */
4da896b2
MM
2320 struct expr **flat_table;
2321 unsigned int *hash_val;
c4c81601 2322 struct expr *expr;
4da896b2
MM
2323
2324 flat_table
2325 = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
2326 hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
7506f491 2327
7506f491 2328 for (i = 0; i < table_size; i++)
c4c81601
RK
2329 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2330 {
2331 flat_table[expr->bitmap_index] = expr;
2332 hash_val[expr->bitmap_index] = i;
2333 }
7506f491
DE
2334
2335 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2336 name, table_size, total_size);
2337
2338 for (i = 0; i < total_size; i++)
21318741
RK
2339 if (flat_table[i] != 0)
2340 {
a0ac9e5a 2341 expr = flat_table[i];
21318741
RK
2342 fprintf (file, "Index %d (hash value %d)\n ",
2343 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2344 print_rtl (file, expr->expr);
21318741
RK
2345 fprintf (file, "\n");
2346 }
7506f491
DE
2347
2348 fprintf (file, "\n");
4da896b2 2349
4da896b2
MM
2350 free (flat_table);
2351 free (hash_val);
7506f491
DE
2352}
2353
2354/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2355
7506f491
DE
2356 reg_first_set records the first place in the block where the register
2357 is set and is used to compute "anticipatability".
c4c81601 2358
7506f491
DE
2359 reg_last_set records the last place in the block where the register
2360 is set and is used to compute "availability".
c4c81601 2361
7506f491
DE
2362 reg_set_in_block records whether the register is set in the block
2363 and is used to compute "transparency". */
2364
2365static void
2366record_last_reg_set_info (insn, regno)
2367 rtx insn;
2368 int regno;
2369{
b86ba9c8 2370 if (reg_first_set[regno] == NEVER_SET)
7506f491 2371 reg_first_set[regno] = INSN_CUID (insn);
c4c81601 2372
7506f491
DE
2373 reg_last_set[regno] = INSN_CUID (insn);
2374 SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
2375}
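/* Worked example (R is a placeholder register): suppose R is set by insns
   with CUIDs 10 and 20 inside a block.  Then reg_first_set[R] == 10 and
   reg_last_set[R] == 20, so an expression using R at CUID 15 is neither
   anticipatable (reg_first_set[R] < 15) nor available
   (reg_last_set[R] >= 15); see oprs_unchanged_p above.  */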
2376
a13d4ebf
AM
2377
2378/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2379 Note we store a pair of elements in the list, so they have to be
2380 taken off pairwise. */
2381
2382static void
2383canon_list_insert (dest, unused1, v_insn)
2384 rtx dest ATTRIBUTE_UNUSED;
2385 rtx unused1 ATTRIBUTE_UNUSED;
2386 void * v_insn;
2387{
2388 rtx dest_addr, insn;
2389
2390 while (GET_CODE (dest) == SUBREG
2391 || GET_CODE (dest) == ZERO_EXTRACT
2392 || GET_CODE (dest) == SIGN_EXTRACT
2393 || GET_CODE (dest) == STRICT_LOW_PART)
2394 dest = XEXP (dest, 0);
2395
2396 /* If DEST is not a MEM, then it will not conflict with a load. Note
2397 that function calls are assumed to clobber memory, but are handled
2398 elsewhere. */
2399
2400 if (GET_CODE (dest) != MEM)
2401 return;
2402
2403 dest_addr = get_addr (XEXP (dest, 0));
2404 dest_addr = canon_rtx (dest_addr);
2405 insn = (rtx) v_insn;
2406
2407 canon_modify_mem_list[BLOCK_NUM (insn)] =
2408 alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]);
2409 canon_modify_mem_list[BLOCK_NUM (insn)] =
2410 alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]);
2411}
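/* Shape of the list built above, for illustration: each store prepends a
   (MEM, canonical address) pair, MEM first, so after two stores in a block
   the list reads

     mem2, addr2, mem1, addr1

   with the most recent store at the head.  A consumer walks it two entries
   at a time, except that a CALL_INSN contributes a single entry (see
   record_last_mem_set_info below).  */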
2412
7506f491 2413/* Record memory first/last/block set information for INSN. We do not
a13d4ebf
AM
2414 actually care about the memory location(s) that are set, or even how
2415 they are set (consider a CALL_INSN). We merely need to record which
2416 insns modify memory. */
7506f491
DE
2417
2418static void
2419record_last_mem_set_info (insn)
2420 rtx insn;
2421{
b86ba9c8 2422 if (mem_first_set == NEVER_SET)
7506f491 2423 mem_first_set = INSN_CUID (insn);
c4c81601 2424
7506f491
DE
2425 mem_last_set = INSN_CUID (insn);
2426 mem_set_in_block[BLOCK_NUM (insn)] = 1;
a13d4ebf
AM
2427 modify_mem_list[BLOCK_NUM (insn)] =
2428 alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]);
2429
2430 if (GET_CODE (insn) == CALL_INSN)
2431 {
2432 /* Note that traversals of this loop (other than for freeing)
2433 will break after encountering a CALL_INSN. So, there's no
2434 need to insert a pair of items, as canon_list_insert does. */
2435 canon_modify_mem_list[BLOCK_NUM (insn)] =
2436 alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]);
2437 }
2438 else
2439 note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
7506f491
DE
2440}
2441
7506f491 2442/* Called from compute_hash_table via note_stores to handle one
84832317
MM
2443 SET or CLOBBER in an insn. DATA is really the instruction in which
2444 the SET is taking place. */
7506f491
DE
2445
2446static void
84832317 2447record_last_set_info (dest, setter, data)
50b2596f 2448 rtx dest, setter ATTRIBUTE_UNUSED;
84832317 2449 void *data;
7506f491 2450{
84832317
MM
2451 rtx last_set_insn = (rtx) data;
2452
7506f491
DE
2453 if (GET_CODE (dest) == SUBREG)
2454 dest = SUBREG_REG (dest);
2455
2456 if (GET_CODE (dest) == REG)
2457 record_last_reg_set_info (last_set_insn, REGNO (dest));
2458 else if (GET_CODE (dest) == MEM
2459 /* Ignore pushes, they clobber nothing. */
2460 && ! push_operand (dest, GET_MODE (dest)))
2461 record_last_mem_set_info (last_set_insn);
2462}
2463
2464/* Top level function to create an expression or assignment hash table.
2465
2466 Expression entries are placed in the hash table if
2467 - they are of the form (set (pseudo-reg) src),
2468 - src is something we want to perform GCSE on,
2469 - none of the operands are subsequently modified in the block
2470
2471 Assignment entries are placed in the hash table if
2472 - they are of the form (set (pseudo-reg) src),
2473 - src is something we want to perform const/copy propagation on,
2474 - none of the operands or target are subsequently modified in the block
c4c81601 2475
7506f491
DE
2476 Currently src must be a pseudo-reg or a const_int.
2477
2478 F is the first insn.
2479 SET_P is non-zero for computing the assignment hash table. */
2480
2481static void
b5ce41ff 2482compute_hash_table (set_p)
7506f491
DE
2483 int set_p;
2484{
2485 int bb;
2486
2487 /* While we compute the hash table we also compute a bit array of which
2488 registers are set in which blocks.
2489 We also compute which blocks set memory, in the absence of aliasing
2490 support [which is TODO].
2491 ??? This isn't needed during const/copy propagation, but it's cheap to
2492 compute. Later. */
2493 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
961192e1 2494 memset ((char *) mem_set_in_block, 0, n_basic_blocks);
7506f491 2495
a13d4ebf
AM
2496 /* Re-cache any INSN_LIST nodes we have allocated. */
2497 {
2498 int i;
2499 for (i = 0; i < n_basic_blocks; i++)
2500 {
2501 if (modify_mem_list[i])
2502 free_INSN_LIST_list (modify_mem_list + i);
2503 if (canon_modify_mem_list[i])
2504 free_INSN_LIST_list (canon_modify_mem_list + i);
2505 }
2506 }
7506f491
DE
2507 /* Some working arrays used to track first and last set in each block. */
2508 /* ??? One could use alloca here, but at some size a threshold is crossed
2509 beyond which one should use malloc. Are we at that threshold here? */
2510 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2511 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2512
2513 for (bb = 0; bb < n_basic_blocks; bb++)
2514 {
2515 rtx insn;
770ae6cc 2516 unsigned int regno;
ed79bb3d 2517 int in_libcall_block;
770ae6cc 2518 unsigned int i;
7506f491
DE
2519
2520 /* First pass over the instructions records information used to
2521 determine when registers and memory are first and last set.
2522 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
2523 could be moved to compute_sets since they currently don't change. */
2524
b86ba9c8
GK
2525 for (i = 0; i < max_gcse_regno; i++)
2526 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
770ae6cc 2527
b86ba9c8
GK
2528 mem_first_set = NEVER_SET;
2529 mem_last_set = NEVER_SET;
7506f491 2530
3b413743
RH
2531 for (insn = BLOCK_HEAD (bb);
2532 insn && insn != NEXT_INSN (BLOCK_END (bb));
7506f491
DE
2533 insn = NEXT_INSN (insn))
2534 {
2535#ifdef NON_SAVING_SETJMP
2536 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2537 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2538 {
2539 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2540 record_last_reg_set_info (insn, regno);
2541 continue;
2542 }
2543#endif
2544
2c3c49de 2545 if (! INSN_P (insn))
7506f491
DE
2546 continue;
2547
2548 if (GET_CODE (insn) == CALL_INSN)
2549 {
2550 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
15f8470f
JL
2551 if ((call_used_regs[regno]
2552 && regno != STACK_POINTER_REGNUM
2553#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2554 && regno != HARD_FRAME_POINTER_REGNUM
2555#endif
2556#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2557 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2558#endif
848e0190 2559#if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
15f8470f
JL
2560 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2561#endif
2562
2563 && regno != FRAME_POINTER_REGNUM)
2564 || global_regs[regno])
7506f491 2565 record_last_reg_set_info (insn, regno);
c4c81601 2566
7506f491
DE
2567 if (! CONST_CALL_P (insn))
2568 record_last_mem_set_info (insn);
2569 }
2570
84832317 2571 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2572 }
2573
2574 /* The next pass builds the hash table. */
2575
3b413743
RH
2576 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2577 insn && insn != NEXT_INSN (BLOCK_END (bb));
7506f491 2578 insn = NEXT_INSN (insn))
2c3c49de 2579 if (INSN_P (insn))
c4c81601
RK
2580 {
2581 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2582 in_libcall_block = 1;
2583 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2584 in_libcall_block = 0;
2585 hash_scan_insn (insn, set_p, in_libcall_block);
7506f491
DE
2586 }
2587 }
2588
2589 free (reg_first_set);
2590 free (reg_last_set);
c4c81601 2591
7506f491
DE
2592 /* Catch bugs early. */
2593 reg_first_set = reg_last_set = 0;
2594}
2595
2596/* Allocate space for the set hash table.
2597 N_INSNS is the number of instructions in the function.
2598 It is used to determine the number of buckets to use. */
2599
2600static void
2601alloc_set_hash_table (n_insns)
2602 int n_insns;
2603{
2604 int n;
2605
2606 set_hash_table_size = n_insns / 4;
2607 if (set_hash_table_size < 11)
2608 set_hash_table_size = 11;
c4c81601 2609
7506f491
DE
2610 /* Attempt to maintain efficient use of hash table.
2611 Making it an odd number is simplest for now.
2612 ??? Later take some measurements. */
2613 set_hash_table_size |= 1;
2614 n = set_hash_table_size * sizeof (struct expr *);
2615 set_hash_table = (struct expr **) gmalloc (n);
2616}
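/* Worked example of the sizing above: for a function of 1000 insns,
   n_insns / 4 gives 250 buckets, and OR-ing in 1 makes it 251 (odd);
   for very small functions the size is clamped to the minimum of 11.  */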
2617
2618/* Free things allocated by alloc_set_hash_table. */
2619
2620static void
2621free_set_hash_table ()
2622{
2623 free (set_hash_table);
2624}
2625
2626/* Compute the hash table for doing copy/const propagation. */
2627
2628static void
b5ce41ff 2629compute_set_hash_table ()
7506f491
DE
2630{
2631 /* Initialize count of number of entries in hash table. */
2632 n_sets = 0;
961192e1 2633 memset ((char *) set_hash_table, 0,
c4c81601 2634 set_hash_table_size * sizeof (struct expr *));
7506f491 2635
b5ce41ff 2636 compute_hash_table (1);
7506f491
DE
2637}
2638
2639/* Allocate space for the expression hash table.
2640 N_INSNS is the number of instructions in the function.
2641 It is used to determine the number of buckets to use. */
2642
2643static void
2644alloc_expr_hash_table (n_insns)
2e653e39 2645 unsigned int n_insns;
7506f491
DE
2646{
2647 int n;
2648
2649 expr_hash_table_size = n_insns / 2;
2650 /* Make sure the amount is usable. */
2651 if (expr_hash_table_size < 11)
2652 expr_hash_table_size = 11;
c4c81601 2653
7506f491
DE
2654 /* Attempt to maintain efficient use of hash table.
2655 Making it an odd number is simplest for now.
2656 ??? Later take some measurements. */
2657 expr_hash_table_size |= 1;
2658 n = expr_hash_table_size * sizeof (struct expr *);
2659 expr_hash_table = (struct expr **) gmalloc (n);
2660}
2661
2662/* Free things allocated by alloc_expr_hash_table. */
2663
2664static void
2665free_expr_hash_table ()
2666{
2667 free (expr_hash_table);
2668}
2669
2670/* Compute the hash table for doing GCSE. */
2671
2672static void
b5ce41ff 2673compute_expr_hash_table ()
7506f491
DE
2674{
2675 /* Initialize count of number of entries in hash table. */
2676 n_exprs = 0;
961192e1 2677 memset ((char *) expr_hash_table, 0,
c4c81601 2678 expr_hash_table_size * sizeof (struct expr *));
7506f491 2679
b5ce41ff 2680 compute_hash_table (0);
7506f491
DE
2681}
2682\f
2683/* Expression tracking support. */
2684
2685/* Lookup pattern PAT in the expression table.
2686 The result is a pointer to the table entry, or NULL if not found. */
2687
2688static struct expr *
2689lookup_expr (pat)
2690 rtx pat;
2691{
2692 int do_not_record_p;
2693 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2694 expr_hash_table_size);
2695 struct expr *expr;
2696
2697 if (do_not_record_p)
2698 return NULL;
2699
2700 expr = expr_hash_table[hash];
2701
2702 while (expr && ! expr_equiv_p (expr->expr, pat))
2703 expr = expr->next_same_hash;
2704
2705 return expr;
2706}
2707
c4c81601
RK
2708/* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that
2709 matches it, otherwise return the first entry for REGNO. The result is a
2710 pointer to the table entry, or NULL if not found. */
7506f491
DE
2711
2712static struct expr *
2713lookup_set (regno, pat)
770ae6cc 2714 unsigned int regno;
7506f491
DE
2715 rtx pat;
2716{
2717 unsigned int hash = hash_set (regno, set_hash_table_size);
2718 struct expr *expr;
2719
2720 expr = set_hash_table[hash];
2721
2722 if (pat)
2723 {
2724 while (expr && ! expr_equiv_p (expr->expr, pat))
2725 expr = expr->next_same_hash;
2726 }
2727 else
2728 {
2729 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2730 expr = expr->next_same_hash;
2731 }
2732
2733 return expr;
2734}
2735
2736/* Return the next entry for REGNO in list EXPR. */
2737
2738static struct expr *
2739next_set (regno, expr)
770ae6cc 2740 unsigned int regno;
7506f491
DE
2741 struct expr *expr;
2742{
2743 do
2744 expr = expr->next_same_hash;
2745 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2746
7506f491
DE
2747 return expr;
2748}
2749
2750/* Reset tables used to keep track of what's still available [since the
2751 start of the block]. */
2752
2753static void
2754reset_opr_set_tables ()
2755{
2756 /* Maintain a bitmap of which regs have been set since beginning of
2757 the block. */
2758 sbitmap_zero (reg_set_bitmap);
c4c81601 2759
7506f491
DE
2760 /* Also keep a record of the last instruction to modify memory.
2761 For now this is very trivial, we only record whether any memory
2762 location has been modified. */
2763 mem_last_set = 0;
a13d4ebf
AM
2764 {
2765 int i;
2766
2767 /* Re-cache any INSN_LIST nodes we have allocated. */
2768 for (i = 0; i < n_basic_blocks; i++)
2769 {
2770 if (modify_mem_list[i])
2771 free_INSN_LIST_list (modify_mem_list + i);
2772 if (canon_modify_mem_list[i])
2773 free_INSN_LIST_list (canon_modify_mem_list + i);
2774 }
2775 }
7506f491
DE
2776}
2777
2778/* Return non-zero if the operands of X are not set before INSN in
2779 INSN's basic block. */
2780
2781static int
2782oprs_not_set_p (x, insn)
2783 rtx x, insn;
2784{
c4c81601 2785 int i, j;
7506f491 2786 enum rtx_code code;
6f7d635c 2787 const char *fmt;
7506f491 2788
7506f491
DE
2789 if (x == 0)
2790 return 1;
2791
2792 code = GET_CODE (x);
2793 switch (code)
2794 {
2795 case PC:
2796 case CC0:
2797 case CONST:
2798 case CONST_INT:
2799 case CONST_DOUBLE:
2800 case SYMBOL_REF:
2801 case LABEL_REF:
2802 case ADDR_VEC:
2803 case ADDR_DIFF_VEC:
2804 return 1;
2805
2806 case MEM:
e2d2ed72
AM
2807 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2808 INSN_CUID (insn), x, 0))
a13d4ebf 2809 return 0;
7506f491
DE
2810 if (mem_last_set != 0)
2811 return 0;
c4c81601
RK
2812 else
2813 return oprs_not_set_p (XEXP (x, 0), insn);
7506f491
DE
2814
2815 case REG:
2816 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2817
2818 default:
2819 break;
2820 }
2821
c4c81601 2822 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2823 {
2824 if (fmt[i] == 'e')
2825 {
7506f491
DE
2826 /* If we are about to do the last recursive call
2827 needed at this level, change it into iteration.
2828 This function is called enough to be worth it. */
2829 if (i == 0)
c4c81601
RK
2830 return oprs_not_set_p (XEXP (x, i), insn);
2831
2832 if (! oprs_not_set_p (XEXP (x, i), insn))
7506f491
DE
2833 return 0;
2834 }
2835 else if (fmt[i] == 'E')
c4c81601
RK
2836 for (j = 0; j < XVECLEN (x, i); j++)
2837 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2838 return 0;
7506f491
DE
2839 }
2840
2841 return 1;
2842}
2843
2844/* Mark things set by a CALL. */
2845
2846static void
b5ce41ff
JL
2847mark_call (insn)
2848 rtx insn;
7506f491
DE
2849{
2850 mem_last_set = INSN_CUID (insn);
a13d4ebf
AM
2851 if (! CONST_CALL_P (insn))
2852 record_last_mem_set_info (insn);
7506f491
DE
2853}
2854
2855/* Mark things set by a SET. */
2856
2857static void
2858mark_set (pat, insn)
2859 rtx pat, insn;
2860{
2861 rtx dest = SET_DEST (pat);
2862
2863 while (GET_CODE (dest) == SUBREG
2864 || GET_CODE (dest) == ZERO_EXTRACT
2865 || GET_CODE (dest) == SIGN_EXTRACT
2866 || GET_CODE (dest) == STRICT_LOW_PART)
2867 dest = XEXP (dest, 0);
2868
a13d4ebf
AM
2869 if (GET_CODE (dest) == REG)
2870 SET_BIT (reg_set_bitmap, REGNO (dest));
2871 else if (GET_CODE (dest) == MEM)
2872 record_last_mem_set_info (insn);
2878
2879 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2880 mark_call (insn);
7506f491
DE
2881}
2882
2883/* Record things set by a CLOBBER. */
2884
2885static void
2886mark_clobber (pat, insn)
2887 rtx pat, insn;
2888{
2889 rtx clob = XEXP (pat, 0);
2890
2891 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2892 clob = XEXP (clob, 0);
2893
2894 if (GET_CODE (clob) == REG)
2895 SET_BIT (reg_set_bitmap, REGNO (clob));
2896 else
2897 record_last_mem_set_info (insn);
7506f491
DE
2902}
2903
2904/* Record things set by INSN.
2905 This data is used by oprs_not_set_p. */
2906
2907static void
2908mark_oprs_set (insn)
2909 rtx insn;
2910{
2911 rtx pat = PATTERN (insn);
c4c81601 2912 int i;
7506f491
DE
2913
2914 if (GET_CODE (pat) == SET)
2915 mark_set (pat, insn);
2916 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2917 for (i = 0; i < XVECLEN (pat, 0); i++)
2918 {
2919 rtx x = XVECEXP (pat, 0, i);
2920
2921 if (GET_CODE (x) == SET)
2922 mark_set (x, insn);
2923 else if (GET_CODE (x) == CLOBBER)
2924 mark_clobber (x, insn);
2925 else if (GET_CODE (x) == CALL)
2926 mark_call (insn);
2927 }
7506f491 2928
7506f491
DE
2929 else if (GET_CODE (pat) == CLOBBER)
2930 mark_clobber (pat, insn);
2931 else if (GET_CODE (pat) == CALL)
b5ce41ff 2932 mark_call (insn);
7506f491 2933}
b5ce41ff 2934
7506f491
DE
2935\f
2936/* Classic GCSE reaching definition support. */
2937
2938/* Allocate reaching def variables. */
2939
2940static void
2941alloc_rd_mem (n_blocks, n_insns)
2942 int n_blocks, n_insns;
2943{
2944 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2945 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2946
2947 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2948 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2949
2950 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2951 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2952
2953 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2954 sbitmap_vector_zero (rd_out, n_basic_blocks);
2955}
2956
2957/* Free reaching def variables. */
2958
2959static void
2960free_rd_mem ()
2961{
5a660bff
DB
2962 sbitmap_vector_free (rd_kill);
2963 sbitmap_vector_free (rd_gen);
2964 sbitmap_vector_free (reaching_defs);
2965 sbitmap_vector_free (rd_out);
7506f491
DE
2966}
2967
c4c81601 2968/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
7506f491
DE
2969
2970static void
2971handle_rd_kill_set (insn, regno, bb)
2972 rtx insn;
e2d2ed72
AM
2973 int regno;
2974 basic_block bb;
7506f491 2975{
c4c81601 2976 struct reg_set *this_reg;
7506f491 2977
c4c81601
RK
2978 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2979 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
e2d2ed72 2980 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
7506f491
DE
2981}
2982
7506f491
DE
2983/* Compute the set of kill's for reaching definitions. */
2984
2985static void
2986compute_kill_rd ()
2987{
c4c81601 2988 int bb, cuid;
172890a2
RK
2989 unsigned int regno;
2990 int i;
7506f491
DE
2991
2992 /* For each block
2993 For each set bit in `gen' of the block (i.e. each insn which
ac7c5af5
JL
2994 generates a definition in the block)
2995 Call the reg set by the insn corresponding to that bit regx
2996 Look at the linked list starting at reg_set_table[regx]
2997 For each setting of regx in the linked list, which is not in
2998 this block
c4c81601 2999 Set the bit in `kill' corresponding to that insn. */
7506f491 3000 for (bb = 0; bb < n_basic_blocks; bb++)
c4c81601
RK
3001 for (cuid = 0; cuid < max_cuid; cuid++)
3002 if (TEST_BIT (rd_gen[bb], cuid))
7506f491 3003 {
c4c81601
RK
3004 rtx insn = CUID_INSN (cuid);
3005 rtx pat = PATTERN (insn);
7506f491 3006
c4c81601
RK
3007 if (GET_CODE (insn) == CALL_INSN)
3008 {
3009 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ac7c5af5 3010 {
c4c81601
RK
3011 if ((call_used_regs[regno]
3012 && regno != STACK_POINTER_REGNUM
15f8470f 3013#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
c4c81601 3014 && regno != HARD_FRAME_POINTER_REGNUM
15f8470f
JL
3015#endif
3016#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
c4c81601
RK
3017 && ! (regno == ARG_POINTER_REGNUM
3018 && fixed_regs[regno])
15f8470f 3019#endif
848e0190 3020#if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
c4c81601 3021 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
15f8470f 3022#endif
c4c81601
RK
3023 && regno != FRAME_POINTER_REGNUM)
3024 || global_regs[regno])
e2d2ed72 3025 handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb));
ac7c5af5 3026 }
c4c81601 3027 }
7506f491 3028
c4c81601
RK
3029 if (GET_CODE (pat) == PARALLEL)
3030 {
3031 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 3032 {
c4c81601 3033 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 3034
c4c81601
RK
3035 if ((code == SET || code == CLOBBER)
3036 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3037 handle_rd_kill_set (insn,
3038 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e2d2ed72 3039 BASIC_BLOCK (bb));
ac7c5af5 3040 }
ac7c5af5 3041 }
c4c81601
RK
3042 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3043 /* Each setting of this register outside of this block
3044 must be marked in the set of kills in this block. */
e2d2ed72 3045 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb));
7506f491 3046 }
7506f491
DE
3047}
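/* Worked example of the kill computation above (R, I2 and I7 are
   placeholders): if register R's only settings are insn I2 in block 2 and
   insn I7 in block 7, then rd_kill[7] gets the bit for I2's CUID set
   (block 7's definition kills the one coming from block 2), and rd_kill[2]
   gets the bit for I7's CUID set.  */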
3048
3049/* Compute the reaching definitions as in
3050 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3051 Chapter 10. It is the same algorithm as used for computing available
3052 expressions but applied to the gens and kills of reaching definitions. */
3053
3054static void
3055compute_rd ()
3056{
3057 int bb, changed, passes;
3058
3059 for (bb = 0; bb < n_basic_blocks; bb++)
3060 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
3061
3062 passes = 0;
3063 changed = 1;
3064 while (changed)
3065 {
3066 changed = 0;
3067 for (bb = 0; bb < n_basic_blocks; bb++)
ac7c5af5 3068 {
36349f8b 3069 sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
7506f491
DE
3070 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
3071 reaching_defs[bb], rd_kill[bb]);
ac7c5af5 3072 }
7506f491
DE
3073 passes++;
3074 }
3075
3076 if (gcse_file)
3077 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3078}
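/* The fixed point computed above satisfies, for each block b:

     reaching_defs[b] = union over predecessors p of rd_out[p]
     rd_out[b]        = rd_gen[b] union (reaching_defs[b] - rd_kill[b])

   iterated until no rd_out[] changes, i.e. the forward dataflow
   formulation from the reference cited above.  */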
3079\f
3080/* Classic GCSE available expression support. */
3081
3082/* Allocate memory for available expression computation. */
3083
3084static void
3085alloc_avail_expr_mem (n_blocks, n_exprs)
3086 int n_blocks, n_exprs;
3087{
3088 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3089 sbitmap_vector_zero (ae_kill, n_basic_blocks);
3090
3091 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3092 sbitmap_vector_zero (ae_gen, n_basic_blocks);
3093
3094 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3095 sbitmap_vector_zero (ae_in, n_basic_blocks);
3096
3097 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3098 sbitmap_vector_zero (ae_out, n_basic_blocks);
7506f491
DE
3099}
3100
3101static void
3102free_avail_expr_mem ()
3103{
5a660bff
DB
3104 sbitmap_vector_free (ae_kill);
3105 sbitmap_vector_free (ae_gen);
3106 sbitmap_vector_free (ae_in);
3107 sbitmap_vector_free (ae_out);
7506f491
DE
3108}
3109
3110/* Compute the set of available expressions generated in each basic block. */
3111
3112static void
3113compute_ae_gen ()
3114{
2e653e39 3115 unsigned int i;
c4c81601
RK
3116 struct expr *expr;
3117 struct occr *occr;
7506f491
DE
3118
3119 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3120 This is all we have to do because an expression is not recorded if it
3121 is not available, and the only expressions we want to work with are the
3122 ones that are recorded. */
7506f491 3123 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
3124 for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
3125 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3126 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
7506f491
DE
3127}
3128
3129/* Return non-zero if expression X is killed in BB. */
3130
3131static int
3132expr_killed_p (x, bb)
3133 rtx x;
e2d2ed72 3134 basic_block bb;
7506f491 3135{
c4c81601 3136 int i, j;
7506f491 3137 enum rtx_code code;
6f7d635c 3138 const char *fmt;
7506f491 3139
7506f491
DE
3140 if (x == 0)
3141 return 1;
3142
3143 code = GET_CODE (x);
3144 switch (code)
3145 {
3146 case REG:
e2d2ed72 3147 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
7506f491
DE
3148
3149 case MEM:
a13d4ebf
AM
3150 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3151 return 1;
e2d2ed72 3152 if (mem_set_in_block[bb->index])
7506f491 3153 return 1;
c4c81601
RK
3154 else
3155 return expr_killed_p (XEXP (x, 0), bb);
7506f491
DE
3156
3157 case PC:
3158 case CC0: /*FIXME*/
3159 case CONST:
3160 case CONST_INT:
3161 case CONST_DOUBLE:
3162 case SYMBOL_REF:
3163 case LABEL_REF:
3164 case ADDR_VEC:
3165 case ADDR_DIFF_VEC:
3166 return 0;
3167
3168 default:
3169 break;
3170 }
3171
c4c81601 3172 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3173 {
3174 if (fmt[i] == 'e')
3175 {
7506f491
DE
3176 /* If we are about to do the last recursive call
3177 needed at this level, change it into iteration.
3178 This function is called enough to be worth it. */
3179 if (i == 0)
c4c81601
RK
3180 return expr_killed_p (XEXP (x, i), bb);
3181 else if (expr_killed_p (XEXP (x, i), bb))
7506f491
DE
3182 return 1;
3183 }
3184 else if (fmt[i] == 'E')
c4c81601
RK
3185 for (j = 0; j < XVECLEN (x, i); j++)
3186 if (expr_killed_p (XVECEXP (x, i, j), bb))
3187 return 1;
7506f491
DE
3188 }
3189
3190 return 0;
3191}
3192
3193/* Compute the set of available expressions killed in each basic block. */
3194
3195static void
a42cd965
AM
3196compute_ae_kill (ae_gen, ae_kill)
3197 sbitmap *ae_gen, *ae_kill;
7506f491 3198{
2e653e39
RK
3199 int bb;
3200 unsigned int i;
c4c81601 3201 struct expr *expr;
7506f491
DE
3202
3203 for (bb = 0; bb < n_basic_blocks; bb++)
c4c81601
RK
3204 for (i = 0; i < expr_hash_table_size; i++)
3205 for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
7506f491 3206 {
c4c81601
RK
3207 /* Skip EXPR if generated in this block. */
3208 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
3209 continue;
7506f491 3210
e2d2ed72 3211 if (expr_killed_p (expr->expr, BASIC_BLOCK (bb)))
c4c81601 3212 SET_BIT (ae_kill[bb], expr->bitmap_index);
7506f491 3213 }
7506f491 3214}
7506f491
DE
3215\f
3216/* Actually perform the Classic GCSE optimizations. */
3217
3218/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
3219
3220 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
3221 as a positive reach. We want to do this when there are two computations
3222 of the expression in the block.
3223
3224 VISITED is a pointer to a working buffer for tracking which BB's have
3225 been visited. It is NULL for the top-level call.
3226
3227 We treat reaching expressions that go through blocks containing the same
3228 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3229 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3230 2 as not reaching. The intent is to improve the probability of finding
3231 only one reaching expression and to reduce register lifetimes by picking
3232 the closest such expression. */
3233
3234static int
283a2545 3235expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
7506f491
DE
3236 struct occr *occr;
3237 struct expr *expr;
e2d2ed72 3238 basic_block bb;
7506f491
DE
3239 int check_self_loop;
3240 char *visited;
3241{
36349f8b 3242 edge pred;
7506f491 3243
e2d2ed72 3244 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3245 {
e2d2ed72 3246 basic_block pred_bb = pred->src;
7506f491 3247
e2d2ed72 3248 if (visited[pred_bb->index])
c4c81601 3249 /* This predecessor has already been visited. Nothing to do. */
7506f491 3250 ;
7506f491 3251 else if (pred_bb == bb)
ac7c5af5 3252 {
7506f491
DE
3253 /* BB loops on itself. */
3254 if (check_self_loop
e2d2ed72
AM
3255 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3256 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3257 return 1;
c4c81601 3258
e2d2ed72 3259 visited[pred_bb->index] = 1;
ac7c5af5 3260 }
c4c81601 3261
7506f491 3262 /* Ignore this predecessor if it kills the expression. */
e2d2ed72
AM
3263 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3264 visited[pred_bb->index] = 1;
c4c81601 3265
7506f491 3266 /* Does this predecessor generate this expression? */
e2d2ed72 3267 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3268 {
3269 /* Is this the occurrence we're looking for?
3270 Note that there's only one generating occurrence per block
3271 so we just need to check the block number. */
e2d2ed72 3272 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3273 return 1;
c4c81601 3274
e2d2ed72 3275 visited[pred_bb->index] = 1;
7506f491 3276 }
c4c81601 3277
7506f491
DE
3278 /* Neither gen nor kill. */
3279 else
ac7c5af5 3280 {
e2d2ed72 3281 visited[pred_bb->index] = 1;
283a2545
RL
3282 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3283 visited))
c4c81601 3284
7506f491 3285 return 1;
ac7c5af5 3286 }
7506f491
DE
3287 }
3288
3289 /* All paths have been checked. */
3290 return 0;
3291}
3292
283a2545
RL
3293/* This wrapper for expr_reaches_here_p_work() is to ensure that any
3294 memory allocated for that function is returned. */
3295
3296static int
3297expr_reaches_here_p (occr, expr, bb, check_self_loop)
3298 struct occr *occr;
3299 struct expr *expr;
e2d2ed72 3300 basic_block bb;
283a2545
RL
3301 int check_self_loop;
3302{
3303 int rval;
c4c81601 3304 char *visited = (char *) xcalloc (n_basic_blocks, 1);
283a2545 3305
c4c81601 3306 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
283a2545
RL
3307
3308 free (visited);
c4c81601 3309 return rval;
283a2545
RL
3310}
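/* Illustrative sketch only (not part of gcse.c) of the "closest occurrence"
   rule documented before expr_reaches_here_p_work: an occurrence is treated
   as not reaching if every path to the target passes through another block
   that generates the same expression.  The single-predecessor chain
   2 -> 3 -> 4 and the toy_* names are made up for this example; the real
   code walks all predecessor edges and also handles self-loops.  */

#include <assert.h>
#include <string.h>

#define TOY_BLOCKS 5

static const int toy_pred[TOY_BLOCKS] = { -1, -1, -1, 2, 3 }; /* lone pred  */
static const int toy_gen[TOY_BLOCKS]  = {  0,  0,  1, 1, 0 }; /* computes?  */
static const int toy_kill[TOY_BLOCKS] = {  0,  0,  0, 0, 0 }; /* clobbers?  */

/* Does the occurrence of the expression in OCCR_BB reach block BB?  */
static int
toy_reaches_p (int occr_bb, int bb, char *visited)
{
  int pred = toy_pred[bb];

  if (pred < 0 || visited[pred])
    return 0;
  visited[pred] = 1;

  if (toy_kill[pred])
    return 0;                   /* Killed on the way: does not reach.  */
  if (toy_gen[pred])
    return pred == occr_bb;     /* A nearer occurrence shadows OCCR_BB.  */
  return toy_reaches_p (occr_bb, pred, visited);
}

static void
toy_reaches_example (void)
{
  char visited[TOY_BLOCKS];

  /* Block 3 holds the closest occurrence, so it reaches block 4.  */
  memset (visited, 0, sizeof visited);
  assert (toy_reaches_p (3, 4, visited));

  /* Block 2's occurrence is shadowed by block 3's and is treated as
     not reaching, exactly as described in the comment above.  */
  memset (visited, 0, sizeof visited);
  assert (! toy_reaches_p (2, 4, visited));
}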
3311
7506f491
DE
3312/* Return the instruction that computes EXPR that reaches INSN's basic block.
3313 If there is more than one such instruction, return NULL.
3314
3315 Called only by handle_avail_expr. */
3316
3317static rtx
3318computing_insn (expr, insn)
3319 struct expr *expr;
3320 rtx insn;
3321{
e2d2ed72 3322 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
3323
3324 if (expr->avail_occr->next == NULL)
3325 {
e2d2ed72 3326 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
c4c81601
RK
3327 /* The available expression is actually itself
3328 (i.e. a loop in the flow graph) so do nothing. */
3329 return NULL;
3330
7506f491
DE
3331 /* (FIXME) Case that we found a pattern that was created by
3332 a substitution that took place. */
3333 return expr->avail_occr->insn;
3334 }
3335 else
3336 {
3337 /* Pattern is computed more than once.
3338 Search backwards from this insn to see how many of these
3339 computations actually reach this insn. */
3340 struct occr *occr;
3341 rtx insn_computes_expr = NULL;
3342 int can_reach = 0;
3343
3344 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3345 {
e2d2ed72 3346 if (BLOCK_FOR_INSN (occr->insn) == bb)
7506f491
DE
3347 {
3348 /* The expression is generated in this block.
3349 The only time we care about this is when the expression
3350 is generated later in the block [and thus there's a loop].
3351 We let the normal cse pass handle the other cases. */
c4c81601
RK
3352 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3353 && expr_reaches_here_p (occr, expr, bb, 1))
7506f491
DE
3354 {
3355 can_reach++;
3356 if (can_reach > 1)
3357 return NULL;
c4c81601 3358
7506f491
DE
3359 insn_computes_expr = occr->insn;
3360 }
3361 }
c4c81601
RK
3362 else if (expr_reaches_here_p (occr, expr, bb, 0))
3363 {
3364 can_reach++;
3365 if (can_reach > 1)
3366 return NULL;
3367
3368 insn_computes_expr = occr->insn;
3369 }
7506f491
DE
3370 }
3371
3372 if (insn_computes_expr == NULL)
3373 abort ();
c4c81601 3374
7506f491
DE
3375 return insn_computes_expr;
3376 }
3377}
3378
3379/* Return non-zero if the definition in DEF_INSN can reach INSN.
3380 Only called by can_disregard_other_sets. */
3381
3382static int
3383def_reaches_here_p (insn, def_insn)
3384 rtx insn, def_insn;
3385{
3386 rtx reg;
3387
3388 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3389 return 1;
3390
3391 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3392 {
3393 if (INSN_CUID (def_insn) < INSN_CUID (insn))
ac7c5af5 3394 {
7506f491
DE
3395 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3396 return 1;
c4c81601 3397 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
7506f491
DE
3398 reg = XEXP (PATTERN (def_insn), 0);
3399 else if (GET_CODE (PATTERN (def_insn)) == SET)
3400 reg = SET_DEST (PATTERN (def_insn));
3401 else
3402 abort ();
c4c81601 3403
7506f491
DE
3404 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3405 }
3406 else
3407 return 0;
3408 }
3409
3410 return 0;
3411}
3412
c4c81601
RK
3413/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3414 value returned is the number of definitions that reach INSN. Returning a
3415 value of zero means that [maybe] more than one definition reaches INSN and
3416 the caller can't perform whatever optimization it is trying to do. I.e. it is
3417 always safe to return zero. */
7506f491
DE
3418
3419static int
3420can_disregard_other_sets (addr_this_reg, insn, for_combine)
3421 struct reg_set **addr_this_reg;
3422 rtx insn;
3423 int for_combine;
3424{
3425 int number_of_reaching_defs = 0;
c4c81601 3426 struct reg_set *this_reg;
7506f491 3427
c4c81601
RK
3428 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3429 if (def_reaches_here_p (insn, this_reg->insn))
3430 {
3431 number_of_reaching_defs++;
3432 /* Ignore parallels for now. */
3433 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3434 return 0;
3435
3436 if (!for_combine
3437 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3438 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3439 SET_SRC (PATTERN (insn)))))
3440 /* A setting of the reg to a different value reaches INSN. */
3441 return 0;
3442
3443 if (number_of_reaching_defs > 1)
3444 {
3445 /* If, in this setting, the value the register is being set to is
3446 equal to the previous value the register was set to, and this
3447 setting reaches the insn we are trying to do the substitution
3448 on, then we are OK. */
3449 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
7506f491 3450 return 0;
c4c81601
RK
3451 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3452 SET_SRC (PATTERN (insn))))
3453 return 0;
3454 }
7506f491 3455
c4c81601
RK
3456 *addr_this_reg = this_reg;
3457 }
7506f491
DE
3458
3459 return number_of_reaching_defs;
3460}
3461
3462/* Expression computed by insn is available and the substitution is legal,
3463 so try to perform the substitution.
3464
3465 The result is non-zero if any changes were made. */
3466
3467static int
3468handle_avail_expr (insn, expr)
3469 rtx insn;
3470 struct expr *expr;
3471{
0631e0bf 3472 rtx pat, insn_computes_expr, expr_set;
7506f491
DE
3473 rtx to;
3474 struct reg_set *this_reg;
3475 int found_setting, use_src;
3476 int changed = 0;
3477
3478 /* We only handle the case where one computation of the expression
3479 reaches this instruction. */
3480 insn_computes_expr = computing_insn (expr, insn);
3481 if (insn_computes_expr == NULL)
3482 return 0;
0631e0bf
JH
3483 expr_set = single_set (insn_computes_expr);
3484 if (!expr_set)
3485 abort ();
7506f491
DE
3486
3487 found_setting = 0;
3488 use_src = 0;
3489
3490 /* At this point we know only one computation of EXPR outside of this
3491 block reaches this insn. Now try to find a register that the
3492 expression is computed into. */
0631e0bf 3493 if (GET_CODE (SET_SRC (expr_set)) == REG)
7506f491
DE
3494 {
3495 /* This is the case when the available expression that reaches
3496 here has already been handled as an available expression. */
770ae6cc 3497 unsigned int regnum_for_replacing
0631e0bf 3498 = REGNO (SET_SRC (expr_set));
c4c81601 3499
7506f491
DE
3500 /* If the register was created by GCSE we can't use `reg_set_table',
3501 however we know it's set only once. */
3502 if (regnum_for_replacing >= max_gcse_regno
3503 /* If the register the expression is computed into is set only once,
3504 or only one set reaches this insn, we can use it. */
3505 || (((this_reg = reg_set_table[regnum_for_replacing]),
3506 this_reg->next == NULL)
3507 || can_disregard_other_sets (&this_reg, insn, 0)))
3508 {
3509 use_src = 1;
3510 found_setting = 1;
3511 }
3512 }
3513
3514 if (!found_setting)
3515 {
770ae6cc 3516 unsigned int regnum_for_replacing
0631e0bf 3517 = REGNO (SET_DEST (expr_set));
c4c81601 3518
7506f491
DE
3519 /* This shouldn't happen. */
3520 if (regnum_for_replacing >= max_gcse_regno)
3521 abort ();
c4c81601 3522
7506f491 3523 this_reg = reg_set_table[regnum_for_replacing];
c4c81601 3524
7506f491
DE
3525 /* If the register the expression is computed into is set only once,
3526 or only one set reaches this insn, use it. */
3527 if (this_reg->next == NULL
3528 || can_disregard_other_sets (&this_reg, insn, 0))
3529 found_setting = 1;
3530 }
3531
3532 if (found_setting)
3533 {
3534 pat = PATTERN (insn);
3535 if (use_src)
0631e0bf 3536 to = SET_SRC (expr_set);
7506f491 3537 else
0631e0bf 3538 to = SET_DEST (expr_set);
7506f491
DE
3539 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3540
3541 /* We should be able to ignore the return code from validate_change but
3542 to play it safe we check. */
3543 if (changed)
3544 {
3545 gcse_subst_count++;
3546 if (gcse_file != NULL)
3547 {
c4c81601
RK
3548 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3549 INSN_UID (insn));
3550 fprintf (gcse_file, " reg %d %s insn %d\n",
3551 REGNO (to), use_src ? "from" : "set in",
7506f491
DE
3552 INSN_UID (insn_computes_expr));
3553 }
7506f491
DE
3554 }
3555 }
c4c81601 3556
7506f491
DE
3557 /* The register that the expr is computed into is set more than once. */
3558 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3559 {
3560 /* Insert an insn after INSNX that copies the reg set in INSNX
3561 into a new pseudo register; call this new register REGN.
3562 From INSNB until the end of the basic block, or until REGB is set,
3563 replace all uses of REGB with REGN. */
3564 rtx new_insn;
3565
0631e0bf 3566 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
7506f491
DE
3567
3568 /* Generate the new insn. */
3569 /* ??? If the change fails, we return 0, even though we created
3570 an insn. I think this is ok. */
9e6a5703
JC
3571 new_insn
3572 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
0631e0bf 3573 SET_DEST (expr_set)),
c4c81601
RK
3574 insn_computes_expr);
3575
7506f491 3576 /* Keep block number table up to date. */
ccbaf064 3577 set_block_for_new_insns (new_insn, BLOCK_FOR_INSN (insn_computes_expr));
c4c81601 3578
7506f491
DE
3579 /* Keep register set table up to date. */
3580 record_one_set (REGNO (to), new_insn);
3581
3582 gcse_create_count++;
3583 if (gcse_file != NULL)
ac7c5af5 3584 {
c4c81601 3585 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
7506f491 3586 INSN_UID (NEXT_INSN (insn_computes_expr)),
c4c81601
RK
3587 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3588 fprintf (gcse_file, ", computed in insn %d,\n",
7506f491 3589 INSN_UID (insn_computes_expr));
c4c81601
RK
3590 fprintf (gcse_file, " into newly allocated reg %d\n",
3591 REGNO (to));
ac7c5af5 3592 }
7506f491
DE
3593
3594 pat = PATTERN (insn);
3595
3596 /* Do register replacement for INSN. */
3597 changed = validate_change (insn, &SET_SRC (pat),
c4c81601
RK
3598 SET_DEST (PATTERN
3599 (NEXT_INSN (insn_computes_expr))),
7506f491
DE
3600 0);
3601
3602 /* We should be able to ignore the return code from validate_change but
3603 to play it safe we check. */
3604 if (changed)
3605 {
3606 gcse_subst_count++;
3607 if (gcse_file != NULL)
3608 {
c4c81601
RK
3609 fprintf (gcse_file,
3610 "GCSE: Replacing the source in insn %d with reg %d ",
7506f491 3611 INSN_UID (insn),
c4c81601
RK
3612 REGNO (SET_DEST (PATTERN (NEXT_INSN
3613 (insn_computes_expr)))));
3614 fprintf (gcse_file, "set in insn %d\n",
7506f491
DE
3615 INSN_UID (insn_computes_expr));
3616 }
7506f491
DE
3617 }
3618 }
3619
3620 return changed;
3621}
3622
c4c81601
RK
3623/* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3624 the dataflow analysis has been done.
7506f491
DE
3625
3626 The result is non-zero if a change was made. */
3627
3628static int
3629classic_gcse ()
3630{
3631 int bb, changed;
3632 rtx insn;
3633
3634 /* Note we start at block 1. */
3635
3636 changed = 0;
3637 for (bb = 1; bb < n_basic_blocks; bb++)
3638 {
3639 /* Reset tables used to keep track of what's still valid [since the
3640 start of the block]. */
3641 reset_opr_set_tables ();
3642
3b413743
RH
3643 for (insn = BLOCK_HEAD (bb);
3644 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
7506f491
DE
3645 insn = NEXT_INSN (insn))
3646 {
3647 /* Is insn of form (set (pseudo-reg) ...)? */
7506f491
DE
3648 if (GET_CODE (insn) == INSN
3649 && GET_CODE (PATTERN (insn)) == SET
3650 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3651 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3652 {
3653 rtx pat = PATTERN (insn);
3654 rtx src = SET_SRC (pat);
3655 struct expr *expr;
3656
3657 if (want_to_gcse_p (src)
3658 /* Is the expression recorded? */
3659 && ((expr = lookup_expr (src)) != NULL)
3660 /* Is the expression available [at the start of the
3661 block]? */
3662 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3663 /* Are the operands unchanged since the start of the
3664 block? */
3665 && oprs_not_set_p (src, insn))
3666 changed |= handle_avail_expr (insn, expr);
3667 }
3668
3669 /* Keep track of everything modified by this insn. */
3670 /* ??? Need to be careful w.r.t. mods done to INSN. */
2c3c49de 3671 if (INSN_P (insn))
7506f491 3672 mark_oprs_set (insn);
ac7c5af5 3673 }
7506f491
DE
3674 }
3675
3676 return changed;
3677}
3678
3679/* Top level routine to perform one classic GCSE pass.
3680
3681 Return non-zero if a change was made. */
3682
3683static int
b5ce41ff 3684one_classic_gcse_pass (pass)
7506f491
DE
3685 int pass;
3686{
3687 int changed = 0;
3688
3689 gcse_subst_count = 0;
3690 gcse_create_count = 0;
3691
3692 alloc_expr_hash_table (max_cuid);
3693 alloc_rd_mem (n_basic_blocks, max_cuid);
b5ce41ff 3694 compute_expr_hash_table ();
7506f491
DE
3695 if (gcse_file)
3696 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3697 expr_hash_table_size, n_exprs);
c4c81601 3698
7506f491
DE
3699 if (n_exprs > 0)
3700 {
3701 compute_kill_rd ();
3702 compute_rd ();
3703 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3704 compute_ae_gen ();
a42cd965 3705 compute_ae_kill (ae_gen, ae_kill);
bd0eaec2 3706 compute_available (ae_gen, ae_kill, ae_out, ae_in);
7506f491
DE
3707 changed = classic_gcse ();
3708 free_avail_expr_mem ();
3709 }
c4c81601 3710
7506f491
DE
3711 free_rd_mem ();
3712 free_expr_hash_table ();
3713
3714 if (gcse_file)
3715 {
3716 fprintf (gcse_file, "\n");
c4c81601
RK
3717 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3718 current_function_name, pass, bytes_used, gcse_subst_count);
3719 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
7506f491
DE
3720 }
3721
3722 return changed;
3723}
3724\f
3725/* Compute copy/constant propagation working variables. */
3726
3727/* Local properties of assignments. */
7506f491
DE
3728static sbitmap *cprop_pavloc;
3729static sbitmap *cprop_absaltered;
3730
3731/* Global properties of assignments (computed from the local properties). */
7506f491
DE
3732static sbitmap *cprop_avin;
3733static sbitmap *cprop_avout;
3734
c4c81601
RK
3735/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3736 basic blocks. N_SETS is the number of sets. */
7506f491
DE
3737
3738static void
3739alloc_cprop_mem (n_blocks, n_sets)
3740 int n_blocks, n_sets;
3741{
3742 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3743 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3744
3745 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3746 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3747}
3748
3749/* Free vars used by copy/const propagation. */
3750
3751static void
3752free_cprop_mem ()
3753{
5a660bff
DB
3754 sbitmap_vector_free (cprop_pavloc);
3755 sbitmap_vector_free (cprop_absaltered);
3756 sbitmap_vector_free (cprop_avin);
3757 sbitmap_vector_free (cprop_avout);
7506f491
DE
3758}
3759
c4c81601
RK
3760/* For each block, compute whether X is transparent. X is either an
3761 expression or an assignment [though we don't care which, for this context
3762 an assignment is treated as an expression]. For each block where an
3763 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3764 bit in BMAP. */
7506f491
DE
3765
3766static void
3767compute_transp (x, indx, bmap, set_p)
3768 rtx x;
3769 int indx;
3770 sbitmap *bmap;
3771 int set_p;
3772{
c4c81601 3773 int bb, i, j;
7506f491 3774 enum rtx_code code;
c4c81601 3775 reg_set *r;
6f7d635c 3776 const char *fmt;
7506f491 3777
c4c81601
RK
3778 /* repeat is used to turn tail-recursion into iteration since GCC
3779 can't do it when there's no return value. */
7506f491
DE
3780 repeat:
3781
3782 if (x == 0)
3783 return;
3784
3785 code = GET_CODE (x);
3786 switch (code)
3787 {
3788 case REG:
c4c81601
RK
3789 if (set_p)
3790 {
3791 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3792 {
3793 for (bb = 0; bb < n_basic_blocks; bb++)
3794 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3795 SET_BIT (bmap[bb], indx);
3796 }
3797 else
3798 {
3799 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3800 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3801 }
3802 }
3803 else
3804 {
3805 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3806 {
3807 for (bb = 0; bb < n_basic_blocks; bb++)
3808 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3809 RESET_BIT (bmap[bb], indx);
3810 }
3811 else
3812 {
3813 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3814 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3815 }
3816 }
7506f491 3817
c4c81601 3818 return;
7506f491
DE
3819
3820 case MEM:
a13d4ebf
AM
3821 for (bb = 0; bb < n_basic_blocks; bb++)
3822 {
3823 rtx list_entry = canon_modify_mem_list[bb];
3824
3825 while (list_entry)
3826 {
3827 rtx dest, dest_addr;
3828
3829 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3830 {
3831 if (set_p)
3832 SET_BIT (bmap[bb], indx);
3833 else
3834 RESET_BIT (bmap[bb], indx);
3835 break;
3836 }
3837 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3838 Examine each hunk of memory that is modified. */
3839
3840 dest = XEXP (list_entry, 0);
3841 list_entry = XEXP (list_entry, 1);
3842 dest_addr = XEXP (list_entry, 0);
3843
3844 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3845 x, rtx_addr_varies_p))
3846 {
3847 if (set_p)
3848 SET_BIT (bmap[bb], indx);
3849 else
3850 RESET_BIT (bmap[bb], indx);
3851 break;
3852 }
3853 list_entry = XEXP (list_entry, 1);
3854 }
3855 }
7506f491
DE
3856 if (set_p)
3857 {
3858 for (bb = 0; bb < n_basic_blocks; bb++)
3859 if (mem_set_in_block[bb])
3860 SET_BIT (bmap[bb], indx);
3861 }
3862 else
3863 {
3864 for (bb = 0; bb < n_basic_blocks; bb++)
3865 if (mem_set_in_block[bb])
3866 RESET_BIT (bmap[bb], indx);
3867 }
c4c81601 3868
7506f491
DE
3869 x = XEXP (x, 0);
3870 goto repeat;
3871
3872 case PC:
3873 case CC0: /*FIXME*/
3874 case CONST:
3875 case CONST_INT:
3876 case CONST_DOUBLE:
3877 case SYMBOL_REF:
3878 case LABEL_REF:
3879 case ADDR_VEC:
3880 case ADDR_DIFF_VEC:
3881 return;
3882
3883 default:
3884 break;
3885 }
3886
c4c81601 3887 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3888 {
3889 if (fmt[i] == 'e')
3890 {
7506f491
DE
3891 /* If we are about to do the last recursive call
3892 needed at this level, change it into iteration.
3893 This function is called enough to be worth it. */
3894 if (i == 0)
3895 {
c4c81601 3896 x = XEXP (x, i);
7506f491
DE
3897 goto repeat;
3898 }
c4c81601
RK
3899
3900 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
3901 }
3902 else if (fmt[i] == 'E')
c4c81601
RK
3903 for (j = 0; j < XVECLEN (x, i); j++)
3904 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
3905 }
3906}
3907
7506f491
DE
3908/* Top level routine to do the dataflow analysis needed by copy/const
3909 propagation. */
3910
3911static void
3912compute_cprop_data ()
3913{
b5ce41ff 3914 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
ce724250
JL
3915 compute_available (cprop_pavloc, cprop_absaltered,
3916 cprop_avout, cprop_avin);
7506f491
DE
3917}
3918\f
3919/* Copy/constant propagation. */
3920
7506f491
DE
3921/* Maximum number of register uses in an insn that we handle. */
3922#define MAX_USES 8
3923
3924/* Table of uses found in an insn.
3925 Allocated statically to avoid alloc/free complexity and overhead. */
3926static struct reg_use reg_use_table[MAX_USES];
3927
3928/* Index into `reg_use_table' while building it. */
3929static int reg_use_count;
3930
c4c81601
RK
3931/* Set up a list of register numbers used in INSN. The found uses are stored
3932 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3933 and contains the number of uses in the table upon exit.
7506f491 3934
c4c81601
RK
3935 ??? If a register appears multiple times we will record it multiple times.
3936 This doesn't hurt anything but it will slow things down. */
7506f491
DE
3937
3938static void
9e71c818
JH
3939find_used_regs (xptr, data)
3940 rtx *xptr;
3941 void *data ATTRIBUTE_UNUSED;
7506f491 3942{
c4c81601 3943 int i, j;
7506f491 3944 enum rtx_code code;
6f7d635c 3945 const char *fmt;
9e71c818 3946 rtx x = *xptr;
7506f491 3947
c4c81601
RK
3948 /* repeat is used to turn tail-recursion into iteration since GCC
3949 can't do it when there's no return value. */
7506f491 3950 repeat:
7506f491
DE
3951 if (x == 0)
3952 return;
3953
3954 code = GET_CODE (x);
9e71c818 3955 if (REG_P (x))
7506f491 3956 {
7506f491
DE
3957 if (reg_use_count == MAX_USES)
3958 return;
c4c81601 3959
7506f491
DE
3960 reg_use_table[reg_use_count].reg_rtx = x;
3961 reg_use_count++;
7506f491
DE
3962 }
3963
3964 /* Recursively scan the operands of this expression. */
3965
c4c81601 3966 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3967 {
3968 if (fmt[i] == 'e')
3969 {
3970 /* If we are about to do the last recursive call
3971 needed at this level, change it into iteration.
3972 This function is called enough to be worth it. */
3973 if (i == 0)
3974 {
3975 x = XEXP (x, 0);
3976 goto repeat;
3977 }
c4c81601 3978
9e71c818 3979 find_used_regs (&XEXP (x, i), data);
7506f491
DE
3980 }
3981 else if (fmt[i] == 'E')
c4c81601 3982 for (j = 0; j < XVECLEN (x, i); j++)
9e71c818 3983 find_used_regs (&XVECEXP (x, i, j), data);
7506f491
DE
3984 }
3985}
3986
3987/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3988 Returns non-zero if successful. */
3989
3990static int
3991try_replace_reg (from, to, insn)
3992 rtx from, to, insn;
3993{
172890a2 3994 rtx note = find_reg_equal_equiv_note (insn);
fb0c0a12 3995 rtx src = 0;
172890a2
RK
3996 int success = 0;
3997 rtx set = single_set (insn);
833fc3ad 3998
9e71c818
JH
3999 success = validate_replace_src (from, to, insn);
4000
4001 /* If above failed and this is a single set, try to simplify the source of
4002 the set given our substitution. We could perhaps try this for multiple
4003 SETs, but it probably won't buy us anything. */
4004 if (!success && set != 0)
833fc3ad 4005 {
172890a2
RK
4006 src = simplify_replace_rtx (SET_SRC (set), from, to);
4007
9e71c818
JH
4008 if (!rtx_equal_p (src, SET_SRC (set))
4009 && validate_change (insn, &SET_SRC (set), src, 0))
172890a2 4010 success = 1;
833fc3ad
JH
4011 }
4012
fb0c0a12
RK
4013 /* If we've failed to do replacement, have a single SET, and don't already
4014 have a note, add a REG_EQUAL note to not lose information. */
172890a2 4015 if (!success && note == 0 && set != 0)
fb0c0a12 4016 note = REG_NOTES (insn)
172890a2 4017 = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn));
e251e2a2 4018
172890a2
RK
4019 /* If there is already a NOTE, update the expression in it with our
4020 replacement. */
4021 else if (note != 0)
4022 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
833fc3ad 4023
172890a2
RK
4024 /* REG_EQUAL may get simplified into a register.
4025 We don't allow that. Remove that note. This code ought
4026 not to happen, because the previous code ought to synthesize a
4027 reg-reg move, but be on the safe side. */
4028 if (note && REG_P (XEXP (note, 0)))
4029 remove_note (insn, note);
833fc3ad 4030
833fc3ad
JH
4031 return success;
4032}
c4c81601
RK
4033
4034/* Find a set of register REGNO that is available on entry to INSN's block.
4035 Return NULL if no such set is found. */
7506f491
DE
4036
4037static struct expr *
4038find_avail_set (regno, insn)
4039 int regno;
4040 rtx insn;
4041{
cafba495
BS
4042 /* SET1 contains the last set found that can be returned to the caller for
4043 use in a substitution. */
4044 struct expr *set1 = 0;
4045
4046 /* Loops are not possible here. To get a loop we would need two sets
4047 available at the start of the block containing INSN, i.e. we would
4048 need two sets like this available at the start of the block:
4049
4050 (set (reg X) (reg Y))
4051 (set (reg Y) (reg X))
4052
4053 This can not happen since the set of (reg Y) would have killed the
4054 set of (reg X) making it unavailable at the start of this block. */
4055 while (1)
4056 {
4057 rtx src;
4058 struct expr *set = lookup_set (regno, NULL_RTX);
4059
4060 /* Find a set that is available at the start of the block
4061 which contains INSN. */
4062 while (set)
4063 {
4064 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
4065 break;
4066 set = next_set (regno, set);
4067 }
7506f491 4068
cafba495
BS
4069 /* If no available set was found we've reached the end of the
4070 (possibly empty) copy chain. */
4071 if (set == 0)
4072 break;
4073
4074 if (GET_CODE (set->expr) != SET)
4075 abort ();
4076
4077 src = SET_SRC (set->expr);
4078
4079 /* We know the set is available.
4080 Now check that SRC is ANTLOC (i.e. none of the source operands
4081 have changed since the start of the block).
4082
4083 If the source operand changed, we may still use it for the next
4084 iteration of this loop, but we may not use it for substitutions. */
c4c81601 4085
cafba495
BS
4086 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4087 set1 = set;
4088
4089 /* If the source of the set is anything except a register, then
4090 we have reached the end of the copy chain. */
4091 if (GET_CODE (src) != REG)
7506f491 4092 break;
7506f491 4093
cafba495
BS
4094 /* Follow the copy chain, i.e. start another iteration of the loop
4095 and see if we have an available copy into SRC. */
4096 regno = REGNO (src);
4097 }
4098
4099 /* SET1 holds the last set that was available and anticipatable at
4100 INSN. */
4101 return set1;
7506f491
DE
4102}
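/* Illustrative sketch only (not part of gcse.c) of the copy-chain walk that
   find_avail_set performs.  toy_copy_src[R] stands in for "the only
   available set of register R copies it from register toy_copy_src[R]"
   (negative means no such set); toy_src_ok[R] stands in for the ANTLOC
   test, i.e. the source of that set is unchanged since the block start.
   Constants, which also end the chain in the real code, are omitted.  */

static int
toy_find_copy_source (const int *toy_copy_src, const int *toy_src_ok,
                      int regno)
{
  int best = -1;        /* last substitution-safe source, like SET1 above */

  while (toy_copy_src[regno] >= 0)
    {
      /* The copy is available at the block start; remember its source only
         if that source is still valid at the use.  */
      if (toy_src_ok[regno])
        best = toy_copy_src[regno];

      /* Follow the chain: look for an available copy into the source.  */
      regno = toy_copy_src[regno];
    }

  return best;
}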
4103
abd535b6 4104/* Subroutine of cprop_insn that tries to propagate constants into
172890a2
RK
4105 JUMP_INSNS. INSN must be a conditional jump. FROM is what we will try to
4106 replace, SRC is the constant we will try to substitute for it. Returns
4107 nonzero if a change was made. We know INSN has just a SET. */
c4c81601 4108
abd535b6 4109static int
172890a2
RK
4110cprop_jump (insn, from, src)
4111 rtx insn;
4112 rtx from;
abd535b6
BS
4113 rtx src;
4114{
172890a2
RK
4115 rtx set = PATTERN (insn);
4116 rtx new = simplify_replace_rtx (SET_SRC (set), from, src);
abd535b6
BS
4117
4118 /* If no simplification can be made, then try the next
4119 register. */
172890a2 4120 if (rtx_equal_p (new, SET_SRC (set)))
abd535b6
BS
4121 return 0;
4122
172890a2
RK
4123 /* If this is now a no-op, leave it that way, but update LABEL_NUSES if
4124 necessary. */
4125 if (new == pc_rtx)
abd535b6 4126 {
172890a2
RK
4127 SET_SRC (set) = new;
4128
4129 if (JUMP_LABEL (insn) != 0)
abd535b6 4130 --LABEL_NUSES (JUMP_LABEL (insn));
172890a2 4131 }
abd535b6 4132
172890a2
RK
4133 /* Otherwise, this must be a valid instruction. */
4134 else if (! validate_change (insn, &SET_SRC (set), new, 0))
4135 return 0;
abd535b6 4136
172890a2
RK
4137 /* If this has turned into an unconditional jump,
4138 then put a barrier after it so that the unreachable
4139 code will be deleted. */
4140 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4141 emit_barrier_after (insn);
abd535b6 4142
172890a2 4143 run_jump_opt_after_gcse = 1;
c4c81601 4144
172890a2
RK
4145 const_prop_count++;
4146 if (gcse_file != NULL)
4147 {
4148 fprintf (gcse_file,
4149 "CONST-PROP: Replacing reg %d in insn %d with constant ",
4150 REGNO (from), INSN_UID (insn));
4151 print_rtl (gcse_file, src);
4152 fprintf (gcse_file, "\n");
abd535b6 4153 }
172890a2
RK
4154
4155 return 1;
abd535b6
BS
4156}
4157
4158#ifdef HAVE_cc0
c4c81601
RK
4159
4160/* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
4161 for machines that have CC0. INSN is a single set that stores into CC0;
4162 the insn following it is a conditional jump. REG_USED is the use we will
4163 try to replace, SRC is the constant we will try to substitute for it.
abd535b6 4164 Returns nonzero if a change was made. */
c4c81601 4165
abd535b6
BS
4166static int
4167cprop_cc0_jump (insn, reg_used, src)
4168 rtx insn;
4169 struct reg_use *reg_used;
4170 rtx src;
4171{
172890a2
RK
4172 /* First substitute in the SET_SRC of INSN, then substitute that for
4173 CC0 in JUMP. */
abd535b6 4174 rtx jump = NEXT_INSN (insn);
172890a2
RK
4175 rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)),
4176 reg_used->reg_rtx, src);
abd535b6 4177
172890a2 4178 if (! cprop_jump (jump, cc0_rtx, new_src))
abd535b6
BS
4179 return 0;
4180
4181 /* If we succeeded, delete the cc0 setter. */
4182 PUT_CODE (insn, NOTE);
4183 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4184 NOTE_SOURCE_FILE (insn) = 0;
172890a2 4185
abd535b6
BS
4186 return 1;
4187 }
4188#endif
4189
7506f491
DE
4190/* Perform constant and copy propagation on INSN.
4191 The result is non-zero if a change was made. */
4192
4193static int
b5ce41ff 4194cprop_insn (insn, alter_jumps)
7506f491 4195 rtx insn;
b5ce41ff 4196 int alter_jumps;
7506f491
DE
4197{
4198 struct reg_use *reg_used;
4199 int changed = 0;
833fc3ad 4200 rtx note;
7506f491 4201
9e71c818 4202 if (!INSN_P (insn))
7506f491
DE
4203 return 0;
4204
4205 reg_use_count = 0;
9e71c818 4206 note_uses (&PATTERN (insn), find_used_regs, NULL);
833fc3ad 4207
172890a2 4208 note = find_reg_equal_equiv_note (insn);
833fc3ad
JH
4209
4210 /* We may win even when propagating constants into notes. */
4211 if (note)
9e71c818 4212 find_used_regs (&XEXP (note, 0), NULL);
7506f491 4213
c4c81601
RK
4214 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4215 reg_used++, reg_use_count--)
7506f491 4216 {
770ae6cc 4217 unsigned int regno = REGNO (reg_used->reg_rtx);
7506f491
DE
4218 rtx pat, src;
4219 struct expr *set;
7506f491
DE
4220
4221 /* Ignore registers created by GCSE.
4222 We do this because ... */
4223 if (regno >= max_gcse_regno)
4224 continue;
4225
4226 /* If the register has already been set in this block, there's
4227 nothing we can do. */
4228 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4229 continue;
4230
4231 /* Find an assignment that sets reg_used and is available
4232 at the start of the block. */
4233 set = find_avail_set (regno, insn);
4234 if (! set)
4235 continue;
4236
4237 pat = set->expr;
4238 /* ??? We might be able to handle PARALLELs. Later. */
4239 if (GET_CODE (pat) != SET)
4240 abort ();
c4c81601 4241
7506f491
DE
4242 src = SET_SRC (pat);
4243
e78d9500 4244 /* Constant propagation. */
05f6f07c
BS
4245 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
4246 || GET_CODE (src) == SYMBOL_REF)
7506f491 4247 {
e78d9500
JL
4248 /* Handle normal insns first. */
4249 if (GET_CODE (insn) == INSN
4250 && try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491
DE
4251 {
4252 changed = 1;
4253 const_prop_count++;
4254 if (gcse_file != NULL)
4255 {
c4c81601
RK
4256 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
4257 regno);
4258 fprintf (gcse_file, "insn %d with constant ",
4259 INSN_UID (insn));
e78d9500 4260 print_rtl (gcse_file, src);
7506f491
DE
4261 fprintf (gcse_file, "\n");
4262 }
4263
4264 /* The original insn setting reg_used may or may not now be
4265 deletable. We leave the deletion to flow. */
4266 }
e78d9500
JL
4267
4268 /* Try to propagate a CONST_INT into a conditional jump.
4269 We're pretty specific about what we will handle in this
4270 code, we can extend this as necessary over time.
4271
4272 Right now the insn in question must look like
abd535b6 4273 (set (pc) (if_then_else ...)) */
b5ce41ff 4274 else if (alter_jumps
6e9a3c38
JL
4275 && GET_CODE (insn) == JUMP_INSN
4276 && condjump_p (insn)
4277 && ! simplejump_p (insn))
172890a2
RK
4278 changed |= cprop_jump (insn, reg_used->reg_rtx, src);
4279
abd535b6
BS
4280#ifdef HAVE_cc0
4281 /* Similar code for machines that use a pair of CC0 setter and
4282 conditional jump insn. */
4283 else if (alter_jumps
4284 && GET_CODE (PATTERN (insn)) == SET
4285 && SET_DEST (PATTERN (insn)) == cc0_rtx
4286 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4287 && condjump_p (NEXT_INSN (insn))
172890a2
RK
4288 && ! simplejump_p (NEXT_INSN (insn))
4289 && cprop_cc0_jump (insn, reg_used, src))
4290 {
4291 changed = 1;
4292 break;
d7836e38 4293 }
abd535b6 4294#endif
7506f491
DE
4295 }
4296 else if (GET_CODE (src) == REG
4297 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4298 && REGNO (src) != regno)
4299 {
cafba495 4300 if (try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491 4301 {
cafba495
BS
4302 changed = 1;
4303 copy_prop_count++;
4304 if (gcse_file != NULL)
7506f491 4305 {
c4c81601
RK
4306 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
4307 regno, INSN_UID (insn));
4308 fprintf (gcse_file, " with reg %d\n", REGNO (src));
7506f491 4309 }
cafba495
BS
4310
4311 /* The original insn setting reg_used may or may not now be
4312 deletable. We leave the deletion to flow. */
4313 /* FIXME: If it turns out that the insn isn't deletable,
4314 then we may have unnecessarily extended register lifetimes
4315 and made things worse. */
7506f491
DE
4316 }
4317 }
4318 }
4319
4320 return changed;
4321}
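/* Source-level illustration (hypothetical code, not gcse.c itself) of the
   two substitutions cprop_insn performs: a constant set and a register
   copy, each available at the start of the use's block, are propagated
   into the use.  The toy_* functions and their variables are made up.  */

static int
toy_before_cprop (int a)
{
  int c = 42;                   /* constant set reaching the use below  */
  int d = a;                    /* register copy reaching the use below */

  return c + d;
}

static int
toy_after_cprop (int a)
{
  int c = 42;                   /* the now-dead sets are left for later */
  int d = a;                    /* passes (flow) to delete              */

  return 42 + a;                /* constant and copy propagated         */
}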
4322
c4c81601
RK
4323/* Forward propagate copies. This includes copies and constants. Return
4324 non-zero if a change was made. */
7506f491
DE
4325
4326static int
b5ce41ff
JL
4327cprop (alter_jumps)
4328 int alter_jumps;
7506f491
DE
4329{
4330 int bb, changed;
4331 rtx insn;
4332
4333 /* Note we start at block 1. */
4334
4335 changed = 0;
4336 for (bb = 1; bb < n_basic_blocks; bb++)
4337 {
4338 /* Reset tables used to keep track of what's still valid [since the
4339 start of the block]. */
4340 reset_opr_set_tables ();
4341
3b413743
RH
4342 for (insn = BLOCK_HEAD (bb);
4343 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
7506f491 4344 insn = NEXT_INSN (insn))
172890a2
RK
4345 if (INSN_P (insn))
4346 {
4347 changed |= cprop_insn (insn, alter_jumps);
7506f491 4348
172890a2
RK
4349 /* Keep track of everything modified by this insn. */
4350 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4351 call mark_oprs_set if we turned the insn into a NOTE. */
4352 if (GET_CODE (insn) != NOTE)
4353 mark_oprs_set (insn);
ac7c5af5 4354 }
7506f491
DE
4355 }
4356
4357 if (gcse_file != NULL)
4358 fprintf (gcse_file, "\n");
4359
4360 return changed;
4361}
4362
4363/* Perform one copy/constant propagation pass.
4364 PASS is the pass count. ALTER_JUMPS is nonzero if propagation into
4365 conditional jumps should be attempted as well. */
4366
4367static int
b5ce41ff 4368one_cprop_pass (pass, alter_jumps)
7506f491 4369 int pass;
b5ce41ff 4370 int alter_jumps;
7506f491
DE
4371{
4372 int changed = 0;
4373
4374 const_prop_count = 0;
4375 copy_prop_count = 0;
4376
4377 alloc_set_hash_table (max_cuid);
b5ce41ff 4378 compute_set_hash_table ();
7506f491
DE
4379 if (gcse_file)
4380 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4381 n_sets);
4382 if (n_sets > 0)
4383 {
4384 alloc_cprop_mem (n_basic_blocks, n_sets);
4385 compute_cprop_data ();
b5ce41ff 4386 changed = cprop (alter_jumps);
7506f491
DE
4387 free_cprop_mem ();
4388 }
c4c81601 4389
7506f491
DE
4390 free_set_hash_table ();
4391
4392 if (gcse_file)
4393 {
c4c81601
RK
4394 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4395 current_function_name, pass, bytes_used);
4396 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4397 const_prop_count, copy_prop_count);
7506f491
DE
4398 }
4399
4400 return changed;
4401}
4402\f
a65f3558 4403/* Compute PRE+LCM working variables. */
7506f491
DE
4404
4405/* Local properties of expressions. */
4406/* Nonzero for expressions that are transparent in the block. */
a65f3558 4407static sbitmap *transp;
7506f491 4408
5c35539b
RH
4409/* Nonzero for expressions that are transparent at the end of the block.
4410 This is only zero for expressions killed by an abnormal critical edge
4411 created by a call. */
a65f3558 4412static sbitmap *transpout;
5c35539b 4413
a65f3558
JL
4414/* Nonzero for expressions that are computed (available) in the block. */
4415static sbitmap *comp;
7506f491 4416
a65f3558
JL
4417/* Nonzero for expressions that are locally anticipatable in the block. */
4418static sbitmap *antloc;
7506f491 4419
a65f3558
JL
4420/* Nonzero for expressions where this block is an optimal computation
4421 point. */
4422static sbitmap *pre_optimal;
5c35539b 4423
a65f3558
JL
4424/* Nonzero for expressions which are redundant in a particular block. */
4425static sbitmap *pre_redundant;
7506f491 4426
a42cd965
AM
4427/* Nonzero for expressions which should be inserted on a specific edge. */
4428static sbitmap *pre_insert_map;
4429
4430/* Nonzero for expressions which should be deleted in a specific block. */
4431static sbitmap *pre_delete_map;
4432
4433/* Contains the edge_list returned by pre_edge_lcm. */
4434static struct edge_list *edge_list;
4435
a65f3558
JL
4436/* Redundant insns. */
4437static sbitmap pre_redundant_insns;
7506f491 4438
a65f3558 4439/* Allocate vars used for PRE analysis. */
7506f491
DE
4440
4441static void
a65f3558
JL
4442alloc_pre_mem (n_blocks, n_exprs)
4443 int n_blocks, n_exprs;
7506f491 4444{
a65f3558
JL
4445 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4446 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4447 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 4448
a42cd965
AM
4449 pre_optimal = NULL;
4450 pre_redundant = NULL;
4451 pre_insert_map = NULL;
4452 pre_delete_map = NULL;
4453 ae_in = NULL;
4454 ae_out = NULL;
a42cd965 4455 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 4456
a42cd965 4457 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
4458}
4459
a65f3558 4460/* Free vars used for PRE analysis. */
7506f491
DE
4461
4462static void
a65f3558 4463free_pre_mem ()
7506f491 4464{
5a660bff
DB
4465 sbitmap_vector_free (transp);
4466 sbitmap_vector_free (comp);
bd3675fc
JL
4467
4468 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 4469
a42cd965 4470 if (pre_optimal)
5a660bff 4471 sbitmap_vector_free (pre_optimal);
a42cd965 4472 if (pre_redundant)
5a660bff 4473 sbitmap_vector_free (pre_redundant);
a42cd965 4474 if (pre_insert_map)
5a660bff 4475 sbitmap_vector_free (pre_insert_map);
a42cd965 4476 if (pre_delete_map)
5a660bff 4477 sbitmap_vector_free (pre_delete_map);
a42cd965 4478 if (ae_in)
5a660bff 4479 sbitmap_vector_free (ae_in);
a42cd965 4480 if (ae_out)
5a660bff 4481 sbitmap_vector_free (ae_out);
a42cd965 4482
bd3675fc 4483 transp = comp = NULL;
a42cd965 4484 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
55d3f917 4485 ae_in = ae_out = NULL;
7506f491
DE
4486}
4487
4488/* Top level routine to do the dataflow analysis needed by PRE. */
4489
4490static void
4491compute_pre_data ()
4492{
b614171e 4493 sbitmap trapping_expr;
c66e8ae9 4494 int i;
b614171e 4495 unsigned int ui;
c66e8ae9 4496
a65f3558 4497 compute_local_properties (transp, comp, antloc, 0);
a42cd965 4498 sbitmap_vector_zero (ae_kill, n_basic_blocks);
c66e8ae9 4499
b614171e
MM
4500 /* Collect expressions which might trap. */
4501 trapping_expr = sbitmap_alloc (n_exprs);
4502 sbitmap_zero (trapping_expr);
4503 for (ui = 0; ui < expr_hash_table_size; ui++)
4504 {
4505 struct expr *e;
4506 for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
4507 if (may_trap_p (e->expr))
4508 SET_BIT (trapping_expr, e->bitmap_index);
4509 }
4510
c66e8ae9
JL
4511 /* Compute ae_kill for each basic block using:
4512
4513 ~(TRANSP | COMP)
4514
a2e90653 4515 This is significantly faster than compute_ae_kill. */
c66e8ae9
JL
4516
4517 for (i = 0; i < n_basic_blocks; i++)
4518 {
b614171e
MM
4519 edge e;
4520
4521 /* If the current block is the destination of an abnormal edge, we
4522 kill all trapping expressions because we won't be able to properly
4523 place the instruction on the edge. So make them neither
4524 anticipatable nor transparent. This is fairly conservative. */
4525 for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
4526 if (e->flags & EDGE_ABNORMAL)
4527 {
4528 sbitmap_difference (antloc[i], antloc[i], trapping_expr);
4529 sbitmap_difference (transp[i], transp[i], trapping_expr);
4530 break;
4531 }
4532
c66e8ae9
JL
4533 sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
4534 sbitmap_not (ae_kill[i], ae_kill[i]);
4535 }
4536
a42cd965
AM
4537 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4538 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 4539 sbitmap_vector_free (antloc);
bd3675fc 4540 antloc = NULL;
5a660bff 4541 sbitmap_vector_free (ae_kill);
bd3675fc 4542 ae_kill = NULL;
b614171e 4543 free (trapping_expr);
7506f491
DE
4544}
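/* Worked example (not part of gcse.c) of the ~(TRANSP | COMP) identity used
   just above.  Bits stand for expressions; the particular values are made
   up.  An expression is killed in a block exactly when it is neither
   transparent there (an operand is modified) nor computed and still valid
   at the block's end.  */

#include <assert.h>

static void
toy_ae_kill_identity (void)
{
  unsigned transp = 0x1;        /* expr 0 is transparent, expr 1 is not  */
  unsigned comp   = 0x0;        /* neither expression is computed here   */
  unsigned n_expr_mask = 0x3;   /* only two expressions in this example  */
  unsigned ae_kill = ~(transp | comp) & n_expr_mask;

  /* Only expression 1 ends up in the kill set.  */
  assert (ae_kill == 0x2);
}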
4545\f
4546/* PRE utilities */
4547
a65f3558
JL
4548/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4549 block BB.
7506f491
DE
4550
4551 VISITED is a pointer to a working buffer for tracking which BB's have
4552 been visited. It is NULL for the top-level call.
4553
4554 We treat reaching expressions that go through blocks containing the same
4555 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4556 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4557 2 as not reaching. The intent is to improve the probability of finding
4558 only one reaching expression and to reduce register lifetimes by picking
4559 the closest such expression. */
4560
4561static int
89e606c9 4562pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
e2d2ed72 4563 basic_block occr_bb;
7506f491 4564 struct expr *expr;
e2d2ed72 4565 basic_block bb;
7506f491
DE
4566 char *visited;
4567{
36349f8b 4568 edge pred;
7506f491 4569
e2d2ed72 4570 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 4571 {
e2d2ed72 4572 basic_block pred_bb = pred->src;
7506f491 4573
36349f8b 4574 if (pred->src == ENTRY_BLOCK_PTR
7506f491 4575 /* Has this predecessor already been visited? */
e2d2ed72 4576 || visited[pred_bb->index])
c4c81601
RK
4577 ;/* Nothing to do. */
4578
7506f491 4579 /* Does this predecessor generate this expression? */
e2d2ed72 4580 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
4581 {
4582 /* Is this the occurrence we're looking for?
4583 Note that there's only one generating occurrence per block
4584 so we just need to check the block number. */
a65f3558 4585 if (occr_bb == pred_bb)
7506f491 4586 return 1;
c4c81601 4587
e2d2ed72 4588 visited[pred_bb->index] = 1;
7506f491
DE
4589 }
4590 /* Ignore this predecessor if it kills the expression. */
e2d2ed72
AM
4591 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4592 visited[pred_bb->index] = 1;
c4c81601 4593
7506f491
DE
4594 /* Neither gen nor kill. */
4595 else
ac7c5af5 4596 {
e2d2ed72 4597 visited[pred_bb->index] = 1;
89e606c9 4598 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 4599 return 1;
ac7c5af5 4600 }
7506f491
DE
4601 }
4602
4603 /* All paths have been checked. */
4604 return 0;
4605}
283a2545
RL
4606
4607/* The wrapper for pre_expr_reaches_here_work that ensures that any
4608 memory allocated for that function is returned. */
4609
4610static int
89e606c9 4611pre_expr_reaches_here_p (occr_bb, expr, bb)
e2d2ed72 4612 basic_block occr_bb;
283a2545 4613 struct expr *expr;
e2d2ed72 4614 basic_block bb;
283a2545
RL
4615{
4616 int rval;
c4c81601 4617 char *visited = (char *) xcalloc (n_basic_blocks, 1);
283a2545 4618
89e606c9 4619 rval = pre_expr_reaches_here_p_work(occr_bb, expr, bb, visited);
283a2545
RL
4620
4621 free (visited);
c4c81601 4622 return rval;
283a2545 4623}
7506f491 4624\f
a42cd965
AM
4625
4626/* Given an expr, generate RTL which we can insert at the end of a BB,
4627 or on an edge. Set the block number of any insns generated to
4628 the value of BB. */
4629
4630static rtx
4631process_insert_insn (expr)
4632 struct expr *expr;
4633{
4634 rtx reg = expr->reaching_reg;
fb0c0a12
RK
4635 rtx exp = copy_rtx (expr->expr);
4636 rtx pat;
a42cd965
AM
4637
4638 start_sequence ();
fb0c0a12
RK
4639
4640 /* If the expression is something that's an operand, like a constant,
4641 just copy it to a register. */
4642 if (general_operand (exp, GET_MODE (reg)))
4643 emit_move_insn (reg, exp);
4644
4645 /* Otherwise, make a new insn to compute this expression and make sure the
4646 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4647 expression to make sure we don't have any sharing issues. */
8d444206 4648 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
fb0c0a12
RK
4649 abort ();
4650
a42cd965
AM
4651 pat = gen_sequence ();
4652 end_sequence ();
4653
4654 return pat;
4655}
4656
a65f3558
JL
4657/* Add EXPR to the end of basic block BB.
4658
4659 This is used by both the PRE and code hoisting.
4660
4661 For PRE, we want to verify that the expr is either transparent
4662 or locally anticipatable in the target block. This check makes
4663 no sense for code hoisting. */
7506f491
DE
4664
4665static void
a65f3558 4666insert_insn_end_bb (expr, bb, pre)
7506f491 4667 struct expr *expr;
e2d2ed72 4668 basic_block bb;
a65f3558 4669 int pre;
7506f491 4670{
e2d2ed72 4671 rtx insn = bb->end;
7506f491
DE
4672 rtx new_insn;
4673 rtx reg = expr->reaching_reg;
4674 int regno = REGNO (reg);
a42cd965 4675 rtx pat;
c4c81601 4676 int i;
7506f491 4677
a42cd965 4678 pat = process_insert_insn (expr);
7506f491
DE
4679
4680 /* If the last insn is a jump, insert EXPR in front [taking care to
4681 handle cc0, etc. properly]. */
4682
4683 if (GET_CODE (insn) == JUMP_INSN)
4684 {
50b2596f 4685#ifdef HAVE_cc0
7506f491 4686 rtx note;
50b2596f 4687#endif
7506f491
DE
4688
4689 /* If this is a jump table, then we can't insert stuff here. Since
4690 we know the previous real insn must be the tablejump, we insert
4691 the new instruction just before the tablejump. */
4692 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4693 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4694 insn = prev_real_insn (insn);
4695
4696#ifdef HAVE_cc0
4697 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4698 if cc0 isn't set. */
4699 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4700 if (note)
4701 insn = XEXP (note, 0);
4702 else
4703 {
4704 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4705 if (maybe_cc0_setter
2c3c49de 4706 && INSN_P (maybe_cc0_setter)
7506f491
DE
4707 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4708 insn = maybe_cc0_setter;
4709 }
4710#endif
4711 /* FIXME: What if something in cc0/jump uses value set in new insn? */
e2d2ed72 4712 new_insn = emit_block_insn_before (pat, insn, bb);
3947e2f9 4713 }
c4c81601 4714
3947e2f9
RH
4715 /* Likewise if the last insn is a call, as will happen in the presence
4716 of exception handling. */
5c35539b 4717 else if (GET_CODE (insn) == CALL_INSN)
3947e2f9
RH
4718 {
4719 HARD_REG_SET parm_regs;
4720 int nparm_regs;
4721 rtx p;
4722
4723 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4724 we search backward and place the instructions before the first
4725 parameter is loaded. Do this for everyone for consistency and a
c4c81601 4726 presumption that we'll get better code elsewhere as well.
3947e2f9 4727
c4c81601 4728 It should always be the case that we can put these instructions
a65f3558
JL
4729 anywhere in the basic block when performing PRE optimizations.
4730 Check this. */
c4c81601 4731
a65f3558 4732 if (pre
e2d2ed72
AM
4733 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4734 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
4735 abort ();
4736
4737 /* Since different machines initialize their parameter registers
4738 in different orders, assume nothing. Collect the set of all
4739 parameter registers. */
4740 CLEAR_HARD_REG_SET (parm_regs);
4741 nparm_regs = 0;
4742 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4743 if (GET_CODE (XEXP (p, 0)) == USE
4744 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4745 {
c4c81601 4746 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
5c35539b 4747 abort ();
c4c81601 4748
8598a9a2
JL
4749 /* We only care about registers which can hold function
4750 arguments. */
4751 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
4752 continue;
4753
c4c81601 4754 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
3947e2f9
RH
4755 nparm_regs++;
4756 }
4757
4758 /* Search backward for the first set of a register in this set. */
e2d2ed72 4759 while (nparm_regs && bb->head != insn)
3947e2f9
RH
4760 {
4761 insn = PREV_INSN (insn);
4762 p = single_set (insn);
4763 if (p && GET_CODE (SET_DEST (p)) == REG
4764 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4765 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4766 {
4767 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4768 nparm_regs--;
4769 }
4770 }
4771
b1d26727
JL
4772 /* If we found all the parameter loads, then we want to insert
4773 before the first parameter load.
4774
4775 If we did not find all the parameter loads, then we might have
4776 stopped on the head of the block, which could be a CODE_LABEL.
4777 If we inserted before the CODE_LABEL, then we would be putting
4778 the insn in the wrong basic block. In that case, put the insn
b5229628 4779 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 4780 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 4781 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 4782 insn = NEXT_INSN (insn);
c4c81601 4783
e2d2ed72 4784 new_insn = emit_block_insn_before (pat, insn, bb);
7506f491
DE
4785 }
4786 else
4787 {
4788 new_insn = emit_insn_after (pat, insn);
e2d2ed72 4789 bb->end = new_insn;
7506f491
DE
4790 }
4791
a65f3558
JL
4792 /* Keep block number table up to date.
4793 Note, PAT could be a multiple insn sequence, we have to make
4794 sure that each insn in the sequence is handled. */
4795 if (GET_CODE (pat) == SEQUENCE)
4796 {
a65f3558
JL
4797 for (i = 0; i < XVECLEN (pat, 0); i++)
4798 {
4799 rtx insn = XVECEXP (pat, 0, i);
c4c81601 4800
e2d2ed72 4801 set_block_for_insn (insn, bb);
2c3c49de 4802 if (INSN_P (insn))
a65f3558 4803 add_label_notes (PATTERN (insn), new_insn);
c4c81601 4804
84832317 4805 note_stores (PATTERN (insn), record_set_info, insn);
a65f3558
JL
4806 }
4807 }
4808 else
4809 {
4810 add_label_notes (SET_SRC (pat), new_insn);
e2d2ed72 4811 set_block_for_new_insns (new_insn, bb);
c4c81601 4812
a65f3558
JL
4813 /* Keep register set table up to date. */
4814 record_one_set (regno, new_insn);
4815 }
3947e2f9 4816
7506f491
DE
4817 gcse_create_count++;
4818
4819 if (gcse_file)
4820 {
c4c81601 4821 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
e2d2ed72 4822 bb->index, INSN_UID (new_insn));
c4c81601
RK
4823 fprintf (gcse_file, "copying expression %d to reg %d\n",
4824 expr->bitmap_index, regno);
7506f491
DE
4825 }
4826}
4827
a42cd965
AM
4828/* Insert partially redundant expressions on edges in the CFG to make
4829 the expressions fully redundant. */
7506f491 4830
a42cd965
AM
4831static int
4832pre_edge_insert (edge_list, index_map)
4833 struct edge_list *edge_list;
7506f491
DE
4834 struct expr **index_map;
4835{
c4c81601 4836 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
4837 sbitmap *inserted;
4838
a42cd965
AM
4839 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4840 if it reaches any of the deleted expressions. */
7506f491 4841
a42cd965
AM
4842 set_size = pre_insert_map[0]->size;
4843 num_edges = NUM_EDGES (edge_list);
4844 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
4845 sbitmap_vector_zero (inserted, num_edges);
7506f491 4846
a42cd965 4847 for (e = 0; e < num_edges; e++)
7506f491
DE
4848 {
4849 int indx;
e2d2ed72 4850 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 4851
a65f3558 4852 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 4853 {
a42cd965 4854 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 4855
a65f3558 4856 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
c4c81601
RK
4857 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4858 {
4859 struct expr *expr = index_map[j];
4860 struct occr *occr;
a65f3558 4861
c4c81601
RK
4862 /* Now look at each deleted occurrence of this expression. */
4863 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4864 {
4865 if (! occr->deleted_p)
4866 continue;
4867
4868 /* Insert this expression on this edge if it would
4869 reach the deleted occurrence in BB. */
4870 if (!TEST_BIT (inserted[e], j))
4871 {
4872 rtx insn;
4873 edge eg = INDEX_EDGE (edge_list, e);
4874
4875 /* We can't insert anything on an abnormal and
4876 critical edge, so we insert the insn at the end of
4877 the previous block. There are several alternatives
4878 detailed in Morgans book P277 (sec 10.5) for
4879 handling this situation. This one is easiest for
4880 now. */
4881
4882 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4883 insert_insn_end_bb (index_map[j], bb, 0);
4884 else
4885 {
4886 insn = process_insert_insn (index_map[j]);
4887 insert_insn_on_edge (insn, eg);
4888 }
4889
4890 if (gcse_file)
4891 {
4892 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
e2d2ed72 4893 bb->index,
c4c81601
RK
4894 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4895 fprintf (gcse_file, "copy expression %d\n",
4896 expr->bitmap_index);
4897 }
4898
a13d4ebf 4899 update_ld_motion_stores (expr);
c4c81601
RK
4900 SET_BIT (inserted[e], j);
4901 did_insert = 1;
4902 gcse_create_count++;
4903 }
4904 }
4905 }
7506f491
DE
4906 }
4907 }
5faf03ae 4908
5a660bff 4909 sbitmap_vector_free (inserted);
a42cd965 4910 return did_insert;
7506f491
DE
4911}
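/* Illustrative sketch only (not part of gcse.c) of the word-by-word bit
   scan used in pre_edge_insert above: each bitmap word is shifted right one
   bit at a time, and the expression index is recovered from the word index
   times the word width plus the bit position.  The 16-bit "words" and the
   toy_* names are hypothetical; the real code uses SBITMAP_ELT_BITS-wide
   elements.  */

#define TOY_BITS_PER_WORD 16

static int
toy_count_insertions (const unsigned short *words, int n_words, int n_exprs)
{
  int i, j, indx, count = 0;

  for (i = indx = 0; i < n_words; i++, indx += TOY_BITS_PER_WORD)
    {
      unsigned short insert = words[i];

      for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
        if (insert & 1)
          count++;              /* expression J is to be inserted here */
    }

  return count;
}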
4912
c4c81601 4913/* Copy the result of INSN to REG. INDX is the expression number. */
7506f491
DE
4914
4915static void
4916pre_insert_copy_insn (expr, insn)
4917 struct expr *expr;
4918 rtx insn;
4919{
4920 rtx reg = expr->reaching_reg;
4921 int regno = REGNO (reg);
4922 int indx = expr->bitmap_index;
4923 rtx set = single_set (insn);
4924 rtx new_insn;
e2d2ed72 4925 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
4926
4927 if (!set)
4928 abort ();
c4c81601 4929
cccf0ae8 4930 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
c4c81601 4931
7506f491 4932 /* Keep block number table up to date. */
e2d2ed72 4933 set_block_for_new_insns (new_insn, bb);
c4c81601 4934
7506f491
DE
4935 /* Keep register set table up to date. */
4936 record_one_set (regno, new_insn);
e2d2ed72
AM
4937 if (insn == bb->end)
4938 bb->end = new_insn;
7506f491
DE
4939
4940 gcse_create_count++;
4941
4942 if (gcse_file)
a42cd965
AM
4943 fprintf (gcse_file,
4944 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4945 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4946 INSN_UID (insn), regno);
7506f491
DE
4947}
4948
4949/* Copy available expressions that reach the redundant expression
4950 to `reaching_reg'. */
4951
4952static void
4953pre_insert_copies ()
4954{
2e653e39 4955 unsigned int i;
c4c81601
RK
4956 struct expr *expr;
4957 struct occr *occr;
4958 struct occr *avail;
a65f3558 4959
7506f491
DE
4960 /* For each available expression in the table, copy the result to
4961 `reaching_reg' if the expression reaches a deleted one.
4962
4963 ??? The current algorithm is rather brute force.
4964 Need to do some profiling. */
4965
4966 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
4967 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4968 {
4969 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4970 we don't want to insert a copy here because the expression may not
4971 really be redundant. So only insert an insn if the expression was
4972 deleted. This test also avoids further processing if the
4973 expression wasn't deleted anywhere. */
4974 if (expr->reaching_reg == NULL)
4975 continue;
4976
4977 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4978 {
4979 if (! occr->deleted_p)
4980 continue;
7506f491 4981
c4c81601
RK
4982 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4983 {
4984 rtx insn = avail->insn;
7506f491 4985
c4c81601
RK
4986 /* No need to handle this one if handled already. */
4987 if (avail->copied_p)
4988 continue;
7506f491 4989
c4c81601
RK
4990 /* Don't handle this one if it's a redundant one. */
4991 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4992 continue;
7506f491 4993
c4c81601 4994 /* Or if the expression doesn't reach the deleted one. */
e2d2ed72
AM
4995 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4996 expr,
4997 BLOCK_FOR_INSN (occr->insn)))
c4c81601 4998 continue;
7506f491 4999
c4c81601
RK
5000 /* Copy the result of avail to reaching_reg. */
5001 pre_insert_copy_insn (expr, insn);
5002 avail->copied_p = 1;
5003 }
5004 }
5005 }
7506f491
DE
5006}
5007
5008/* Delete redundant computations.
7506f491
DE
5009 Deletion is done by changing the insn to copy the `reaching_reg' of
5010 the expression into the result of the SET. It is left to later passes
5011 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5012
5013 Returns non-zero if a change is made. */
5014
5015static int
5016pre_delete ()
5017{
2e653e39 5018 unsigned int i;
63bc1d05 5019 int changed;
c4c81601
RK
5020 struct expr *expr;
5021 struct occr *occr;
a65f3558 5022
7506f491
DE
5023 changed = 0;
5024 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5025 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5026 {
5027 int indx = expr->bitmap_index;
7506f491 5028
c4c81601
RK
5029 /* We only need to search antic_occr since we require
5030 ANTLOC != 0. */
7506f491 5031
c4c81601
RK
5032 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5033 {
5034 rtx insn = occr->insn;
5035 rtx set;
e2d2ed72 5036 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5037
e2d2ed72 5038 if (TEST_BIT (pre_delete_map[bb->index], indx))
c4c81601
RK
5039 {
5040 set = single_set (insn);
5041 if (! set)
5042 abort ();
5043
5044 /* Create a pseudo-reg to store the result of reaching
5045 expressions into. Get the mode for the new pseudo from
5046 the mode of the original destination pseudo. */
5047 if (expr->reaching_reg == NULL)
5048 expr->reaching_reg
5049 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5050
5051 /* In theory this should never fail since we're creating
5052 a reg->reg copy.
5053
5054 However, on the x86 some of the movXX patterns actually
5055 contain clobbers of scratch regs. This may cause the
5056 insn created by validate_change to not match any pattern
5057 and thus cause validate_change to fail. */
5058 if (validate_change (insn, &SET_SRC (set),
5059 expr->reaching_reg, 0))
5060 {
5061 occr->deleted_p = 1;
5062 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5063 changed = 1;
5064 gcse_subst_count++;
5065 }
7506f491 5066
c4c81601
RK
5067 if (gcse_file)
5068 {
5069 fprintf (gcse_file,
5070 "PRE: redundant insn %d (expression %d) in ",
5071 INSN_UID (insn), indx);
5072 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
e2d2ed72 5073 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5074 }
5075 }
5076 }
5077 }
7506f491
DE
5078
5079 return changed;
5080}
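/* As a rough illustration (hypothetical RTL; insn UIDs and register
   numbers are invented): a redundant occurrence

     (insn 20 ... (set (reg:SI 66) (plus:SI (reg:SI 60) (reg:SI 61))))

   is "deleted" by rewriting its source into

     (insn 20 ... (set (reg:SI 66) (reg:SI 70)))

   where (reg:SI 70) is the expression's reaching_reg; later passes are
   expected to propagate or eliminate the resulting copy.  */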
5081
5082/* Perform GCSE optimizations using PRE.
5083 This is called by one_pre_gcse_pass after all the dataflow analysis
5084 has been done.
5085
c4c81601
RK
5086 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5087 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5088 Compiler Design and Implementation.
7506f491 5089
c4c81601
RK
5090 ??? A new pseudo reg is created to hold the reaching expression. The nice
5091 thing about the classical approach is that it would try to use an existing
5092 reg. If the register can't be adequately optimized [i.e. we introduce
5093 reload problems], one could add a pass here to propagate the new register
5094 through the block.
7506f491 5095
c4c81601
RK
5096 ??? We don't handle single sets in PARALLELs because we're [currently] not
5097 able to copy the rest of the parallel when we insert copies to create full
5098 redundancies from partial redundancies. However, there's no reason why we
5099 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5100 redundancies. */
5101
5102static int
5103pre_gcse ()
5104{
2e653e39
RK
5105 unsigned int i;
5106 int did_insert, changed;
7506f491 5107 struct expr **index_map;
c4c81601 5108 struct expr *expr;
7506f491
DE
5109
5110 /* Compute a mapping from expression number (`bitmap_index') to
5111 hash table entry. */
5112
dd1bd863 5113 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
7506f491 5114 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5115 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5116 index_map[expr->bitmap_index] = expr;
7506f491
DE
5117
5118 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5119 pre_redundant_insns = sbitmap_alloc (max_cuid);
5120 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5121
5122 /* Delete the redundant insns first so that
5123 - we know what register to use for the new insns and for the other
5124 ones with reaching expressions
5125 - we know which insns are redundant when we go to create copies */
c4c81601 5126
7506f491
DE
5127 changed = pre_delete ();
5128
a42cd965 5129 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5130
7506f491 5131 /* In other places with reaching expressions, copy the expression to the
a42cd965 5132 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5133 pre_insert_copies ();
a42cd965
AM
5134 if (did_insert)
5135 {
5136 commit_edge_insertions ();
5137 changed = 1;
5138 }
7506f491 5139
283a2545 5140 free (index_map);
a65f3558 5141 free (pre_redundant_insns);
7506f491
DE
5142 return changed;
5143}
5144
5145/* Top level routine to perform one PRE GCSE pass.
5146
5147 Return non-zero if a change was made. */
5148
5149static int
b5ce41ff 5150one_pre_gcse_pass (pass)
7506f491
DE
5151 int pass;
5152{
5153 int changed = 0;
5154
5155 gcse_subst_count = 0;
5156 gcse_create_count = 0;
5157
5158 alloc_expr_hash_table (max_cuid);
a42cd965 5159 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5160 if (flag_gcse_lm)
5161 compute_ld_motion_mems ();
5162
b5ce41ff 5163 compute_expr_hash_table ();
a13d4ebf 5164 trim_ld_motion_mems ();
7506f491
DE
5165 if (gcse_file)
5166 dump_hash_table (gcse_file, "Expression", expr_hash_table,
5167 expr_hash_table_size, n_exprs);
c4c81601 5168
7506f491
DE
5169 if (n_exprs > 0)
5170 {
5171 alloc_pre_mem (n_basic_blocks, n_exprs);
5172 compute_pre_data ();
5173 changed |= pre_gcse ();
a42cd965 5174 free_edge_list (edge_list);
7506f491
DE
5175 free_pre_mem ();
5176 }
c4c81601 5177
a13d4ebf 5178 free_ldst_mems ();
a42cd965 5179 remove_fake_edges ();
7506f491
DE
5180 free_expr_hash_table ();
5181
5182 if (gcse_file)
5183 {
c4c81601
RK
5184 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5185 current_function_name, pass, bytes_used);
5186 fprintf (gcse_file, "%d substs, %d insns created\n",
5187 gcse_subst_count, gcse_create_count);
7506f491
DE
5188 }
5189
5190 return changed;
5191}
aeb2f500
JW
5192\f
5193/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5194 If notes are added to an insn which references a CODE_LABEL, the
5195 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5196 because the following loop optimization pass requires them. */
aeb2f500
JW
5197
5198/* ??? This is very similar to the loop.c add_label_notes function. We
5199 could probably share code here. */
5200
5201/* ??? If there was a jump optimization pass after gcse and before loop,
5202 then we would not need to do this here, because jump would add the
5203 necessary REG_LABEL notes. */
5204
5205static void
5206add_label_notes (x, insn)
5207 rtx x;
5208 rtx insn;
5209{
5210 enum rtx_code code = GET_CODE (x);
5211 int i, j;
6f7d635c 5212 const char *fmt;
aeb2f500
JW
5213
5214 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5215 {
6b3603c2 5216 /* This code used to ignore labels that referred to dispatch tables to
ac7c5af5 5217 avoid flow generating (slightly) worse code.
6b3603c2 5218
ac7c5af5
JL
5219 We no longer ignore such label references (see LABEL_REF handling in
5220 mark_jump_label for additional information). */
c4c81601 5221
6b3603c2
JL
5222 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
5223 REG_NOTES (insn));
5b1ef594
JDA
5224 if (LABEL_P (XEXP (x, 0)))
5225 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5226 return;
5227 }
5228
c4c81601 5229 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5230 {
5231 if (fmt[i] == 'e')
5232 add_label_notes (XEXP (x, i), insn);
5233 else if (fmt[i] == 'E')
5234 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5235 add_label_notes (XVECEXP (x, i, j), insn);
5236 }
5237}
a65f3558
JL
5238
5239/* Compute transparent outgoing information for each block.
5240
5241 An expression is transparent to an edge unless it is killed by
5242 the edge itself. This can only happen with abnormal control flow,
5243 when the edge is traversed through a call. This happens with
5244 non-local labels and exceptions.
5245
5246 This would not be necessary if we split the edge. While this is
5247 normally impossible for abnormal critical edges, with some effort
5248 it should be possible with exception handling, since we still have
5249 control over which handler should be invoked. But due to increased
5250 EH table sizes, this may not be worthwhile. */
5251
5252static void
5253compute_transpout ()
5254{
5255 int bb;
2e653e39 5256 unsigned int i;
c4c81601 5257 struct expr *expr;
a65f3558
JL
5258
5259 sbitmap_vector_ones (transpout, n_basic_blocks);
5260
5261 for (bb = 0; bb < n_basic_blocks; ++bb)
5262 {
a65f3558
JL
5263 /* Note that flow inserted a nop at the end of basic blocks that
5264 end in call instructions for reasons other than abnormal
5265 control flow. */
5266 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
5267 continue;
5268
5269 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5270 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
5271 if (GET_CODE (expr->expr) == MEM)
5272 {
5273 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5274 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5275 continue;
a65f3558 5276
c4c81601
RK
5277 /* ??? Optimally, we would use interprocedural alias
5278 analysis to determine if this mem is actually killed
5279 by this call. */
5280 RESET_BIT (transpout[bb], expr->bitmap_index);
5281 }
a65f3558
JL
5282 }
5283}
dfdb644f
JL
5284
5285/* Removal of useless null pointer checks */
5286
dfdb644f 5287/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5288 invalidate nonnull_local and set nonnull_killed. DATA is really a
5289 `null_pointer_info *'.
dfdb644f
JL
5290
5291 We ignore hard registers. */
c4c81601 5292
dfdb644f 5293static void
84832317 5294invalidate_nonnull_info (x, setter, data)
dfdb644f
JL
5295 rtx x;
5296 rtx setter ATTRIBUTE_UNUSED;
0511851c 5297 void *data;
dfdb644f 5298{
770ae6cc
RK
5299 unsigned int regno;
5300 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5301
dfdb644f
JL
5302 while (GET_CODE (x) == SUBREG)
5303 x = SUBREG_REG (x);
5304
5305 /* Ignore anything that is not a register or is a hard register. */
5306 if (GET_CODE (x) != REG
0511851c
MM
5307 || REGNO (x) < npi->min_reg
5308 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5309 return;
5310
0511851c 5311 regno = REGNO (x) - npi->min_reg;
dfdb644f 5312
0511851c
MM
5313 RESET_BIT (npi->nonnull_local[npi->current_block], regno);
5314 SET_BIT (npi->nonnull_killed[npi->current_block], regno);
dfdb644f
JL
5315}
5316
0511851c
MM
5317/* Do null-pointer check elimination for the registers indicated in
5318 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5319 they are not our responsibility to free. */
dfdb644f 5320
0511851c 5321static void
8e184d9c
JJ
5322delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
5323 nonnull_avout, npi)
5324 varray_type *delete_list;
770ae6cc 5325 unsigned int *block_reg;
0511851c
MM
5326 sbitmap *nonnull_avin;
5327 sbitmap *nonnull_avout;
5328 struct null_pointer_info *npi;
dfdb644f 5329{
ce724250 5330 int bb;
0511851c
MM
5331 int current_block;
5332 sbitmap *nonnull_local = npi->nonnull_local;
5333 sbitmap *nonnull_killed = npi->nonnull_killed;
dfdb644f 5334
dfdb644f
JL
5335 /* Compute local properties, nonnull and killed. A register will have
5336 the nonnull property if at the end of the current block its value is
5337 known to be nonnull. The killed property indicates that somewhere in
5338 the block any information we had about the register is killed.
5339
5340 Note that a register can have both properties in a single block. That
5341 indicates that it's killed, then later in the block a new value is
5342 computed. */
5343 sbitmap_vector_zero (nonnull_local, n_basic_blocks);
5344 sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
c4c81601 5345
dfdb644f
JL
5346 for (current_block = 0; current_block < n_basic_blocks; current_block++)
5347 {
5348 rtx insn, stop_insn;
5349
0511851c
MM
5350 /* Set the current block for invalidate_nonnull_info. */
5351 npi->current_block = current_block;
5352
dfdb644f
JL
5353 /* Scan each insn in the basic block looking for memory references and
5354 register sets. */
5355 stop_insn = NEXT_INSN (BLOCK_END (current_block));
5356 for (insn = BLOCK_HEAD (current_block);
5357 insn != stop_insn;
5358 insn = NEXT_INSN (insn))
5359 {
5360 rtx set;
0511851c 5361 rtx reg;
dfdb644f
JL
5362
5363 /* Ignore anything that is not a normal insn. */
2c3c49de 5364 if (! INSN_P (insn))
dfdb644f
JL
5365 continue;
5366
5367 /* Basically ignore anything that is not a simple SET. We do have
5368 to make sure to invalidate nonnull_local and set nonnull_killed
5369 for such insns though. */
5370 set = single_set (insn);
5371 if (!set)
5372 {
0511851c 5373 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5374 continue;
5375 }
5376
5377 /* See if we've got a usable memory load. We handle it first
5378 in case it uses its address register as a dest (which kills
5379 the nonnull property). */
5380 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
5381 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5382 && REGNO (reg) >= npi->min_reg
5383 && REGNO (reg) < npi->max_reg)
dfdb644f 5384 SET_BIT (nonnull_local[current_block],
0511851c 5385 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5386
5387 /* Now invalidate stuff clobbered by this insn. */
0511851c 5388 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5389
5390 /* And handle stores, we do these last since any sets in INSN can
5391 not kill the nonnull property if it is derived from a MEM
5392 appearing in a SET_DEST. */
5393 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
5394 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5395 && REGNO (reg) >= npi->min_reg
5396 && REGNO (reg) < npi->max_reg)
dfdb644f 5397 SET_BIT (nonnull_local[current_block],
0511851c 5398 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5399 }
5400 }
5401
5402 /* Now compute global properties based on the local properties. This
5403 is a classic global availability algorithm. */
ce724250
JL
5404 compute_available (nonnull_local, nonnull_killed,
5405 nonnull_avout, nonnull_avin);
dfdb644f
JL
5406
5407 /* Now look at each bb and see if it ends with a compare of a value
5408 against zero. */
5409 for (bb = 0; bb < n_basic_blocks; bb++)
5410 {
5411 rtx last_insn = BLOCK_END (bb);
0511851c 5412 rtx condition, earliest;
dfdb644f
JL
5413 int compare_and_branch;
5414
0511851c
MM
5415 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5416 since BLOCK_REG[BB] is zero if this block did not end with a
5417 comparison against zero, this condition works. */
5418 if (block_reg[bb] < npi->min_reg
5419 || block_reg[bb] >= npi->max_reg)
dfdb644f
JL
5420 continue;
5421
5422 /* LAST_INSN is a conditional jump. Get its condition. */
5423 condition = get_condition (last_insn, &earliest);
5424
40d7a3fe
NB
5425 /* If we can't determine the condition then skip. */
5426 if (! condition)
5427 continue;
5428
dfdb644f 5429 /* Is the register known to have a nonzero value? */
0511851c 5430 if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
dfdb644f
JL
5431 continue;
5432
5433 /* Try to compute whether the compare/branch at the loop end is one or
5434 two instructions. */
5435 if (earliest == last_insn)
5436 compare_and_branch = 1;
5437 else if (earliest == prev_nonnote_insn (last_insn))
5438 compare_and_branch = 2;
5439 else
5440 continue;
5441
5442 /* We know the register in this comparison is nonnull at exit from
5443 this block. We can optimize this comparison. */
5444 if (GET_CODE (condition) == NE)
5445 {
5446 rtx new_jump;
5447
5448 new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
5449 last_insn);
5450 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5451 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5452 emit_barrier_after (new_jump);
5453 }
8e184d9c
JJ
5454 if (!*delete_list)
5455 VARRAY_RTX_INIT (*delete_list, 10, "delete_list");
5456
5457 VARRAY_PUSH_RTX (*delete_list, last_insn);
dfdb644f 5458 if (compare_and_branch == 2)
8e184d9c 5459 VARRAY_PUSH_RTX (*delete_list, earliest);
0511851c
MM
5460
5461 /* Don't check this block again. (Note that BLOCK_END is
5462 invalid here; we deleted the last instruction in the
5463 block.) */
5464 block_reg[bb] = 0;
5465 }
5466}
5467
5468/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5469 at compile time.
5470
5471 This is conceptually similar to global constant/copy propagation and
5472 classic global CSE (it even uses the same dataflow equations as cprop).
5473
5474 If a register is used as memory address with the form (mem (reg)), then we
5475 know that REG can not be zero at that point in the program. Any instruction
5476 which sets REG "kills" this property.
5477
5478 So, if every path leading to a conditional branch has an available memory
5479 reference of that form, then we know the register can not have the value
5480 zero at the conditional branch.
5481
5482 So we merely need to compute the local properties and propagate that data
5483 around the cfg, then optimize where possible.
5484
5485 We run this pass two times. Once before CSE, then again after CSE. This
5486 has proven to be the most profitable approach. It is rare for new
5487 optimization opportunities of this nature to appear after the first CSE
5488 pass.
5489
5490 This could probably be integrated with global cprop with a little work. */
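/* As a rough illustration (hypothetical source, not from any test case):

     *p = 0;
     ...                               nothing here assigns to p
     if (p == 0)
       abort ();

   The store through p puts (mem (reg p)) on every path to the branch, so
   p is known nonnull there; the comparison against zero and the
   conditional branch can then be deleted (or, for a p != 0 test, replaced
   by an unconditional jump to its target).  */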
5491
5492void
5493delete_null_pointer_checks (f)
2e653e39 5494 rtx f ATTRIBUTE_UNUSED;
0511851c 5495{
0511851c 5496 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 5497 unsigned int *block_reg;
8e184d9c 5498 varray_type delete_list = NULL;
0511851c
MM
5499 int bb;
5500 int reg;
5501 int regs_per_pass;
5502 int max_reg;
8e184d9c 5503 unsigned int i;
0511851c
MM
5504 struct null_pointer_info npi;
5505
0511851c
MM
5506 /* If we have only a single block, then there's nothing to do. */
5507 if (n_basic_blocks <= 1)
a18820c6 5508 return;
0511851c
MM
5509
5510 /* Trying to perform global optimizations on flow graphs which have
5511 a high connectivity will take a long time and is unlikely to be
5512 particularly useful.
5513
43e72072 5514 In normal circumstances a cfg should have about twice as many edges
0511851c
MM
5515 as blocks. But we do not want to punish small functions which have
5516 a couple switch statements. So we require a relatively large number
5517 of basic blocks and the ratio of edges to blocks to be high. */
5518 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
a18820c6 5519 return;
0511851c 5520
0511851c
MM
5521 /* We need four bitmaps, each with a bit for each register in each
5522 basic block. */
5523 max_reg = max_reg_num ();
5524 regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
5525
5526 /* Allocate bitmaps to hold local and global properties. */
5527 npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5528 npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5529 nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5530 nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5531
5532 /* Go through the basic blocks, seeing whether or not each block
5533 ends with a conditional branch whose condition is a comparison
5534 against zero. Record the register compared in BLOCK_REG. */
f9e158c3 5535 block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
0511851c
MM
5536 for (bb = 0; bb < n_basic_blocks; bb++)
5537 {
5538 rtx last_insn = BLOCK_END (bb);
5539 rtx condition, earliest, reg;
5540
5541 /* We only want conditional branches. */
5542 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
5543 || !any_condjump_p (last_insn)
5544 || !onlyjump_p (last_insn))
0511851c
MM
5545 continue;
5546
5547 /* LAST_INSN is a conditional jump. Get its condition. */
5548 condition = get_condition (last_insn, &earliest);
5549
5550 /* If we were unable to get the condition, or it is not an equality
5551 comparison against zero then there's nothing we can do. */
5552 if (!condition
5553 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5554 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5555 || (XEXP (condition, 1)
5556 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5557 continue;
5558
5559 /* We must be checking a register against zero. */
5560 reg = XEXP (condition, 0);
5561 if (GET_CODE (reg) != REG)
5562 continue;
5563
5564 block_reg[bb] = REGNO (reg);
5565 }
5566
5567 /* Go through the algorithm for each block of registers. */
5568 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5569 {
5570 npi.min_reg = reg;
5571 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
8e184d9c 5572 delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
0511851c 5573 nonnull_avout, &npi);
dfdb644f
JL
5574 }
5575
8e184d9c
JJ
5576 /* Now delete the instructions all at once. This breaks the CFG. */
5577 if (delete_list)
5578 {
5579 for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
5580 delete_insn (VARRAY_RTX (delete_list, i));
5581 VARRAY_FREE (delete_list);
5582 }
5583
0511851c
MM
5584 /* Free the table of registers compared at the end of every block. */
5585 free (block_reg);
5586
dfdb644f 5587 /* Free bitmaps. */
5a660bff
DB
5588 sbitmap_vector_free (npi.nonnull_local);
5589 sbitmap_vector_free (npi.nonnull_killed);
5590 sbitmap_vector_free (nonnull_avin);
5591 sbitmap_vector_free (nonnull_avout);
dfdb644f 5592}
bb457bd9
JL
5593
5594/* Code Hoisting variables and subroutines. */
5595
5596/* Very busy expressions. */
5597static sbitmap *hoist_vbein;
5598static sbitmap *hoist_vbeout;
5599
5600/* Hoistable expressions. */
5601static sbitmap *hoist_exprs;
5602
5603/* Dominator bitmaps. */
5604static sbitmap *dominators;
bb457bd9
JL
5605
5606/* ??? We could compute post dominators and run this algorithm in
5607 reverse to perform tail merging; doing so would probably be
5608 more effective than the tail merging code in jump.c.
5609
5610 It's unclear if tail merging could be run in parallel with
5611 code hoisting. It would be nice. */
5612
5613/* Allocate vars used for code hoisting analysis. */
5614
5615static void
5616alloc_code_hoist_mem (n_blocks, n_exprs)
5617 int n_blocks, n_exprs;
5618{
5619 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5620 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5621 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5622
5623 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5624 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5625 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5626 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5627
5628 dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
bb457bd9
JL
5629}
5630
5631/* Free vars used for code hoisting analysis. */
5632
5633static void
5634free_code_hoist_mem ()
5635{
5a660bff
DB
5636 sbitmap_vector_free (antloc);
5637 sbitmap_vector_free (transp);
5638 sbitmap_vector_free (comp);
bb457bd9 5639
5a660bff
DB
5640 sbitmap_vector_free (hoist_vbein);
5641 sbitmap_vector_free (hoist_vbeout);
5642 sbitmap_vector_free (hoist_exprs);
5643 sbitmap_vector_free (transpout);
bb457bd9 5644
5a660bff 5645 sbitmap_vector_free (dominators);
bb457bd9
JL
5646}
5647
5648/* Compute the very busy expressions at entry/exit from each block.
5649
5650 An expression is very busy if all paths from a given point
5651 compute the expression. */
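/* In dataflow terms, the loop below computes (using the local sets already
   defined in this file)

     hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])
     hoist_vbeout[bb] = intersection of hoist_vbein over bb's successors
                        (the last block keeps an empty hoist_vbeout)

   and iterates until no bit changes.  */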
5652
5653static void
5654compute_code_hoist_vbeinout ()
5655{
5656 int bb, changed, passes;
5657
5658 sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
5659 sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
5660
5661 passes = 0;
5662 changed = 1;
c4c81601 5663
bb457bd9
JL
5664 while (changed)
5665 {
5666 changed = 0;
c4c81601 5667
bb457bd9
JL
5668 /* We scan the blocks in the reverse order to speed up
5669 the convergence. */
5670 for (bb = n_basic_blocks - 1; bb >= 0; bb--)
5671 {
5672 changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
5673 hoist_vbeout[bb], transp[bb]);
5674 if (bb != n_basic_blocks - 1)
a42cd965 5675 sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
bb457bd9 5676 }
c4c81601 5677
bb457bd9
JL
5678 passes++;
5679 }
5680
5681 if (gcse_file)
5682 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5683}
5684
5685/* Top level routine to do the dataflow analysis needed by code hoisting. */
5686
5687static void
5688compute_code_hoist_data ()
5689{
5690 compute_local_properties (transp, comp, antloc, 0);
5691 compute_transpout ();
5692 compute_code_hoist_vbeinout ();
f8032688 5693 calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
bb457bd9
JL
5694 if (gcse_file)
5695 fprintf (gcse_file, "\n");
5696}
5697
5698/* Determine if the expression identified by EXPR_INDEX would
5699 reach BB unimpaired if it was placed at the end of EXPR_BB.
5700
5701 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5702 to me that the expression must either be computed or transparent in
5703 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5704 would allow the expression to be hoisted out of loops, even if
5705 the expression wasn't a loop invariant.
5706
5707 Contrast this to reachability for PRE where an expression is
5708 considered reachable if *any* path reaches instead of *all*
5709 paths. */
5710
5711static int
5712hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
e2d2ed72 5713 basic_block expr_bb;
bb457bd9 5714 int expr_index;
e2d2ed72 5715 basic_block bb;
bb457bd9
JL
5716 char *visited;
5717{
5718 edge pred;
283a2545
RL
5719 int visited_allocated_locally = 0;
5720
bb457bd9
JL
5721
5722 if (visited == NULL)
5723 {
283a2545
RL
5724 visited_allocated_locally = 1;
5725 visited = xcalloc (n_basic_blocks, 1);
bb457bd9
JL
5726 }
5727
e2d2ed72 5728 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 5729 {
e2d2ed72 5730 basic_block pred_bb = pred->src;
bb457bd9
JL
5731
5732 if (pred->src == ENTRY_BLOCK_PTR)
5733 break;
e2d2ed72 5734 else if (visited[pred_bb->index])
bb457bd9 5735 continue;
c4c81601 5736
bb457bd9 5737 /* Does this predecessor generate this expression? */
e2d2ed72 5738 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 5739 break;
e2d2ed72 5740 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 5741 break;
c4c81601 5742
bb457bd9
JL
5743 /* Not killed. */
5744 else
5745 {
e2d2ed72 5746 visited[pred_bb->index] = 1;
bb457bd9
JL
5747 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5748 pred_bb, visited))
5749 break;
5750 }
5751 }
283a2545
RL
5752 if (visited_allocated_locally)
5753 free (visited);
c4c81601 5754
bb457bd9
JL
5755 return (pred == NULL);
5756}
5757\f
5758/* Actually perform code hoisting. */
c4c81601 5759
bb457bd9
JL
5760static void
5761hoist_code ()
5762{
2e653e39
RK
5763 int bb, dominated;
5764 unsigned int i;
bb457bd9 5765 struct expr **index_map;
c4c81601 5766 struct expr *expr;
bb457bd9
JL
5767
5768 sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
5769
5770 /* Compute a mapping from expression number (`bitmap_index') to
5771 hash table entry. */
5772
dd1bd863 5773 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
bb457bd9 5774 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5775 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5776 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
5777
5778 /* Walk over each basic block looking for potentially hoistable
5779 expressions; nothing gets hoisted from the entry block. */
5780 for (bb = 0; bb < n_basic_blocks; bb++)
5781 {
5782 int found = 0;
5783 int insn_inserted_p;
5784
5785 /* Examine each expression that is very busy at the exit of this
5786 block. These are the potentially hoistable expressions. */
5787 for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
5788 {
5789 int hoistable = 0;
c4c81601
RK
5790
5791 if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
bb457bd9
JL
5792 {
5793 /* We've found a potentially hoistable expression, now
5794 we look at every block BB dominates to see if it
5795 computes the expression. */
5796 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5797 {
5798 /* Ignore self dominance. */
5799 if (bb == dominated
5800 || ! TEST_BIT (dominators[dominated], bb))
5801 continue;
5802
5803 /* We've found a dominated block, now see if it computes
5804 the busy expression and whether or not moving that
5805 expression to the "beginning" of that block is safe. */
5806 if (!TEST_BIT (antloc[dominated], i))
5807 continue;
5808
5809 /* Note if the expression would reach the dominated block
5810 unimpaired if it was placed at the end of BB.
5811
5812 Keep track of how many times this expression is hoistable
5813 from a dominated block into BB. */
e2d2ed72
AM
5814 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5815 BASIC_BLOCK (dominated), NULL))
bb457bd9
JL
5816 hoistable++;
5817 }
5818
5819 /* If we found more than one hoistable occurrence of this
5820 expression, then note it in the bitmap of expressions to
5821 hoist. It makes no sense to hoist things which are computed
5822 in only one BB, and doing so tends to pessimize register
5823 allocation. One could increase this value to try harder
5824 to avoid any possible code expansion due to register
5825 allocation issues; however experiments have shown that
5826 the vast majority of hoistable expressions are only movable
5827 from two successors, so raising this threshold is likely
5828 to nullify any benefit we get from code hoisting. */
5829 if (hoistable > 1)
5830 {
5831 SET_BIT (hoist_exprs[bb], i);
5832 found = 1;
5833 }
5834 }
5835 }
5836
5837 /* If we found nothing to hoist, then quit now. */
5838 if (! found)
5839 continue;
5840
5841 /* Loop over all the hoistable expressions. */
5842 for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
5843 {
5844 /* We want to insert the expression into BB only once, so
5845 note when we've inserted it. */
5846 insn_inserted_p = 0;
5847
5848 /* These tests should be the same as the tests above. */
5849 if (TEST_BIT (hoist_vbeout[bb], i))
5850 {
5851 /* We've found a potentially hoistable expression, now
5852 we look at every block BB dominates to see if it
5853 computes the expression. */
5854 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5855 {
5856 /* Ignore self dominance. */
5857 if (bb == dominated
5858 || ! TEST_BIT (dominators[dominated], bb))
5859 continue;
5860
5861 /* We've found a dominated block, now see if it computes
5862 the busy expression and whether or not moving that
5863 expression to the "beginning" of that block is safe. */
5864 if (!TEST_BIT (antloc[dominated], i))
5865 continue;
5866
5867 /* The expression is computed in the dominated block and
5868 it would be safe to compute it at the start of the
5869 dominated block. Now we have to determine if the
5870 expression would reach the dominated block if it was
5871 placed at the end of BB. */
e2d2ed72
AM
5872 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5873 BASIC_BLOCK (dominated), NULL))
bb457bd9
JL
5874 {
5875 struct expr *expr = index_map[i];
5876 struct occr *occr = expr->antic_occr;
5877 rtx insn;
5878 rtx set;
5879
bb457bd9
JL
5880 /* Find the right occurrence of this expression. */
5881 while (occr && BLOCK_NUM (occr->insn) != dominated)
5882 occr = occr->next;
5883
5884 /* Should never happen. */
5885 if (!occr)
5886 abort ();
5887
5888 insn = occr->insn;
5889
5890 set = single_set (insn);
5891 if (! set)
5892 abort ();
5893
5894 /* Create a pseudo-reg to store the result of reaching
5895 expressions into. Get the mode for the new pseudo
5896 from the mode of the original destination pseudo. */
5897 if (expr->reaching_reg == NULL)
5898 expr->reaching_reg
5899 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5900
5901 /* In theory this should never fail since we're creating
5902 a reg->reg copy.
5903
c4c81601
RK
5904 However, on the x86 some of the movXX patterns
5905 actually contain clobbers of scratch regs. This may
5906 cause the insn created by validate_change to not
5907 match any pattern and thus cause validate_change to
5908 fail. */
bb457bd9
JL
5909 if (validate_change (insn, &SET_SRC (set),
5910 expr->reaching_reg, 0))
5911 {
5912 occr->deleted_p = 1;
5913 if (!insn_inserted_p)
5914 {
e2d2ed72
AM
5915 insert_insn_end_bb (index_map[i],
5916 BASIC_BLOCK (bb), 0);
bb457bd9
JL
5917 insn_inserted_p = 1;
5918 }
5919 }
5920 }
5921 }
5922 }
5923 }
5924 }
c4c81601 5925
283a2545 5926 free (index_map);
bb457bd9
JL
5927}
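/* As a rough illustration (hypothetical source; the variable names are
   invented):

     if (c)
       x = a + b;
     else
       y = a + b;

   Both arms compute a + b, so the expression is very busy at the end of
   the dominating block; hoist_code evaluates it once there into a new
   pseudo and turns each dominated occurrence into a copy from that
   pseudo.  */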
5928
5929/* Top level routine to perform one code hoisting (aka unification) pass
5930
5931 Return non-zero if a change was made. */
5932
5933static int
5934one_code_hoisting_pass ()
5935{
5936 int changed = 0;
5937
5938 alloc_expr_hash_table (max_cuid);
5939 compute_expr_hash_table ();
5940 if (gcse_file)
5941 dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
5942 expr_hash_table_size, n_exprs);
c4c81601 5943
bb457bd9
JL
5944 if (n_exprs > 0)
5945 {
5946 alloc_code_hoist_mem (n_basic_blocks, n_exprs);
5947 compute_code_hoist_data ();
5948 hoist_code ();
5949 free_code_hoist_mem ();
5950 }
c4c81601 5951
bb457bd9
JL
5952 free_expr_hash_table ();
5953
5954 return changed;
5955}
a13d4ebf
AM
5956\f
5957/* Here we provide the things required to do store motion towards
5958 the exit. In order for this to be effective, gcse also needed to
5959 be taught how to move a load when it is killed only by a store to itself.
5960
5961 int i;
5962 float a[10];
5963
5964 void foo(float scale)
5965 {
5966 for (i=0; i<10; i++)
5967 a[i] *= scale;
5968 }
5969
5970 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5971 the load out since it's live around the loop, and stored at the bottom
5972 of the loop.
5973
5974 The 'Load Motion' referred to and implemented in this file is
5975 an enhancement to gcse which when using edge based lcm, recognizes
5976 this situation and allows gcse to move the load out of the loop.
5977
5978 Once gcse has hoisted the load, store motion can then push this
5979 load towards the exit, and we end up with no loads or stores of 'i'
5980 in the loop. */
5981
5982 /* This will search the ldst list for a matching expression. If it
5983 doesn't find one, we create one and initialize it. */
5984
5985static struct ls_expr *
5986ldst_entry (x)
5987 rtx x;
5988{
5989 struct ls_expr * ptr;
5990
5991 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5992 if (expr_equiv_p (ptr->pattern, x))
5993 break;
5994
5995 if (!ptr)
5996 {
5997 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
5998
5999 ptr->next = pre_ldst_mems;
6000 ptr->expr = NULL;
6001 ptr->pattern = x;
6002 ptr->loads = NULL_RTX;
6003 ptr->stores = NULL_RTX;
6004 ptr->reaching_reg = NULL_RTX;
6005 ptr->invalid = 0;
6006 ptr->index = 0;
6007 ptr->hash_index = 0;
6008 pre_ldst_mems = ptr;
6009 }
6010
6011 return ptr;
6012}
6013
6014/* Free up an individual ldst entry. */
6015
6016static void
6017free_ldst_entry (ptr)
6018 struct ls_expr * ptr;
6019{
6020 free_INSN_LIST_list (& ptr->loads);
6021 free_INSN_LIST_list (& ptr->stores);
6022
6023 free (ptr);
6024}
6025
6026/* Free up all memory associated with the ldst list. */
6027
6028static void
6029free_ldst_mems ()
6030{
6031 while (pre_ldst_mems)
6032 {
6033 struct ls_expr * tmp = pre_ldst_mems;
6034
6035 pre_ldst_mems = pre_ldst_mems->next;
6036
6037 free_ldst_entry (tmp);
6038 }
6039
6040 pre_ldst_mems = NULL;
6041}
6042
6043/* Dump debugging info about the ldst list. */
6044
6045static void
6046print_ldst_list (file)
6047 FILE * file;
6048{
6049 struct ls_expr * ptr;
6050
6051 fprintf (file, "LDST list: \n");
6052
6053 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6054 {
6055 fprintf (file, " Pattern (%3d): ", ptr->index);
6056
6057 print_rtl (file, ptr->pattern);
6058
6059 fprintf (file, "\n Loads : ");
6060
6061 if (ptr->loads)
6062 print_rtl (file, ptr->loads);
6063 else
6064 fprintf (file, "(nil)");
6065
6066 fprintf (file, "\n Stores : ");
6067
6068 if (ptr->stores)
6069 print_rtl (file, ptr->stores);
6070 else
6071 fprintf (file, "(nil)");
6072
6073 fprintf (file, "\n\n");
6074 }
6075
6076 fprintf (file, "\n");
6077}
6078
6079/* Returns 1 if X is in the list of ldst only expressions. */
6080
6081static struct ls_expr *
6082find_rtx_in_ldst (x)
6083 rtx x;
6084{
6085 struct ls_expr * ptr;
6086
6087 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6088 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6089 return ptr;
6090
6091 return NULL;
6092}
6093
6094/* Assign each element of the list of mems a monotonically increasing value. */
6095
6096static int
6097enumerate_ldsts ()
6098{
6099 struct ls_expr * ptr;
6100 int n = 0;
6101
6102 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6103 ptr->index = n++;
6104
6105 return n;
6106}
6107
6108/* Return first item in the list. */
6109
6110static inline struct ls_expr *
6111first_ls_expr ()
6112{
6113 return pre_ldst_mems;
6114}
6115
6116 /* Return the next item in the list after the specified one. */
6117
6118static inline struct ls_expr *
6119next_ls_expr (ptr)
6120 struct ls_expr * ptr;
6121{
6122 return ptr->next;
6123}
6124\f
6125/* Load Motion for loads which only kill themselves. */
6126
6127/* Return true if x is a simple MEM operation, with no registers or
6128 side effects. These are the types of loads we consider for the
6129 ld_motion list, otherwise we let the usual aliasing take care of it. */
6130
6131static int
6132simple_mem (x)
6133 rtx x;
6134{
6135 if (GET_CODE (x) != MEM)
6136 return 0;
6137
6138 if (MEM_VOLATILE_P (x))
6139 return 0;
6140
6141 if (GET_MODE (x) == BLKmode)
6142 return 0;
6143
6144 if (!rtx_varies_p (XEXP (x, 0), 0))
6145 return 1;
6146
6147 return 0;
6148}
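/* For example (hypothetical RTL): (mem:SI (symbol_ref "a")) is a simple
   MEM and a candidate for the ld_motion list, while (mem:SI (reg:SI 60))
   is not, since its address can vary; volatile and BLKmode references are
   likewise rejected.  */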
6149
6150/* Make sure there isn't a buried reference in this pattern anywhere.
6151 If there is, invalidate the entry for it since we're not capable
6152 of fixing it up just yet. We have to be sure we know about ALL
6153 loads since the aliasing code will allow all entries in the
6154 ld_motion list to not-alias itself. If we miss a load, we will get
6155 the wrong value since gcse might common it and we won't know to
6156 fix it up. */
6157
6158static void
6159invalidate_any_buried_refs (x)
6160 rtx x;
6161{
6162 const char * fmt;
6163 int i,j;
6164 struct ls_expr * ptr;
6165
6166 /* Invalidate it in the list. */
6167 if (GET_CODE (x) == MEM && simple_mem (x))
6168 {
6169 ptr = ldst_entry (x);
6170 ptr->invalid = 1;
6171 }
6172
6173 /* Recursively process the insn. */
6174 fmt = GET_RTX_FORMAT (GET_CODE (x));
6175
6176 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6177 {
6178 if (fmt[i] == 'e')
6179 invalidate_any_buried_refs (XEXP (x, i));
6180 else if (fmt[i] == 'E')
6181 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6182 invalidate_any_buried_refs (XVECEXP (x, i, j));
6183 }
6184}
6185
6186/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6187 being defined as MEM loads and stores to symbols, with no
6188 side effects and no registers in the expression. If there are any
6189 uses/defs which don't match these criteria, it is invalidated and
6190 trimmed out later. */
6191
6192static void
6193compute_ld_motion_mems ()
6194{
6195 struct ls_expr * ptr;
6196 int bb;
6197 rtx insn;
6198
6199 pre_ldst_mems = NULL;
6200
6201 for (bb = 0; bb < n_basic_blocks; bb++)
6202 {
6203 for (insn = BLOCK_HEAD (bb);
6204 insn && insn != NEXT_INSN (BLOCK_END (bb));
6205 insn = NEXT_INSN (insn))
6206 {
6207 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6208 {
6209 if (GET_CODE (PATTERN (insn)) == SET)
6210 {
6211 rtx src = SET_SRC (PATTERN (insn));
6212 rtx dest = SET_DEST (PATTERN (insn));
6213
6214 /* Check for a simple LOAD... */
6215 if (GET_CODE (src) == MEM && simple_mem (src))
6216 {
6217 ptr = ldst_entry (src);
6218 if (GET_CODE (dest) == REG)
6219 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6220 else
6221 ptr->invalid = 1;
6222 }
6223 else
6224 {
6225 /* Make sure there isn't a buried load somewhere. */
6226 invalidate_any_buried_refs (src);
6227 }
6228
6229 /* Check for stores. Don't worry about aliased ones, they
6230 will block any movement we might do later. We only care
6231 about this exact pattern since this is the only
6232 circumstance in which we will ignore the aliasing info. */
6233 if (GET_CODE (dest) == MEM && simple_mem (dest))
6234 {
6235 ptr = ldst_entry (dest);
6236
f54104df
AO
6237 if (GET_CODE (src) != MEM
6238 && GET_CODE (src) != ASM_OPERANDS)
a13d4ebf
AM
6239 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6240 else
6241 ptr->invalid = 1;
6242 }
6243 }
6244 else
6245 invalidate_any_buried_refs (PATTERN (insn));
6246 }
6247 }
6248 }
6249}
6250
6251/* Remove any references that have been either invalidated or are not in the
6252 expression list for pre gcse. */
6253
6254static void
6255trim_ld_motion_mems ()
6256{
6257 struct ls_expr * last = NULL;
6258 struct ls_expr * ptr = first_ls_expr ();
6259
6260 while (ptr != NULL)
6261 {
6262 int del = ptr->invalid;
6263 struct expr * expr = NULL;
6264
6265 /* Delete if entry has been made invalid. */
6266 if (!del)
6267 {
6268 unsigned int i;
6269
6270 del = 1;
6271 /* Delete if we cannot find this mem in the expression list. */
6272 for (i = 0; i < expr_hash_table_size && del; i++)
6273 {
6274 for (expr = expr_hash_table[i];
6275 expr != NULL;
6276 expr = expr->next_same_hash)
6277 if (expr_equiv_p (expr->expr, ptr->pattern))
6278 {
6279 del = 0;
6280 break;
6281 }
6282 }
6283 }
6284
6285 if (del)
6286 {
6287 if (last != NULL)
6288 {
6289 last->next = ptr->next;
6290 free_ldst_entry (ptr);
6291 ptr = last->next;
6292 }
6293 else
6294 {
6295 pre_ldst_mems = pre_ldst_mems->next;
6296 free_ldst_entry (ptr);
6297 ptr = pre_ldst_mems;
6298 }
6299 }
6300 else
6301 {
6302 /* Set the expression field if we are keeping it. */
6303 last = ptr;
6304 ptr->expr = expr;
6305 ptr = ptr->next;
6306 }
6307 }
6308
6309 /* Show the world what we've found. */
6310 if (gcse_file && pre_ldst_mems != NULL)
6311 print_ldst_list (gcse_file);
6312}
6313
6314/* This routine will take an expression which we are replacing with
6315 a reaching register, and update any stores that are needed if
6316 that expression is in the ld_motion list. Stores are updated by
6317 copying their SRC to the reaching register, and then storing
6318 the reaching register into the store location. This keeps the
6319 correct value in the reaching register for the loads. */
6320
6321static void
6322update_ld_motion_stores (expr)
6323 struct expr * expr;
6324{
6325 struct ls_expr * mem_ptr;
6326
6327 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6328 {
6329 /* We can try to find just the REACHED stores, but it shouldn't
6330 matter to set the reaching reg everywhere... some might be
6331 dead and should be eliminated later. */
6332
6333 /* We replace SET mem = expr with
6334 SET reg = expr
6335 SET mem = reg , where reg is the
6336 reaching reg used in the load. */
6337 rtx list = mem_ptr->stores;
6338
6339 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6340 {
6341 rtx insn = XEXP (list, 0);
6342 rtx pat = PATTERN (insn);
6343 rtx src = SET_SRC (pat);
6344 rtx reg = expr->reaching_reg;
c57718d3 6345 rtx copy, new;
a13d4ebf
AM
6346
6347 /* If we've already copied it, continue. */
6348 if (expr->reaching_reg == src)
6349 continue;
6350
6351 if (gcse_file)
6352 {
6353 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6354 print_rtl (gcse_file, expr->reaching_reg);
6355 fprintf (gcse_file, ":\n ");
6356 print_inline_rtx (gcse_file, insn, 8);
6357 fprintf (gcse_file, "\n");
6358 }
6359
6360 copy = gen_move_insn ( reg, SET_SRC (pat));
c57718d3
RK
6361 new = emit_insn_before (copy, insn);
6362 record_one_set (REGNO (reg), new);
6363 set_block_for_new_insns (new, BLOCK_FOR_INSN (insn));
a13d4ebf
AM
6364 SET_SRC (pat) = reg;
6365
6366 /* un-recognize this pattern since it's probably different now. */
6367 INSN_CODE (insn) = -1;
6368 gcse_create_count++;
6369 }
6370 }
6371}
6372\f
6373/* Store motion code. */
6374
6375/* This is used to communicate the target bitvector we want to use in the
6376 reg_set_info routine when called via the note_stores mechanism. */
6377static sbitmap * regvec;
6378
6379/* Used in computing the reverse edge graph bit vectors. */
6380static sbitmap * st_antloc;
6381
6382/* Global holding the number of store expressions we are dealing with. */
6383static int num_stores;
6384
6385 /* Checks to see if we need to mark a register set. Called from note_stores. */
6386
6387static void
6388reg_set_info (dest, setter, data)
6389 rtx dest, setter ATTRIBUTE_UNUSED;
6390 void * data ATTRIBUTE_UNUSED;
6391{
6392 if (GET_CODE (dest) == SUBREG)
6393 dest = SUBREG_REG (dest);
6394
6395 if (GET_CODE (dest) == REG)
6396 SET_BIT (*regvec, REGNO (dest));
6397}
6398
6399/* Return non-zero if the register operands of expression X are killed
6400 anywhere in basic block BB. */
6401
6402static int
6403store_ops_ok (x, bb)
6404 rtx x;
e2d2ed72 6405 basic_block bb;
a13d4ebf
AM
6406{
6407 int i;
6408 enum rtx_code code;
6409 const char * fmt;
6410
6411 /* Repeat is used to turn tail-recursion into iteration. */
6412 repeat:
6413
6414 if (x == 0)
6415 return 1;
6416
6417 code = GET_CODE (x);
6418 switch (code)
6419 {
6420 case REG:
6421 /* If a reg has changed after us in this
6422 block, the operand has been killed. */
e2d2ed72 6423 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
a13d4ebf
AM
6424
6425 case MEM:
6426 x = XEXP (x, 0);
6427 goto repeat;
6428
6429 case PRE_DEC:
6430 case PRE_INC:
6431 case POST_DEC:
6432 case POST_INC:
6433 return 0;
6434
6435 case PC:
6436 case CC0: /*FIXME*/
6437 case CONST:
6438 case CONST_INT:
6439 case CONST_DOUBLE:
6440 case SYMBOL_REF:
6441 case LABEL_REF:
6442 case ADDR_VEC:
6443 case ADDR_DIFF_VEC:
6444 return 1;
6445
6446 default:
6447 break;
6448 }
6449
6450 i = GET_RTX_LENGTH (code) - 1;
6451 fmt = GET_RTX_FORMAT (code);
6452
6453 for (; i >= 0; i--)
6454 {
6455 if (fmt[i] == 'e')
6456 {
6457 rtx tem = XEXP (x, i);
6458
6459 /* If we are about to do the last recursive call
6460 needed at this level, change it into iteration.
6461 This function is called enough to be worth it. */
6462 if (i == 0)
6463 {
6464 x = tem;
6465 goto repeat;
6466 }
6467
6468 if (! store_ops_ok (tem, bb))
6469 return 0;
6470 }
6471 else if (fmt[i] == 'E')
6472 {
6473 int j;
6474
6475 for (j = 0; j < XVECLEN (x, i); j++)
6476 {
6477 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6478 return 0;
6479 }
6480 }
6481 }
6482
6483 return 1;
6484}
6485
6486 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6487
6488static void
6489find_moveable_store (insn)
6490 rtx insn;
6491{
6492 struct ls_expr * ptr;
6493 rtx dest = PATTERN (insn);
6494
f54104df
AO
6495 if (GET_CODE (dest) != SET
6496 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
a13d4ebf
AM
6497 return;
6498
6499 dest = SET_DEST (dest);
6500
6501 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6502 || GET_MODE (dest) == BLKmode)
6503 return;
6504
6505 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6506 return;
6507
6508 if (rtx_varies_p (XEXP (dest, 0), 0))
6509 return;
6510
6511 ptr = ldst_entry (dest);
6512 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6513}
6514
6515/* Perform store motion. Much like gcse, except we move expressions the
6516 other way by looking at the flowgraph in reverse. */
6517
6518static int
6519compute_store_table ()
6520{
6521 int bb, ret;
6522 unsigned regno;
6523 rtx insn, pat;
6524
6525 max_gcse_regno = max_reg_num ();
6526
6527 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
6528 max_gcse_regno);
6529 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
6530 pre_ldst_mems = 0;
6531
6532 /* Find all the stores we care about. */
6533 for (bb = 0; bb < n_basic_blocks; bb++)
6534 {
6535 regvec = & (reg_set_in_block[bb]);
6536 for (insn = BLOCK_END (bb);
6537 insn && insn != PREV_INSN (BLOCK_HEAD (bb));
6538 insn = PREV_INSN (insn))
6539 {
6540#ifdef NON_SAVING_SETJMP
6541 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
6542 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
6543 {
6544 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6545 SET_BIT (reg_set_in_block[bb], regno);
6546 continue;
6547 }
6548#endif
6549 /* Ignore anything that is not a normal insn. */
6550 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6551 continue;
6552
6553 if (GET_CODE (insn) == CALL_INSN)
6554 {
6555 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6556 if ((call_used_regs[regno]
6557 && regno != STACK_POINTER_REGNUM
6558#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
6559 && regno != HARD_FRAME_POINTER_REGNUM
6560#endif
6561#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
6562 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6563#endif
6564#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
6565 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
6566#endif
6567
6568 && regno != FRAME_POINTER_REGNUM)
6569 || global_regs[regno])
6570 SET_BIT (reg_set_in_block[bb], regno);
6571 }
6572
6573 pat = PATTERN (insn);
6574 note_stores (pat, reg_set_info, NULL);
6575
6576 /* Now that we've marked regs, look for stores. */
6577 if (GET_CODE (pat) == SET)
6578 find_moveable_store (insn);
6579 }
6580 }
6581
6582 ret = enumerate_ldsts ();
6583
6584 if (gcse_file)
6585 {
6586 fprintf (gcse_file, "Store Motion Expressions.\n");
6587 print_ldst_list (gcse_file);
6588 }
6589
6590 return ret;
6591}
6592
6593/* Check to see if the load X is aliased with STORE_PATTERN. */
6594
6595static int
6596load_kills_store (x, store_pattern)
6597 rtx x, store_pattern;
6598{
6599 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6600 return 1;
6601 return 0;
6602}
6603
6604/* Go through the entire insn X, looking for any loads which might alias
6605 STORE_PATTERN. Return 1 if found. */
6606
6607static int
6608find_loads (x, store_pattern)
6609 rtx x, store_pattern;
6610{
6611 const char * fmt;
6612 int i,j;
6613 int ret = 0;
6614
6615 if (GET_CODE (x) == SET)
6616 x = SET_SRC (x);
6617
6618 if (GET_CODE (x) == MEM)
6619 {
6620 if (load_kills_store (x, store_pattern))
6621 return 1;
6622 }
6623
6624 /* Recursively process the insn. */
6625 fmt = GET_RTX_FORMAT (GET_CODE (x));
6626
6627 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6628 {
6629 if (fmt[i] == 'e')
6630 ret |= find_loads (XEXP (x, i), store_pattern);
6631 else if (fmt[i] == 'E')
6632 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6633 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6634 }
6635 return ret;
6636}
6637
6638/* Check if INSN kills the store pattern X (is aliased with it).
6639 Return 1 if it does. */
6640
6641static int
6642store_killed_in_insn (x, insn)
6643 rtx x, insn;
6644{
6645 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6646 return 0;
6647
6648 if (GET_CODE (insn) == CALL_INSN)
6649 {
6650 if (CONST_CALL_P (insn))
6651 return 0;
6652 else
6653 return 1;
6654 }
6655
6656 if (GET_CODE (PATTERN (insn)) == SET)
6657 {
6658 rtx pat = PATTERN (insn);
6659 /* Check for memory stores to aliased objects. */
6660 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6661 /* Pretend it's a load and check for aliasing. */
6662 if (find_loads (SET_DEST (pat), x))
6663 return 1;
6664 return find_loads (SET_SRC (pat), x);
6665 }
6666 else
6667 return find_loads (PATTERN (insn), x);
6668}
6669
6670/* Returns 1 if the expression X is loaded or clobbered on or after INSN
6671 within basic block BB. */
6672
6673static int
6674store_killed_after (x, insn, bb)
6675 rtx x, insn;
e2d2ed72 6676 basic_block bb;
a13d4ebf 6677{
e2d2ed72 6678 rtx last = bb->end;
a13d4ebf
AM
6679
6680 if (insn == last)
6681 return 0;
6682
6683 /* Check if the register operands of the store are OK in this block.
6684 Note that if registers are changed ANYWHERE in the block, we'll
6685 decide we can't move it, regardless of whether it changed above
6686 or below the store. This could be improved by checking the register
6687 operands while looking for aliasing in each insn. */
6688 if (!store_ops_ok (XEXP (x, 0), bb))
6689 return 1;
6690
6691 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6692 if (store_killed_in_insn (x, insn))
6693 return 1;
6694
6695 return 0;
6696}
6697
6698/* Returns 1 if the expression X is loaded or clobbered on or before INSN
6699 within basic block BB. */
6700static int
6701store_killed_before (x, insn, bb)
6702 rtx x, insn;
e2d2ed72 6703 basic_block bb;
a13d4ebf 6704{
e2d2ed72 6705 rtx first = bb->head;
a13d4ebf
AM
6706
6707 if (insn == first)
6708 return store_killed_in_insn (x, insn);
6709
6710 /* Check if the register operands of the store are OK in this block.
6711 Note that if registers are changed ANYWHERE in the block, we'll
6712 decide we can't move it, regardless of whether it changed above
6713 or below the store. This could be improved by checking the register
6714 operands while looking for aliasing in each insn. */
6715 if (!store_ops_ok (XEXP (x, 0), bb))
6716 return 1;
6717
6718 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6719 if (store_killed_in_insn (x, insn))
6720 return 1;
6721
6722 return 0;
6723}
6724
6725#define ANTIC_STORE_LIST(x) ((x)->loads)
6726#define AVAIL_STORE_LIST(x) ((x)->stores)
6727
6728/* Given the table of available store insns at the end of blocks,
6729 determine which ones are not killed by aliasing, and generate
6730 the appropriate vectors for gen and killed. */
6731static void
6732build_store_vectors ()
6733{
e2d2ed72
AM
6734 basic_block bb;
6735 int b;
a13d4ebf
AM
6736 rtx insn, st;
6737 struct ls_expr * ptr;
6738
6739 /* Build the gen_vector. This is any store in the table which is not killed
6740 by aliasing later in its block. */
6741 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6742 sbitmap_vector_zero (ae_gen, n_basic_blocks);
6743
6744 st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6745 sbitmap_vector_zero (st_antloc, n_basic_blocks);
6746
6747 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6748 {
6749 /* Put all the stores into either the antic list, or the avail list,
6750 or both. */
6751 rtx store_list = ptr->stores;
6752 ptr->stores = NULL_RTX;
6753
6754 for (st = store_list; st != NULL; st = XEXP (st, 1))
6755 {
6756 insn = XEXP (st, 0);
e2d2ed72 6757 bb = BLOCK_FOR_INSN (insn);
a13d4ebf
AM
6758
6759 if (!store_killed_after (ptr->pattern, insn, bb))
6760 {
6761 /* If we've already seen an available expression in this block,
6762 we can delete the one we saw already (it occurs earlier in
6763 the block) and replace it with this one. We'll copy the
6764 old SRC expression to an unused register in case there
6765 are any side effects. */
e2d2ed72 6766 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf
AM
6767 {
6768 /* Find previous store. */
6769 rtx st;
6770 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
e2d2ed72 6771 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
a13d4ebf
AM
6772 break;
6773 if (st)
6774 {
6775 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6776 if (gcse_file)
6777 fprintf(gcse_file, "Removing redundant store:\n");
6778 replace_store_insn (r, XEXP (st, 0), bb);
6779 XEXP (st, 0) = insn;
6780 continue;
6781 }
6782 }
e2d2ed72 6783 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf
AM
6784 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6785 AVAIL_STORE_LIST (ptr));
6786 }
6787
6788 if (!store_killed_before (ptr->pattern, insn, bb))
6789 {
6790 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
6791 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6792 ANTIC_STORE_LIST (ptr));
6793 }
6794 }
6795
6796 /* Free the original list of store insns. */
6797 free_INSN_LIST_list (&store_list);
6798 }
6799
6800 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6801 sbitmap_vector_zero (ae_kill, n_basic_blocks);
6802
6803 transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6804 sbitmap_vector_zero (transp, n_basic_blocks);
6805
6806 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
e2d2ed72 6807 for (b = 0; b < n_basic_blocks; b++)
a13d4ebf 6808 {
e2d2ed72 6809 if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b)))
a13d4ebf
AM
6810 {
6811 /* The anticipatable expression is not killed if it's gen'd. */
6812 /*
6813 We leave this check out for now. If we have a code sequence
6814 in a block which looks like:
6815 ST MEMa = x
6816 L y = MEMa
6817 ST MEMa = z
6818 We should flag this as having an ANTIC expression, NOT
6819 transparent, NOT killed, and AVAIL.
6820 Unfortunately, since we haven't re-written all loads to
6821 use the reaching reg, we'll end up doing an incorrect
6822 Load in the middle here if we push the store down. It happens in
6823 gcc.c-torture/execute/960311-1.c with -O3
6824 If we always kill it in this case, we'll sometimes do
6825	     unnecessary work, but it shouldn't actually hurt anything.
6826 if (!TEST_BIT (ae_gen[b], ptr->index)). */
6827 SET_BIT (ae_kill[b], ptr->index);
6828 }
6829 else
e2d2ed72 6830 SET_BIT (transp[b], ptr->index);
6831 }
6832
6833 /* Any block with no exits calls some non-returning function, so
6834 we better mark the store killed here, or we might not store to
6835 it at all. If we knew it was abort, we wouldn't have to store,
6836 but we don't know that for sure. */
6837 if (gcse_file)
6838 {
6839 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
6840 print_ldst_list (gcse_file);
6841 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks);
6842 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks);
6843 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks);
6844 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks);
6845 }
6846}
6847
6848/* Insert an instruction at the beginning of a basic block, and update
6849 the BLOCK_HEAD if needed. */
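/* A rough sketch of the behaviour: the new insn is emitted after any
   leading CODE_LABEL and NOTE_INSN_BASIC_BLOCK note, so it becomes the
   first real insn of the block; bb->end is updated when the block
   previously contained nothing but those leading insns.  */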
6850
6851static void
6852insert_insn_start_bb (insn, bb)
6853 rtx insn;
e2d2ed72 6854 basic_block bb;
6855{
6856 /* Insert at start of successor block. */
6857 rtx prev = PREV_INSN (bb->head);
6858 rtx before = bb->head;
6859 while (before != 0)
6860 {
6861 if (GET_CODE (before) != CODE_LABEL
6862 && (GET_CODE (before) != NOTE
6863 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6864 break;
6865 prev = before;
e2d2ed72 6866 if (prev == bb->end)
6867 break;
6868 before = NEXT_INSN (before);
6869 }
6870
6871 insn = emit_insn_after (insn, prev);
6872
6873 if (prev == bb->end)
6874 bb->end = insn;
ccbaf064 6875
e2d2ed72 6876 set_block_for_new_insns (insn, bb);
6877
6878 if (gcse_file)
6879 {
6880 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
e2d2ed72 6881 bb->index);
6882 print_inline_rtx (gcse_file, insn, 6);
6883 fprintf (gcse_file, "\n");
6884 }
6885}
6886
6887/* This routine will insert a store on an edge. EXPR is the ldst entry for
6888 the memory reference, and E is the edge to insert it on. Returns non-zero
6889 if an edge insertion was performed. */
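/* In outline: if every predecessor edge of the destination block wants
   this store (according to pre_insert_map), the per-edge insertions are
   cancelled and the store is emitted once at the start of the block;
   abnormal edges are likewise handled by inserting at the start of the
   destination block; otherwise the insn is queued on the edge with
   insert_insn_on_edge and committed later by commit_edge_insertions.  */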
6890
6891static int
6892insert_store (expr, e)
6893 struct ls_expr * expr;
6894 edge e;
6895{
6896 rtx reg, insn;
e2d2ed72 6897 basic_block bb;
6898 edge tmp;
6899
6900  /* We did all the deletes before this insert, so if we didn't delete a
6901 store, then we haven't set the reaching reg yet either. */
6902 if (expr->reaching_reg == NULL_RTX)
6903 return 0;
6904
6905 reg = expr->reaching_reg;
6906 insn = gen_move_insn (expr->pattern, reg);
6907
6908 /* If we are inserting this expression on ALL predecessor edges of a BB,
6909 insert it at the start of the BB, and reset the insert bits on the other
6910   edges so we don't try to insert it on those edges again. */
e2d2ed72 6911 bb = e->dest;
6912 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6913 {
6914 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6915 if (index == EDGE_INDEX_NO_EDGE)
6916 abort ();
6917 if (! TEST_BIT (pre_insert_map[index], expr->index))
6918 break;
6919 }
6920
6921 /* If tmp is NULL, we found an insertion on every edge, blank the
6922 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 6923 if (!tmp && bb != EXIT_BLOCK_PTR)
6924 {
6925 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6926 {
6927 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6928 RESET_BIT (pre_insert_map[index], expr->index);
6929 }
6930 insert_insn_start_bb (insn, bb);
6931 return 0;
6932 }
6933
6934 /* We can't insert on this edge, so we'll insert at the head of the
6935     successor's block.  See Morgan, sec 10.5. */
6936 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
6937 {
6938 insert_insn_start_bb (insn, bb);
6939 return 0;
6940 }
6941
6942 insert_insn_on_edge (insn, e);
6943
6944 if (gcse_file)
6945 {
6946 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6947 e->src->index, e->dest->index);
6948 print_inline_rtx (gcse_file, insn, 6);
6949 fprintf (gcse_file, "\n");
6950 }
6951
6952 return 1;
6953}
6954
6955/* This routine will replace a store with a SET to a specified register. */
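/* Illustrative example (the RTL shapes are schematic only): a store such as

       (set (mem:SI (reg A)) (reg B))

   is replaced by

       (set (reg REACHING_REG) (reg B))

   so the stored value stays live in the reaching register for the store
   that insert_store will re-emit later.  */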
6956
6957static void
6958replace_store_insn (reg, del, bb)
6959 rtx reg, del;
e2d2ed72 6960 basic_block bb;
6961{
6962 rtx insn;
6963
6964 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
6965 insn = emit_insn_after (insn, del);
e2d2ed72 6966 set_block_for_new_insns (insn, bb);
6967
6968 if (gcse_file)
6969 {
6970 fprintf (gcse_file,
e2d2ed72 6971 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6972 print_inline_rtx (gcse_file, del, 6);
6973 fprintf(gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6974 print_inline_rtx (gcse_file, insn, 6);
6975 fprintf(gcse_file, "\n");
6976 }
6977
6978 if (bb->end == del)
6979 bb->end = insn;
a13d4ebf 6980
6981 if (bb->head == del)
6982 bb->head = insn;
6983
6984 delete_insn (del);
6985}
6986
6987
6988/* Delete a store, but copy the value that would have been stored into
6989 the reaching_reg for later storing. */
6990
6991static void
6992delete_store (expr, bb)
6993 struct ls_expr * expr;
e2d2ed72 6994 basic_block bb;
6995{
6996 rtx reg, i, del;
6997
6998 if (expr->reaching_reg == NULL_RTX)
6999 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7000
7001
7002 /* If there is more than 1 store, the earlier ones will be dead,
7003 but it doesn't hurt to replace them here. */
7004 reg = expr->reaching_reg;
7005
7006 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7007 {
7008 del = XEXP (i, 0);
e2d2ed72 7009 if (BLOCK_FOR_INSN (del) == bb)
7010 {
7011 /* We know there is only one since we deleted redundant
7012 ones during the available computation. */
7013 replace_store_insn (reg, del, bb);
7014 break;
7015 }
7016 }
7017}
7018
7019/* Free memory used by store motion. */
7020
7021static void
7022free_store_memory ()
7023{
7024 free_ldst_mems ();
7025
7026 if (ae_gen)
5a660bff 7027 sbitmap_vector_free (ae_gen);
a13d4ebf 7028 if (ae_kill)
5a660bff 7029 sbitmap_vector_free (ae_kill);
a13d4ebf 7030 if (transp)
5a660bff 7031 sbitmap_vector_free (transp);
a13d4ebf 7032 if (st_antloc)
5a660bff 7033 sbitmap_vector_free (st_antloc);
a13d4ebf 7034 if (pre_insert_map)
5a660bff 7035 sbitmap_vector_free (pre_insert_map);
a13d4ebf 7036 if (pre_delete_map)
5a660bff 7037 sbitmap_vector_free (pre_delete_map);
a13d4ebf 7038 if (reg_set_in_block)
5a660bff 7039 sbitmap_vector_free (reg_set_in_block);
7040
7041 ae_gen = ae_kill = transp = st_antloc = NULL;
7042 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7043}
7044
7045/* Perform store motion. Much like gcse, except we move expressions the
7046 other way by looking at the flowgraph in reverse. */
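/* Roughly: build the store table, compute the local antic/avail/kill/
   transparent vectors, solve the edge-based LCM problem in reverse
   (pre_edge_rev_lcm), then delete the stores marked in pre_delete_map
   and insert new copies on the edges marked in pre_insert_map.  */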
7047
7048static void
7049store_motion ()
7050{
7051 int x;
7052 struct ls_expr * ptr;
7053 int update_flow = 0;
7054
7055 if (gcse_file)
7056 {
7057 fprintf (gcse_file, "before store motion\n");
7058 print_rtl (gcse_file, get_insns ());
7059 }
7060
7061
7062 init_alias_analysis ();
7063
7064 /* Find all the stores that are live to the end of their block. */
7065 num_stores = compute_store_table ();
7066 if (num_stores == 0)
7067 {
5a660bff 7068 sbitmap_vector_free (reg_set_in_block);
7069 end_alias_analysis ();
7070 return;
7071 }
7072
7073  /* Now compute what's actually available to move. */
7074 add_noreturn_fake_exit_edges ();
7075 build_store_vectors ();
7076
7077 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7078 st_antloc, ae_kill, &pre_insert_map,
7079 &pre_delete_map);
7080
7081 /* Now we want to insert the new stores which are going to be needed. */
7082 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7083 {
7084 for (x = 0; x < n_basic_blocks; x++)
7085 if (TEST_BIT (pre_delete_map[x], ptr->index))
e2d2ed72 7086 delete_store (ptr, BASIC_BLOCK (x));
7087
7088 for (x = 0; x < NUM_EDGES (edge_list); x++)
7089 if (TEST_BIT (pre_insert_map[x], ptr->index))
7090 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7091 }
7092
7093 if (update_flow)
7094 commit_edge_insertions ();
7095
7096 free_store_memory ();
7097 free_edge_list (edge_list);
7098 remove_fake_edges ();
7099 end_alias_analysis ();
7100}