/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
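
   For instance (an illustrative example, not part of the original notes):
   a typical candidate is an arithmetic source such as

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   whereas sources that are already trivially available -- a plain REG,
   SUBREG, CONST_INT and the like -- are rejected by want_to_gcse_p since
   nothing is gained by eliminating them.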

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Nonzero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is an expression or a copy propagation one.  */
  int set_p;
};
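
/* An illustrative sketch (not in the original source) of how a bucket
   in one of these tables is walked, mirroring what lookup_expr does:

     struct expr *expr;

     for (expr = table->table[hash]; expr != NULL; expr = expr->next_same_hash)
       if (expr_equiv_p (expr->expr, x))
         break;  */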

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
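
/* An illustrative example (not in the original source): if the insn
   stream has UIDs 1 (insn), 2 (note) and 3 (insn), the real insns get
   CUIDs 0 and 1 -- monotonic and gapless -- while the note shares the
   CUID of the next real insn.  See alloc_gcse_mem below.  */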

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;
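
/* An illustrative walk (not in the original source) over every place
   pseudo-register REGNO is set:

     struct reg_set *r;

     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... examine r->insn here ...  */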

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
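
/* The list is walked with the first_ls_expr / next_ls_expr iterators
   declared below; an illustrative loop (not in the original source):

     struct ls_expr *ptr;

     for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
       ... process ptr->pattern, ptr->loads and ptr->stores here ...  */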

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int, struct hash_table *));
static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((struct hash_table *));
static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
static void free_hash_table PARAMS ((struct hash_table *));
static void compute_hash_table_work PARAMS ((struct hash_table *));
static void dump_hash_table PARAMS ((FILE *, const char *,
				     struct hash_table *));
static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      struct hash_table *));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
					    basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
					      char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((struct hash_table *));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
						 sbitmap *, sbitmap *,
						 struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
						 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
static void local_cprop_pass PARAMS ((int));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list
		= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	      canon_modify_mem_list
		= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	      memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
	      memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  /* Store motion disabled until it is fixed.  */
  if (0 && !optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
						       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
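
/* A worked example (illustrative, not in the original source; assuming
   an 8-byte SBITMAP_ELT_TYPE, so SBITMAP_ELT_BITS == 64): for N == 2
   bitmaps over X == 1000 blocks, column_size is 16000 bytes.  With
   Y == 100000 expressions the full parallel solve would need about
   16000 * SBITMAP_SET_SIZE (100000) == 16000 * 1563 bytes, well over
   the 10MB budget, so we instead return
   64 * ((10485760 + 15999) / 16000) == 64 * 656 == 41984
   equations per chunk.  */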
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, table)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     struct hash_table *table;
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
	= (struct reg_set **) grealloc ((char *) reg_set_table,
					new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (x)
     rtx x;
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

a13d4ebf
AM
1424/* Used for communication between mems_conflict_for_gcse_p and
1425 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1426 conflict between two memory references. */
1427static int gcse_mems_conflict_p;
1428
1429/* Used for communication between mems_conflict_for_gcse_p and
1430 load_killed_in_block_p. A memory reference for a load instruction,
1431 mems_conflict_for_gcse_p will see if a memory store conflicts with
1432 this memory load. */
1433static rtx gcse_mem_operand;
1434
1435/* DEST is the output of an instruction. If it is a memory reference, and
1436 possibly conflicts with the load found in gcse_mem_operand, then set
1437 gcse_mems_conflict_p to a nonzero value. */
1438
1439static void
1440mems_conflict_for_gcse_p (dest, setter, data)
1441 rtx dest, setter ATTRIBUTE_UNUSED;
1442 void *data ATTRIBUTE_UNUSED;
1443{
1444 while (GET_CODE (dest) == SUBREG
1445 || GET_CODE (dest) == ZERO_EXTRACT
1446 || GET_CODE (dest) == SIGN_EXTRACT
1447 || GET_CODE (dest) == STRICT_LOW_PART)
1448 dest = XEXP (dest, 0);
1449
1450 /* If DEST is not a MEM, then it will not conflict with the load. Note
1451 that function calls are assumed to clobber memory, but are handled
1452 elsewhere. */
1453 if (GET_CODE (dest) != MEM)
1454 return;
aaa4ca30 1455
a13d4ebf 1456 /* If we are setting a MEM in our list of specially recognized MEMs,
589005ff
KH
1457 don't mark as killed this time. */
1458
a13d4ebf
AM
1459 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1460 {
1461 if (!find_rtx_in_ldst (dest))
1462 gcse_mems_conflict_p = 1;
1463 return;
1464 }
aaa4ca30 1465
a13d4ebf
AM
1466 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1467 rtx_addr_varies_p))
1468 gcse_mems_conflict_p = 1;
1469}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (bb, uid_limit, x, avail_p)
     basic_block bb;
     int uid_limit;
     rtx x;
     int avail_p;
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
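
/* Usage sketch (illustrative only): as the header comment above notes,
   asking whether a load is killed anywhere in a block means passing a
   UID_LIMIT past every insn with AVAIL_P == 0:

     if (load_killed_in_block_p (bb, get_max_uid () + 1, mem_x, 0))
       ... some store or call in BB may conflict with MEM_X ...

   This is exactly how expr_killed_p uses it later in this file.  */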

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
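
/* Worked example (illustrative only): for the string "abc" this returns
   'a' + 'b' + 'c' = 97 + 98 + 99 = 294.  The sum is cheap but
   order-insensitive, so "abc" and "cba" collide; that is tolerable here
   because the result only contributes to bucket selection.  */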

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      /* We used to hash on the alias set, but that is not good, since the
         alias set may differ between the -fprofile-arcs and
         -fbranch-probabilities compilations, causing the profiles to fail
         to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p)
                           + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                            (x, i)));
                }

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }
          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }

      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
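
/* Worked example (illustrative numbers): with a 31-bucket table, a set
   of pseudo-register 70 hashes to bucket 70 % 31 = 8.  Every set of the
   same register therefore lands in the same bucket, which is what the
   copy propagation code relies on when it walks a register's sets via
   lookup_set and next_set below.  */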

/* Return nonzero if X is equivalent to Y.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
         decide that the expression is transparent in a block when it isn't,
         due to it being set with a different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
        return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
                                ASM_OPERANDS_INPUT (y, i))
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}
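
/* Examples (illustrative only, restating the checks above): the
   commutative case makes (plus:SI (reg 60) (const_int 4)) and
   (plus:SI (const_int 4) (reg 60)) equivalent, while the mode check
   makes (mult:SI (reg 60) (reg 61)) and (mult:HI (reg 60) (reg 61))
   NOT equivalent.  */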

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
     struct hash_table *table;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = antic_occr;
          antic_occr = antic_occr->next;
        }

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          /* First occurrence of this expression in any block?  */
          if (cur_expr->antic_occr == NULL)
            cur_expr->antic_occr = antic_occr;
          else
            last_occr->next = antic_occr;

          antic_occr->insn = insn;
          antic_occr->next = NULL;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = avail_occr;
          avail_occr = avail_occr->next;
        }

      if (avail_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer this occurrence to the currently recorded one.  We want
           the last one in the block and the block is scanned from start
           to end.  */
        avail_occr->insn = insn;
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);

          /* First occurrence of this expression in any block?  */
          if (cur_expr->avail_occr == NULL)
            cur_expr->avail_occr = avail_occr;
          else
            last_occr->next = avail_occr;

          avail_occr->insn = insn;
          avail_occr->next = NULL;
        }
    }
}
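
/* Example of the occurrence bookkeeping above (illustrative insn numbers):
   if a block computes the same expression at insns 10 and 20, the
   antic_occr list keeps insn 10 (the first occurrence, since
   anticipatability looks backward from the block start), while the
   avail_occr list keeps insn 20 (the last occurrence, since availability
   looks forward to the block end).  */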

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn, table)
     rtx x;
     rtx insn;
     struct hash_table *table;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = cur_occr;
      else
        last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Scan pattern PAT of INSN and add an entry to the hash TABLE (either
   the set table or the expression table, according to TABLE->SET_P).  */

static void
hash_scan_set (pat, insn, table)
     rtx pat, insn;
     struct hash_table *table;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (GET_CODE (dest) == REG)
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && CONSTANT_P (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p [GET_MODE (dest)]
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes, and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || GET_CODE (XEXP (note, 0)) != MEM))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p [GET_MODE (dest)]
                    && REGNO (src) != regno)
                   || CONSTANT_P (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
}
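
/* Illustrative examples (hypothetical register numbers, not from the
   original sources): with table->set_p == 0, an insn like
     (set (reg 100) (plus:SI (reg 101) (const_int 4)))
   records the PLUS in the expression table, while with
   table->set_p != 0, a pseudo-to-pseudo copy such as
     (set (reg 100) (reg 101))
   or a constant load such as
     (set (reg 100) (const_int 7))
   is recorded whole in the set table for const/copy propagation.  */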

static void
hash_scan_clobber (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If TABLE->SET_P is nonzero, entries are added to the assignment hash
   table, otherwise to the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, table, in_libcall_block)
     rtx insn;
     struct hash_table *table;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (file, name, table)
     FILE *file;
     const char *name;
     struct hash_table *table;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table
    = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}


/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (dest, unused1, v_insn)
     rtx dest ATTRIBUTE_UNUSED;
     rtx unused1 ATTRIBUTE_UNUSED;
     void *v_insn;
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (GET_CODE (dest) != MEM)
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
  bitmap_set_bit (canon_modify_mem_list_set, bb);
}
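
/* Sketch of the resulting list layout (illustrative only): since each of
   the two alloc_EXPR_LIST calls above prepends to the list, and DEST is
   prepended last, a walk from the head sees each pair as

     [MEM, canonical address, MEM', canonical address', ...]

   which is the pairwise order the header comment says consumers must
   observe when taking elements off.  */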

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* Note that traversals of this list (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (canon_modify_mem_list_set, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

ZD
2475compute_hash_table_work (table)
2476 struct hash_table *table;
7506f491 2477{
80c29cc4 2478 unsigned int i;
7506f491
DE
2479
2480 /* While we compute the hash table we also compute a bit array of which
2481 registers are set in which blocks.
7506f491
DE
2482 ??? This isn't needed during const/copy propagation, but it's cheap to
2483 compute. Later. */
d55bc081 2484 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2485
a13d4ebf 2486 /* re-Cache any INSN_LIST nodes we have allocated. */
73991d6a 2487 clear_modify_mem_tables ();
7506f491 2488 /* Some working arrays used to track first and last set in each block. */
80c29cc4
RZ
2489 reg_avail_info = (struct reg_avail_info*)
2490 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2491
2492 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2493 reg_avail_info[i].last_bb = NULL;
7506f491 2494
e0082a72 2495 FOR_EACH_BB (current_bb)
7506f491
DE
2496 {
2497 rtx insn;
770ae6cc 2498 unsigned int regno;
ed79bb3d 2499 int in_libcall_block;
7506f491
DE
2500
2501 /* First pass over the instructions records information used to
2502 determine when registers and memory are first and last set.
ccef9ef5 2503 ??? hard-reg reg_set_in_block computation
7506f491
DE
2504 could be moved to compute_sets since they currently don't change. */
2505
e0082a72
ZD
2506 for (insn = current_bb->head;
2507 insn && insn != NEXT_INSN (current_bb->end);
7506f491
DE
2508 insn = NEXT_INSN (insn))
2509 {
2c3c49de 2510 if (! INSN_P (insn))
7506f491
DE
2511 continue;
2512
2513 if (GET_CODE (insn) == CALL_INSN)
2514 {
19652adf 2515 bool clobbers_all = false;
589005ff 2516#ifdef NON_SAVING_SETJMP
19652adf
ZW
2517 if (NON_SAVING_SETJMP
2518 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2519 clobbers_all = true;
2520#endif
2521
7506f491 2522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
2523 if (clobbers_all
2524 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2525 record_last_reg_set_info (insn, regno);
c4c81601 2526
24a28584 2527 mark_call (insn);
7506f491
DE
2528 }
2529
84832317 2530 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2531 }
2532
2533 /* The next pass builds the hash table. */
2534
e0082a72
ZD
2535 for (insn = current_bb->head, in_libcall_block = 0;
2536 insn && insn != NEXT_INSN (current_bb->end);
7506f491 2537 insn = NEXT_INSN (insn))
2c3c49de 2538 if (INSN_P (insn))
c4c81601
RK
2539 {
2540 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff 2541 in_libcall_block = 1;
02280659 2542 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2543 in_libcall_block = 0;
02280659
ZD
2544 hash_scan_insn (insn, table, in_libcall_block);
2545 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2546 in_libcall_block = 0;
8e42ace1 2547 }
7506f491
DE
2548 }
2549
80c29cc4
RZ
2550 free (reg_avail_info);
2551 reg_avail_info = NULL;
7506f491
DE
2552}

/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (n_insns, table, set_p)
     int n_insns;
     struct hash_table *table;
     int set_p;
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = (struct expr **) gmalloc (n);
  table->set_p = set_p;
}
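
/* Worked example of the sizing above (illustrative numbers): for a
   function of 200 insns, table->size starts at 200 / 4 = 50 and is then
   forced odd, 50 | 1 = 51, so 51 buckets are allocated.  For tiny
   functions the size is clamped to the minimum of 11 buckets.  */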

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (table)
     struct hash_table *table;
{
  free (table->table);
}

/* Compute the contents of the set or expression hash TABLE, for
   copy/const propagation or for GCSE respectively.  */

static void
compute_hash_table (table)
     struct hash_table *table;
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset ((char *) table->table, 0,
          table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
\f
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression TABLE.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat, table)
     rtx pat;
     struct hash_table *table;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 table->size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = table->table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set TABLE.  If PAT is non-NULL look for the entry that
   matches it, otherwise return the first entry for REGNO.  The result is a
   pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat, table)
     unsigned int regno;
     rtx pat;
     struct hash_table *table;
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
        expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
        expr = expr->next_same_hash;
    }

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     unsigned int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (listp)
     rtx *listp;
{
  rtx list, next;

  for (list = *listp; list; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
        free_EXPR_LIST_node (list);
      else
        free_INSN_LIST_node (list);
    }

  *listp = NULL;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables ()
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
  bitmap_clear (modify_mem_list_set);

  EXECUTE_IF_SET_IN_BITMAP
    (canon_modify_mem_list_set, 0, i,
     free_insn_expr_list_list (canon_modify_mem_list + i));
  bitmap_clear (canon_modify_mem_list_set);
}

/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set.  */

static void
free_modify_mem_tables ()
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial: we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}

/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                  INSN_CUID (insn), x, 0))
        return 0;
      else
        return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return oprs_not_set_p (XEXP (x, i), insn);

          if (! oprs_not_set_p (XEXP (x, i), insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
            return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          mark_set (x, insn);
        else if (GET_CODE (x) == CLOBBER)
          mark_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

\f
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
}

/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno;
     basic_block bb;
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
}

/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int cuid;
  unsigned int regno;
  int i;
  basic_block bb;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
           generates a definition in the block)
         Call the reg set by the insn corresponding to that bit regx
         Look at the linked list starting at reg_set_table[regx]
         For each setting of regx in the linked list, which is not in
             this block
           Set the bit in `kill' corresponding to that insn.  */
  FOR_EACH_BB (bb)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb->index], cuid))
        {
          rtx insn = CUID_INSN (cuid);
          rtx pat = PATTERN (insn);

          if (GET_CODE (insn) == CALL_INSN)
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  handle_rd_kill_set (insn, regno, bb);
            }

          if (GET_CODE (pat) == PARALLEL)
            {
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

                  if ((code == SET || code == CLOBBER)
                      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                    handle_rd_kill_set (insn,
                                        REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
                                        bb);
                }
            }
          else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
            /* Each setting of this register outside of this block
               must be marked in the set of kills in this block.  */
            handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
        }
}

/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int changed, passes;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      FOR_EACH_BB (bb)
        {
          sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
          changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
                                               reaching_defs[bb->index], rd_kill[bb->index]);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
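
/* The iteration above solves the standard reaching-definitions dataflow
   equations (a sketch restating the code, not adding anything new):

     reaching_defs[bb] = union over predecessors p of rd_out[p]
     rd_out[bb]        = rd_gen[bb] U (reaching_defs[bb] - rd_kill[bb])

   repeated until no rd_out set changes.  */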
\f
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_blocks);
}

static void
free_avail_expr_mem ()
{
  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen (expr_hash_table)
     struct hash_table *expr_hash_table;
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table->size; i++)
    for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
        SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}

/* Return nonzero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     basic_block bb;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
        return 1;
      else
        return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return expr_killed_p (XEXP (x, i), bb);
          else if (expr_killed_p (XEXP (x, i), bb))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (expr_killed_p (XVECEXP (x, i, j), bb))
            return 1;
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
     sbitmap *ae_gen, *ae_kill;
     struct hash_table *expr_hash_table;
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  FOR_EACH_BB (bb)
    for (i = 0; i < expr_hash_table->size; i++)
      for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
        {
          /* Skip EXPR if generated in this block.  */
          if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
            continue;

          if (expr_killed_p (expr->expr, bb))
            SET_BIT (ae_kill[bb->index], expr->bitmap_index);
        }
}
\f
/* Actually perform the Classic GCSE optimizations.  */

/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (visited[pred_bb->index])
        /* This predecessor has already been visited.  Nothing to do.  */
        ;
      else if (pred_bb == bb)
        {
          /* BB loops on itself.  */
          if (check_self_loop
              && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
              && BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
                                        visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
{
  int rval;
  char *visited = (char *) xcalloc (last_basic_block, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
	/* The available expression is actually itself
	   (i.e. a loop in the flow graph) so do nothing.  */
	return NULL;

      /* (FIXME) Case that we found a pattern that was created by
	 a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
	 Search backwards from this insn to see how many of these
	 computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	{
	  if (BLOCK_FOR_INSN (occr->insn) == bb)
	    {
	      /* The expression is generated in this block.
		 The only time we care about this is when the expression
		 is generated later in the block [and thus there's a loop].
		 We let the normal cse pass handle the other cases.  */
	      if (INSN_CUID (insn) < INSN_CUID (occr->insn)
		  && expr_reaches_here_p (occr, expr, bb, 1))
		{
		  can_reach++;
		  if (can_reach > 1)
		    return NULL;

		  insn_computes_expr = occr->insn;
		}
	    }
	  else if (expr_reaches_here_p (occr, expr, bb, 0))
	    {
	      can_reach++;
	      if (can_reach > 1)
		return NULL;

	      insn_computes_expr = occr->insn;
	    }
	}

      if (insn_computes_expr == NULL)
	abort ();

      return insn_computes_expr;
    }
}

/* Return nonzero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
	{
	  if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
	    return 1;
	  else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
	    reg = XEXP (PATTERN (def_insn), 0);
	  else if (GET_CODE (PATTERN (def_insn)) == SET)
	    reg = SET_DEST (PATTERN (def_insn));
	  else
	    abort ();

	  return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
	}
      else
	return 0;
    }

  return 0;
}

/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying; i.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
	number_of_reaching_defs++;
	/* Ignore parallels for now.  */
	if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
	  return 0;

	if (!for_combine
	    && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
		|| ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				  SET_SRC (PATTERN (insn)))))
	  /* A setting of the reg to a different value reaches INSN.  */
	  return 0;

	if (number_of_reaching_defs > 1)
	  {
	    /* If in this setting the value the register is being set to is
	       equal to the previous value the register was set to and this
	       setting reaches the insn we are trying to do the substitution
	       on then we are ok.  */
	    if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
	      return 0;
	    else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				    SET_SRC (PATTERN (insn))))
	      return 0;
	  }

	*addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is nonzero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;
  expr_set = single_set (insn_computes_expr);
  if (!expr_set)
    abort ();

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
	 here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
	= REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
	 however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
	  /* If the register the expression is computed into is set only once,
	     or only one set reaches this insn, we can use it.  */
	  || (((this_reg = reg_set_table[regnum_for_replacing]),
	       this_reg->next == NULL)
	      || can_disregard_other_sets (&this_reg, insn, 0)))
	{
	  use_src = 1;
	  found_setting = 1;
	}
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
	= REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
	abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
	 or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
	  || can_disregard_other_sets (&this_reg, insn, 0))
	found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
	to = SET_SRC (expr_set);
      else
	to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
		       INSN_UID (insn));
	      fprintf (gcse_file, " reg %d %s insn %d\n",
		       REGNO (to), use_src ? "from" : "set in",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after INSN_COMPUTES_EXPR that copies the register
	 set there into a new pseudo register; call this new register REGN.
	 From that insn until the end of the basic block, or until the
	 original register is next set, replace all uses of it with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
	 an insn.  I think this is ok.  */
      new_insn
	= emit_insn_after (gen_rtx_SET (VOIDmode, to,
					SET_DEST (expr_set)),
			   insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
		   INSN_UID (NEXT_INSN (insn_computes_expr)),
		   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
	  fprintf (gcse_file, ", computed in insn %d,\n",
		   INSN_UID (insn_computes_expr));
	  fprintf (gcse_file, " into newly allocated reg %d\n",
		   REGNO (to));
	}

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
				 SET_DEST (PATTERN
					   (NEXT_INSN (insn_computes_expr))),
				 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "GCSE: Replacing the source in insn %d with reg %d ",
		       INSN_UID (insn),
		       REGNO (SET_DEST (PATTERN (NEXT_INSN
						 (insn_computes_expr)))));
	      fprintf (gcse_file, "set in insn %d\n",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  return changed;
}
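
/* Illustrative sketch (not part of GCC): the second strategy above in RTL
   terms.  Before, with reg 7 set more than once elsewhere:

       insn A: (set (reg 7) (plus (reg 5) (reg 6)))
       ...
       insn B: (set (reg 9) (plus (reg 5) (reg 6)))

   After, with a fresh pseudo reg 12 copied immediately after insn A:

       insn A:  (set (reg 7) (plus (reg 5) (reg 6)))
       insn A': (set (reg 12) (reg 7))
       ...
       insn B:  (set (reg 9) (reg 12))

   All register and insn numbers here are hypothetical.  */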

/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is nonzero if a change was made.  */

static int
classic_gcse ()
{
  int changed;
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
	   insn != NULL && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  /* Is insn of form (set (pseudo-reg) ...)?  */
	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (want_to_gcse_p (src)
		  /* Is the expression recorded?  */
		  && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
		  /* Is the expression available [at the start of the
		     block]?  */
		  && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_not_set_p (src, insn))
		changed |= handle_avail_expr (insn, expr);
	    }

	  /* Keep track of everything modified by this insn.  */
	  /* ??? Need to be careful w.r.t. mods done to INSN.  */
	  if (INSN_P (insn))
	    mark_oprs_set (insn);
	}
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return nonzero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  alloc_rd_mem (last_basic_block, max_cuid);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
      compute_ae_gen (&expr_hash_table);
      compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
	       current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      FOR_EACH_BB (bb)
	{
	  rtx list_entry = canon_modify_mem_list[bb->index];

	  while (list_entry)
	    {
	      rtx dest, dest_addr;

	      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      /* LIST_ENTRY must be an INSN of some kind that sets memory.
		 Examine each hunk of memory that is modified.  */

	      dest = XEXP (list_entry, 0);
	      list_entry = XEXP (list_entry, 1);
	      dest_addr = XEXP (list_entry, 0);

	      if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					 x, rtx_addr_varies_p))
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      list_entry = XEXP (list_entry, 1);
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
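
/* Illustrative sketch (not part of GCC): what compute_transp records.  For
   the expression (plus (reg 5) (reg 6)) with index INDX, any block
   containing

       (set (reg 6) ...)

   modifies an operand, so that block's INDX bit is set (SET_P == 1, e.g.
   when building an "altered" map) or reset (SET_P == 0, e.g. when pruning
   an all-ones transparency map).  A block that touches neither reg 5,
   reg 6, nor (for a MEM operand) any aliased memory leaves the expression
   transparent.  Register numbers here are hypothetical.  */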

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (xptr, data)
     rtx *xptr;
     void *data ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
	 source of the set given our substitution.  We could perhaps try
	 this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;

      /* If we've failed to do replacement, have a single SET, and don't
	 already have a note, add a REG_EQUAL note to not lose
	 information.  */
      if (!success && note == 0 && set != 0)
	note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  else if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* REG_EQUAL may get simplified into a register.  We don't allow that;
     remove the note.  This code ought not to happen, because previous code
     ought to synthesize a reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
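
/* Illustrative sketch (not part of GCC): the REG_EQUAL fallback above.
   Trying to replace (reg 5) with (const_int 70000) in

       (set (reg 8) (plus (reg 5) (reg 6)))

   may fail validation when the target has no matching add-immediate form;
   the simplified source is then preserved as a note on the insn:

       (expr_list:REG_EQUAL (plus (reg 6) (const_int 70000)))

   so later passes keep the fact even though the pattern is unchanged.
   Register numbers and the constant here are hypothetical.  */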

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
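
/* Illustrative sketch (not part of GCC): following a copy chain.  With

       (set (reg 4) (const_int 7))
       (set (reg 5) (reg 4))

   both available on entry to the block, a use of reg 5 walks reg 5 -> reg 4
   and ends at the constant.  The stand-alone toy below models the chain
   with arrays instead of the hash table; every name in it is
   hypothetical.  */
#if 0
#include <stdio.h>

#define NREGS 8
#define NO_SRC -1

int
main (void)
{
  int copy_src[NREGS];   /* register copied into r, or NO_SRC */
  int const_src[NREGS];  /* constant stored into r, or NO_SRC */
  int regno = 5, found = NO_SRC, i;

  for (i = 0; i < NREGS; i++)
    copy_src[i] = const_src[i] = NO_SRC;

  copy_src[5] = 4;    /* (set (reg 5) (reg 4)) is available */
  const_src[4] = 7;   /* (set (reg 4) (const_int 7)) is available */

  for (;;)
    {
      if (const_src[regno] != NO_SRC)
	{
	  found = const_src[regno];   /* a constant ends the chain */
	  break;
	}
      if (copy_src[regno] == NO_SRC)
	break;                        /* end of the (possibly empty) chain */
      regno = copy_src[regno];        /* follow reg 5 -> reg 4 */
    }

  printf ("substitute constant %d\n", found);   /* prints 7 */
  return 0;
}
#endif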

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (bb, setcc, jump, from, src)
     basic_block bb;
     rtx setcc;
     rtx jump;
     rtx from;
     rtx src;
{
  rtx new, new_set;
  rtx set = pc_set (jump);

  /* First substitute the SETCC condition into the SET_SRC of JUMP, then
     substitute the known value into this expanded jump.  */
  if (setcc != NULL
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_set = single_set (setcc);
      new_set = simplify_replace_rtx (SET_SRC (set),
				      SET_DEST (setcc_set),
				      SET_SRC (setcc_set));
    }
  else
    new_set = set;

  new = simplify_replace_rtx (new_set, from, src);

  /* If no simplification can be made, then try the next
     register.  */
  if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
	 to the one computed by setcc.  */
      if (setcc
	  && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	return 0;

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
	       "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
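
/* Illustrative sketch (not part of GCC): a propagation handled above.
   Given the setcc/jump pair

       (set (reg 3) (eq (reg 5) (const_int 0)))
       (set (pc) (if_then_else (ne (reg 3) (const_int 0))
			       (label_ref 23)
			       (pc)))

   and a known constant (reg 5) = (const_int 0), substituting the setcc
   source into the jump and then replacing reg 5 lets the simplifier
   collapse the condition to (label_ref 23): the jump becomes unconditional
   and a barrier is emitted after it.  Register and label numbers here are
   hypothetical.  */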

static bool
constprop_register (insn, from, to, alter_jumps)
     rtx insn;
     rtx from;
     rtx to;
     int alter_jumps;
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}

/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
	abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (CONSTANT_P (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}
	    }
	}
      else if (GET_CODE (src) == REG
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (x, insn, alter_jumps, libcall_sp)
     rtx x;
     rtx insn;
     int alter_jumps;
     rtx *libcall_sp;
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  || (GET_CODE (PATTERN (insn)) != USE
	      && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
	return false;
      for (l = val->locs; l; l = l->next)
	{
	  rtx this_rtx = l->loc;
	  rtx note;

	  if (l->in_libcall)
	    continue;

	  if (CONSTANT_P (this_rtx))
	    newcnst = this_rtx;
	  if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
	      /* Don't copy propagate if it has an attached REG_EQUIV note.
		 At this point only function parameters should have
		 REG_EQUIV notes, and if the argument slot is used somewhere
		 explicitly, it means the address of the parameter has been
		 taken, so we should not extend the lifetime of the
		 pseudo.  */
	      && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
		  || GET_CODE (XEXP (note, 0)) != MEM))
	    newreg = this_rtx;
	}
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
	{
	  /* If we find a case where we can't fix the retval REG_EQUAL notes
	     to match the new register, we either have to abandon this
	     replacement or fix delete_trivially_dead_insns to preserve the
	     setting insn, or make it delete the REG_EQUAL note, and fix up
	     all passes that require the REG_EQUAL note there.  */
	  if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
	    abort ();
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
		       REGNO (x));
	      fprintf (gcse_file, "insn %d with constant ",
		       INSN_UID (insn));
	      print_rtl (gcse_file, newcnst);
	      fprintf (gcse_file, "\n");
	    }
	  const_prop_count++;
	  return true;
	}
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
	{
	  adjust_libcall_notes (x, newreg, insn, libcall_sp);
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
		       REGNO (x), INSN_UID (insn));
	      fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
	    }
	  copy_prop_count++;
	  return true;
	}
    }
  return false;
}

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
     rtx oldreg, newval, insn, *libcall_sp;
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
	continue;

      if (REG_P (newval))
	{
	  if (reg_set_between_p (newval, PREV_INSN (insn), end))
	    {
	      do
		{
		  note = find_reg_equal_equiv_note (end);
		  if (! note)
		    continue;
		  if (reg_mentioned_p (newval, XEXP (note, 0)))
		    return false;
		}
	      while ((end = *libcall_sp++));
	      return true;
	    }
	}
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}

#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (alter_jumps)
     int alter_jumps;
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;

  cselib_init ();
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

	  if (note)
	    {
	      if (libcall_sp == libcall_stack)
		abort ();
	      *--libcall_sp = XEXP (note, 0);
	    }
	  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	  if (note)
	    libcall_sp++;
	  note = find_reg_equal_equiv_note (insn);
	  do
	    {
	      reg_use_count = 0;
	      note_uses (&PATTERN (insn), find_used_regs, NULL);
	      if (note)
		find_used_regs (&XEXP (note, 0), NULL);

	      for (reg_used = &reg_use_table[0]; reg_use_count > 0;
		   reg_used++, reg_use_count--)
		if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
				    libcall_sp))
		  break;
	    }
	  while (reg_use_count);
	}
      cselib_process_insn (insn);
    }
  cselib_finish ();
}
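
/* Illustrative sketch (not part of GCC): the libcall stack discipline used
   above.  A REG_LIBCALL note opens a region (push), the matching REG_RETVAL
   note closes it (pop), and the array is terminated by a zero entry so a
   callee such as adjust_libcall_notes can walk every enclosing region up to
   the terminator.  The stand-alone toy below models the pushed insns as
   plain integers; all values in it are hypothetical.  */
#if 0
#include <assert.h>

#define MAX_NESTED 9

int
main (void)
{
  int stack[MAX_NESTED + 1];
  int *sp = &stack[MAX_NESTED];

  *sp = 0;          /* zero terminator */
  *--sp = 101;      /* REG_LIBCALL: outer region ends at insn 101 */
  *--sp = 102;      /* REG_LIBCALL: nested region ends at insn 102 */
  sp++;             /* REG_RETVAL: the nested region is closed */

  /* One region is still open; a walk from SP stops at the terminator.  */
  assert (sp[0] == 101 && sp[1] == 0);
  return 0;
}
#endif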

/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
	fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
	   insn != NULL && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (GET_CODE (insn) != NOTE)
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
4463}
4464
4465/* Perform one copy/constant propagation pass.
a0134312
RS
4466 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4467 propagation into conditional jumps. If BYPASS_JUMPS is true,
4468 perform conditional jump bypassing optimizations. */
7506f491
DE
4469
4470static int
a0134312 4471one_cprop_pass (pass, cprop_jumps, bypass_jumps)
7506f491 4472 int pass;
a0134312
RS
4473 int cprop_jumps;
4474 int bypass_jumps;
7506f491
DE
4475{
4476 int changed = 0;
4477
4478 const_prop_count = 0;
4479 copy_prop_count = 0;
4480
a0134312 4481 local_cprop_pass (cprop_jumps);
ae860ff7 4482
02280659
ZD
4483 alloc_hash_table (max_cuid, &set_hash_table, 1);
4484 compute_hash_table (&set_hash_table);
7506f491 4485 if (gcse_file)
02280659
ZD
4486 dump_hash_table (gcse_file, "SET", &set_hash_table);
4487 if (set_hash_table.n_elems > 0)
7506f491 4488 {
02280659 4489 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 4490 compute_cprop_data ();
a0134312
RS
4491 changed = cprop (cprop_jumps);
4492 if (bypass_jumps)
0e3f0221 4493 changed |= bypass_conditional_jumps ();
7506f491
DE
4494 free_cprop_mem ();
4495 }
c4c81601 4496
02280659 4497 free_hash_table (&set_hash_table);
7506f491
DE
4498
4499 if (gcse_file)
4500 {
c4c81601
RK
4501 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4502 current_function_name, pass, bytes_used);
4503 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4504 const_prop_count, copy_prop_count);
7506f491
DE
4505 }
4506
4507 return changed;
4508}
\f
/* Bypass conditional jumps.  */

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (regno, bb)
     int regno;
     int bb;
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);

      while (set)
	{
	  if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);
      if (CONSTANT_P (src))
	result = set;

      if (GET_CODE (src) != REG)
	break;

      regno = REGNO (src);
    }
  return result;
}
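
/* Illustrative sketch (not part of GCC): the decision bypass_block makes
   once find_bypass_set proves that the register tested by a conditional
   jump equals a constant on some incoming edge.  Stand-alone model of a
   jump on "r == 0"; every name in it is hypothetical.  */
#if 0
#include <stdio.h>

enum dest { BRANCH_TARGET, FALLTHRU_TARGET };

/* Models (set (pc) (if_then_else (eq r 0) (label_ref L) (pc))).  */
static enum dest
bypass_dest (int known_r)
{
  return known_r == 0 ? BRANCH_TARGET : FALLTHRU_TARGET;
}

int
main (void)
{
  /* An edge known to set r to 0 can be redirected straight to L.  */
  printf ("edge with r == 0 goes to %s\n",
	  bypass_dest (0) == BRANCH_TARGET ? "the branch target" : "fallthru");
  return 0;
}
#endif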

/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.  */

static int
bypass_block (bb, setcc, jump)
     basic_block bb;
     rtx setcc, jump;
{
  rtx insn, note;
  edge e, enext;
  int i, change;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  change = 0;
  for (e = bb->pred; e; e = enext)
    {
      enext = e->pred_next;
      for (i = 0; i < reg_use_count; i++)
	{
	  struct reg_use *reg_used = &reg_use_table[i];
	  unsigned int regno = REGNO (reg_used->reg_rtx);
	  basic_block dest, old_dest;
	  struct expr *set;
	  rtx src, new;

	  if (regno >= max_gcse_regno)
	    continue;

	  set = find_bypass_set (regno, e->src->index);

	  if (! set)
	    continue;

	  src = SET_SRC (pc_set (jump));

	  if (setcc != NULL)
	    src = simplify_replace_rtx (src,
					SET_DEST (PATTERN (setcc)),
					SET_SRC (PATTERN (setcc)));

	  new = simplify_replace_rtx (src, reg_used->reg_rtx,
				      SET_SRC (set->expr));

	  if (new == pc_rtx)
	    dest = FALLTHRU_EDGE (bb)->dest;
	  else if (GET_CODE (new) == LABEL_REF)
	    dest = BRANCH_EDGE (bb)->dest;
	  else
	    dest = NULL;

	  /* Once basic block indices are stable, we should be able
	     to use redirect_edge_and_branch_force instead.  */
	  old_dest = e->dest;
	  if (dest != NULL && dest != old_dest
	      && redirect_edge_and_branch (e, dest))
	    {
	      /* Copy the register setter to the redirected edge.
		 Don't copy CC0 setters, as CC0 is dead after jump.  */
	      if (setcc)
		{
		  rtx pat = PATTERN (setcc);
		  if (!CC0_P (SET_DEST (pat)))
		    insert_insn_on_edge (copy_insn (pat), e);
		}

	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
			   regno, INSN_UID (jump));
		  print_rtl (gcse_file, SET_SRC (set->expr));
		  fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
			   e->src->index, old_dest->index, dest->index);
		}
	      change = 1;
	      break;
	    }
	}
    }
  return change;
}

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Returns nonzero if a change was made.  */

static int
bypass_conditional_jumps ()
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
		  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (bb->pred && bb->pred->pred_next)
	{
	  setcc = NULL_RTX;
	  for (insn = bb->head;
	       insn != NULL && insn != NEXT_INSN (bb->end);
	       insn = NEXT_INSN (insn))
	    if (GET_CODE (insn) == INSN)
	      {
		if (setcc)
		  break;
		if (GET_CODE (PATTERN (insn)) != SET)
		  break;

		dest = SET_DEST (PATTERN (insn));
		if (REG_P (dest) || CC0_P (dest))
		  setcc = insn;
		else
		  break;
	      }
	    else if (GET_CODE (insn) == JUMP_INSN)
	      {
		if (any_condjump_p (insn) && onlyjump_p (insn))
		  changed |= bypass_block (bb, setcc, insn);
		break;
	      }
	    else if (INSN_P (insn))
	      break;
	}
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (ae_in)
    sbitmap_vector_free (ae_in);
  if (ae_out)
    sbitmap_vector_free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}
4792/* Top level routine to do the dataflow analysis needed by PRE. */
4793
4794static void
4795compute_pre_data ()
4796{
b614171e 4797 sbitmap trapping_expr;
e0082a72 4798 basic_block bb;
b614171e 4799 unsigned int ui;
c66e8ae9 4800
02280659 4801 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 4802 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 4803
b614171e 4804 /* Collect expressions which might trap. */
02280659 4805 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 4806 sbitmap_zero (trapping_expr);
02280659 4807 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
4808 {
4809 struct expr *e;
02280659 4810 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
4811 if (may_trap_p (e->expr))
4812 SET_BIT (trapping_expr, e->bitmap_index);
4813 }
4814
c66e8ae9
JL
4815 /* Compute ae_kill for each basic block using:
4816
4817 ~(TRANSP | COMP)
4818
a2e90653 4819 This is significantly faster than compute_ae_kill. */
c66e8ae9 4820
e0082a72 4821 FOR_EACH_BB (bb)
c66e8ae9 4822 {
b614171e
MM
4823 edge e;
4824
4825 /* If the current block is the destination of an abnormal edge, we
4826 kill all trapping expressions because we won't be able to properly
4827 place the instruction on the edge. So make them neither
4828 anticipatable nor transparent. This is fairly conservative. */
e0082a72 4829 for (e = bb->pred; e ; e = e->pred_next)
b614171e
MM
4830 if (e->flags & EDGE_ABNORMAL)
4831 {
e0082a72
ZD
4832 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4833 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
4834 break;
4835 }
4836
e0082a72
ZD
4837 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4838 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
4839 }
4840
02280659 4841 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 4842 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 4843 sbitmap_vector_free (antloc);
bd3675fc 4844 antloc = NULL;
5a660bff 4845 sbitmap_vector_free (ae_kill);
589005ff 4846 ae_kill = NULL;
76ac938b 4847 sbitmap_free (trapping_expr);
7506f491
DE
4848}
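
/* Illustrative sketch (not part of GCC): the kill computation above relies
   on the identity KILL = ~(TRANSP | COMP): an expression is killed at the
   end of a block exactly when it is neither transparent through the block
   nor computed within it.  Stand-alone check with expressions packed into
   one word:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned transp = 0x5;             /* expressions 0 and 2 transparent */
  unsigned comp = 0x2;               /* expression 1 computed */
  unsigned kill = ~(transp | comp);

  /* Expressions 0-2 survive; expression 3 is neither, so it is killed.  */
  assert (!(kill & 0x7) && (kill & 0x8));
  return 0;
}
#endif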
\f
/* PRE utilities */

/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb->index])
	;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb->index] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
{
  int rval;
  char *visited = (char *) xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
\f

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = get_insns ();
  end_sequence ();

  return pat;
}
589005ff 4960
a65f3558
JL
4961/* Add EXPR to the end of basic block BB.
4962
 4963   This is used by both PRE and code hoisting.
4964
4965 For PRE, we want to verify that the expr is either transparent
4966 or locally anticipatable in the target block. This check makes
4967 no sense for code hoisting. */
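/* An informal outline of the cases handled below (a summary only; the
   code is authoritative):
     - the block ends in a jump, or in an insn with abnormal or multiple
       successor edges: insert before it, minding cc0 setters and jump
       tables;
     - the block ends in a call with abnormal or multiple successor
       edges: insert before the first parameter load;
     - otherwise: emit the new insns after the block's last insn.  */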
7506f491
DE
4968
4969static void
a65f3558 4970insert_insn_end_bb (expr, bb, pre)
7506f491 4971 struct expr *expr;
e2d2ed72 4972 basic_block bb;
a65f3558 4973 int pre;
7506f491 4974{
e2d2ed72 4975 rtx insn = bb->end;
7506f491
DE
4976 rtx new_insn;
4977 rtx reg = expr->reaching_reg;
4978 int regno = REGNO (reg);
2f937369 4979 rtx pat, pat_end;
7506f491 4980
a42cd965 4981 pat = process_insert_insn (expr);
2f937369
DM
4982 if (pat == NULL_RTX || ! INSN_P (pat))
4983 abort ();
4984
4985 pat_end = pat;
4986 while (NEXT_INSN (pat_end) != NULL_RTX)
4987 pat_end = NEXT_INSN (pat_end);
7506f491
DE
4988
4989 /* If the last insn is a jump, insert EXPR in front [taking care to
068473ec
JH
 4990     handle cc0, etc. properly].  Similarly we need to take care of trapping
 4991     instructions in the presence of non-call exceptions.  */
7506f491 4992
068473ec
JH
4993 if (GET_CODE (insn) == JUMP_INSN
4994 || (GET_CODE (insn) == INSN
4995 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
7506f491 4996 {
50b2596f 4997#ifdef HAVE_cc0
7506f491 4998 rtx note;
50b2596f 4999#endif
068473ec
JH
5000 /* It should always be the case that we can put these instructions
 5001	 anywhere in the basic block when performing PRE optimizations.
5002 Check this. */
3b25fbfe 5003 if (GET_CODE (insn) == INSN && pre
0b17ab2f 5004 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5005 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
068473ec 5006 abort ();
7506f491
DE
5007
5008 /* If this is a jump table, then we can't insert stuff here. Since
5009 we know the previous real insn must be the tablejump, we insert
5010 the new instruction just before the tablejump. */
5011 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5012 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5013 insn = prev_real_insn (insn);
5014
5015#ifdef HAVE_cc0
5016 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5017 if cc0 isn't set. */
5018 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5019 if (note)
5020 insn = XEXP (note, 0);
5021 else
5022 {
5023 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5024 if (maybe_cc0_setter
2c3c49de 5025 && INSN_P (maybe_cc0_setter)
7506f491
DE
5026 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5027 insn = maybe_cc0_setter;
5028 }
5029#endif
5030 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3c030e88 5031 new_insn = emit_insn_before (pat, insn);
3947e2f9 5032 }
c4c81601 5033
3947e2f9
RH
5034 /* Likewise if the last insn is a call, as will happen in the presence
5035 of exception handling. */
068473ec
JH
5036 else if (GET_CODE (insn) == CALL_INSN
5037 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
3947e2f9 5038 {
3947e2f9
RH
5039 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5040 we search backward and place the instructions before the first
5041 parameter is loaded. Do this for everyone for consistency and a
589005ff 5042	 presumption that we'll get better code elsewhere as well.
3947e2f9 5043
c4c81601 5044 It should always be the case that we can put these instructions
a65f3558
JL
 5045	 anywhere in the basic block when performing PRE optimizations.
5046 Check this. */
c4c81601 5047
a65f3558 5048 if (pre
0b17ab2f 5049 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5050 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
5051 abort ();
5052
5053 /* Since different machines initialize their parameter registers
5054 in different orders, assume nothing. Collect the set of all
5055 parameter registers. */
833366d6 5056 insn = find_first_parameter_load (insn, bb->head);
3947e2f9 5057
b1d26727
JL
5058 /* If we found all the parameter loads, then we want to insert
5059 before the first parameter load.
5060
5061 If we did not find all the parameter loads, then we might have
5062 stopped on the head of the block, which could be a CODE_LABEL.
5063 If we inserted before the CODE_LABEL, then we would be putting
5064 the insn in the wrong basic block. In that case, put the insn
b5229628 5065 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 5066 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 5067 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 5068 insn = NEXT_INSN (insn);
c4c81601 5069
3c030e88 5070 new_insn = emit_insn_before (pat, insn);
7506f491
DE
5071 }
5072 else
3c030e88 5073 new_insn = emit_insn_after (pat, insn);
7506f491 5074
2f937369 5075 while (1)
a65f3558 5076 {
2f937369 5077 if (INSN_P (pat))
a65f3558 5078 {
2f937369
DM
5079 add_label_notes (PATTERN (pat), new_insn);
5080 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 5081 }
2f937369
DM
5082 if (pat == pat_end)
5083 break;
5084 pat = NEXT_INSN (pat);
a65f3558 5085 }
3947e2f9 5086
7506f491
DE
5087 gcse_create_count++;
5088
5089 if (gcse_file)
5090 {
c4c81601 5091 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 5092 bb->index, INSN_UID (new_insn));
c4c81601
RK
5093 fprintf (gcse_file, "copying expression %d to reg %d\n",
5094 expr->bitmap_index, regno);
7506f491
DE
5095 }
5096}
5097
a42cd965
AM
5098/* Insert partially redundant expressions on edges in the CFG to make
5099 the expressions fully redundant. */
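/* A hypothetical example of the effect: if x+y is computed on the path
   A->B->D but not on A->C->D, and block D recomputed x+y, then
   PRE_INSERT_MAP has a bit set for the edge C->D and we insert
   "reaching_reg = x+y" on that edge (or at the end of C when the edge
   is both abnormal and critical).  Together with the copy that replaced
   D's computation, this makes the expression fully redundant.  */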
7506f491 5100
a42cd965
AM
5101static int
5102pre_edge_insert (edge_list, index_map)
5103 struct edge_list *edge_list;
7506f491
DE
5104 struct expr **index_map;
5105{
c4c81601 5106 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
5107 sbitmap *inserted;
5108
a42cd965
AM
5109 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5110 if it reaches any of the deleted expressions. */
7506f491 5111
a42cd965
AM
5112 set_size = pre_insert_map[0]->size;
5113 num_edges = NUM_EDGES (edge_list);
02280659 5114 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 5115 sbitmap_vector_zero (inserted, num_edges);
7506f491 5116
a42cd965 5117 for (e = 0; e < num_edges; e++)
7506f491
DE
5118 {
5119 int indx;
e2d2ed72 5120 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 5121
a65f3558 5122 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 5123 {
a42cd965 5124 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 5125
02280659 5126 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
5127 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5128 {
5129 struct expr *expr = index_map[j];
5130 struct occr *occr;
a65f3558 5131
ff7cc307 5132 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
5133 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5134 {
5135 if (! occr->deleted_p)
5136 continue;
5137
 5138		    /* Insert this expression on this edge if it would
ff7cc307 5139 reach the deleted occurrence in BB. */
c4c81601
RK
5140 if (!TEST_BIT (inserted[e], j))
5141 {
5142 rtx insn;
5143 edge eg = INDEX_EDGE (edge_list, e);
5144
5145 /* We can't insert anything on an abnormal and
5146 critical edge, so we insert the insn at the end of
5147 the previous block. There are several alternatives
 5148		       detailed in Morgan's book P277 (sec 10.5) for
5149 handling this situation. This one is easiest for
5150 now. */
5151
5152 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5153 insert_insn_end_bb (index_map[j], bb, 0);
5154 else
5155 {
5156 insn = process_insert_insn (index_map[j]);
5157 insert_insn_on_edge (insn, eg);
5158 }
5159
5160 if (gcse_file)
5161 {
5162 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
5163 bb->index,
5164 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
c4c81601
RK
5165 fprintf (gcse_file, "copy expression %d\n",
5166 expr->bitmap_index);
5167 }
5168
a13d4ebf 5169 update_ld_motion_stores (expr);
c4c81601
RK
5170 SET_BIT (inserted[e], j);
5171 did_insert = 1;
5172 gcse_create_count++;
5173 }
5174 }
5175 }
7506f491
DE
5176 }
5177 }
5faf03ae 5178
5a660bff 5179 sbitmap_vector_free (inserted);
a42cd965 5180 return did_insert;
7506f491
DE
5181}
5182
c4c81601 5183/* Copy the result of INSN to REG. INDX is the expression number. */
7506f491
DE
5184
5185static void
5186pre_insert_copy_insn (expr, insn)
5187 struct expr *expr;
5188 rtx insn;
5189{
5190 rtx reg = expr->reaching_reg;
5191 int regno = REGNO (reg);
5192 int indx = expr->bitmap_index;
5193 rtx set = single_set (insn);
5194 rtx new_insn;
5195
5196 if (!set)
5197 abort ();
c4c81601 5198
cccf0ae8 5199 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
c4c81601 5200
7506f491
DE
5201 /* Keep register set table up to date. */
5202 record_one_set (regno, new_insn);
5203
5204 gcse_create_count++;
5205
5206 if (gcse_file)
a42cd965
AM
5207 fprintf (gcse_file,
5208 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5209 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5210 INSN_UID (insn), regno);
222f7ba9 5211 update_ld_motion_stores (expr);
7506f491
DE
5212}
5213
5214/* Copy available expressions that reach the redundant expression
5215 to `reaching_reg'. */
5216
5217static void
5218pre_insert_copies ()
5219{
2e653e39 5220 unsigned int i;
c4c81601
RK
5221 struct expr *expr;
5222 struct occr *occr;
5223 struct occr *avail;
a65f3558 5224
7506f491
DE
5225 /* For each available expression in the table, copy the result to
5226 `reaching_reg' if the expression reaches a deleted one.
5227
5228 ??? The current algorithm is rather brute force.
5229 Need to do some profiling. */
5230
02280659
ZD
5231 for (i = 0; i < expr_hash_table.size; i++)
5232 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5233 {
5234 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5235 we don't want to insert a copy here because the expression may not
5236 really be redundant. So only insert an insn if the expression was
5237 deleted. This test also avoids further processing if the
5238 expression wasn't deleted anywhere. */
5239 if (expr->reaching_reg == NULL)
5240 continue;
5241
5242 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5243 {
5244 if (! occr->deleted_p)
5245 continue;
7506f491 5246
c4c81601
RK
5247 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5248 {
5249 rtx insn = avail->insn;
7506f491 5250
c4c81601
RK
5251 /* No need to handle this one if handled already. */
5252 if (avail->copied_p)
5253 continue;
7506f491 5254
c4c81601
RK
5255 /* Don't handle this one if it's a redundant one. */
5256 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5257 continue;
7506f491 5258
c4c81601 5259 /* Or if the expression doesn't reach the deleted one. */
589005ff 5260 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
5261 expr,
5262 BLOCK_FOR_INSN (occr->insn)))
c4c81601 5263 continue;
7506f491 5264
c4c81601
RK
5265 /* Copy the result of avail to reaching_reg. */
5266 pre_insert_copy_insn (expr, insn);
5267 avail->copied_p = 1;
5268 }
5269 }
5270 }
7506f491
DE
5271}
5272
10d1bb36
JH
5273/* Emit move from SRC to DEST noting the equivalence with expression computed
5274 in INSN. */
5275static rtx
5276gcse_emit_move_after (src, dest, insn)
5277 rtx src, dest, insn;
5278{
5279 rtx new;
6bdb8dd6 5280 rtx set = single_set (insn), set2;
10d1bb36
JH
5281 rtx note;
5282 rtx eqv;
5283
5284 /* This should never fail since we're creating a reg->reg copy
5285 we've verified to be valid. */
5286
6bdb8dd6 5287 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 5288
10d1bb36 5289 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
5290 set2 = single_set (new);
5291 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5292 return new;
10d1bb36
JH
5293 if ((note = find_reg_equal_equiv_note (insn)))
5294 eqv = XEXP (note, 0);
5295 else
5296 eqv = SET_SRC (set);
5297
a500466b 5298 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
5299
5300 return new;
5301}
5302
7506f491 5303/* Delete redundant computations.
7506f491
DE
5304 Deletion is done by changing the insn to copy the `reaching_reg' of
5305 the expression into the result of the SET. It is left to later passes
5306 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5307
cc2902df 5308 Returns nonzero if a change is made. */
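/* A hypothetical illustration (register numbers invented): an insn

       (set (reg 65) (plus (reg 60) (reg 61)))

   whose expression is marked in PRE_DELETE_MAP is replaced by

       (set (reg 65) (reg 107))

   where (reg 107) is the expression's reaching_reg.  */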
7506f491
DE
5309
5310static int
5311pre_delete ()
5312{
2e653e39 5313 unsigned int i;
63bc1d05 5314 int changed;
c4c81601
RK
5315 struct expr *expr;
5316 struct occr *occr;
a65f3558 5317
7506f491 5318 changed = 0;
02280659
ZD
5319 for (i = 0; i < expr_hash_table.size; i++)
5320 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5321 {
5322 int indx = expr->bitmap_index;
7506f491 5323
c4c81601
RK
5324 /* We only need to search antic_occr since we require
5325 ANTLOC != 0. */
7506f491 5326
c4c81601
RK
5327 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5328 {
5329 rtx insn = occr->insn;
5330 rtx set;
e2d2ed72 5331 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5332
0b17ab2f 5333 if (TEST_BIT (pre_delete_map[bb->index], indx))
c4c81601
RK
5334 {
5335 set = single_set (insn);
5336 if (! set)
5337 abort ();
5338
5339 /* Create a pseudo-reg to store the result of reaching
5340 expressions into. Get the mode for the new pseudo from
5341 the mode of the original destination pseudo. */
5342 if (expr->reaching_reg == NULL)
5343 expr->reaching_reg
5344 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5345
10d1bb36
JH
5346 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5347 delete_insn (insn);
5348 occr->deleted_p = 1;
5349 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5350 changed = 1;
5351 gcse_subst_count++;
7506f491 5352
c4c81601
RK
5353 if (gcse_file)
5354 {
5355 fprintf (gcse_file,
5356 "PRE: redundant insn %d (expression %d) in ",
5357 INSN_UID (insn), indx);
5358 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
0b17ab2f 5359 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5360 }
5361 }
5362 }
5363 }
7506f491
DE
5364
5365 return changed;
5366}
5367
5368/* Perform GCSE optimizations using PRE.
5369 This is called by one_pre_gcse_pass after all the dataflow analysis
5370 has been done.
5371
c4c81601
RK
 5372   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5373 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5374 Compiler Design and Implementation.
7506f491 5375
c4c81601
RK
5376 ??? A new pseudo reg is created to hold the reaching expression. The nice
5377 thing about the classical approach is that it would try to use an existing
5378 reg. If the register can't be adequately optimized [i.e. we introduce
5379 reload problems], one could add a pass here to propagate the new register
5380 through the block.
7506f491 5381
c4c81601
RK
5382 ??? We don't handle single sets in PARALLELs because we're [currently] not
5383 able to copy the rest of the parallel when we insert copies to create full
5384 redundancies from partial redundancies. However, there's no reason why we
5385 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5386 redundancies. */
5387
5388static int
5389pre_gcse ()
5390{
2e653e39
RK
5391 unsigned int i;
5392 int did_insert, changed;
7506f491 5393 struct expr **index_map;
c4c81601 5394 struct expr *expr;
7506f491
DE
5395
5396 /* Compute a mapping from expression number (`bitmap_index') to
5397 hash table entry. */
5398
02280659
ZD
5399 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5400 for (i = 0; i < expr_hash_table.size; i++)
5401 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 5402 index_map[expr->bitmap_index] = expr;
7506f491
DE
5403
5404 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5405 pre_redundant_insns = sbitmap_alloc (max_cuid);
5406 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5407
5408 /* Delete the redundant insns first so that
5409 - we know what register to use for the new insns and for the other
5410 ones with reaching expressions
5411 - we know which insns are redundant when we go to create copies */
c4c81601 5412
7506f491
DE
5413 changed = pre_delete ();
5414
a42cd965 5415 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5416
7506f491 5417 /* In other places with reaching expressions, copy the expression to the
a42cd965 5418 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5419 pre_insert_copies ();
a42cd965
AM
5420 if (did_insert)
5421 {
5422 commit_edge_insertions ();
5423 changed = 1;
5424 }
7506f491 5425
283a2545 5426 free (index_map);
76ac938b 5427 sbitmap_free (pre_redundant_insns);
7506f491
DE
5428 return changed;
5429}
5430
5431/* Top level routine to perform one PRE GCSE pass.
5432
cc2902df 5433 Return nonzero if a change was made. */
7506f491
DE
5434
5435static int
b5ce41ff 5436one_pre_gcse_pass (pass)
7506f491
DE
5437 int pass;
5438{
5439 int changed = 0;
5440
5441 gcse_subst_count = 0;
5442 gcse_create_count = 0;
5443
02280659 5444 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 5445 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5446 if (flag_gcse_lm)
5447 compute_ld_motion_mems ();
5448
02280659 5449 compute_hash_table (&expr_hash_table);
a13d4ebf 5450 trim_ld_motion_mems ();
7506f491 5451 if (gcse_file)
02280659 5452 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 5453
02280659 5454 if (expr_hash_table.n_elems > 0)
7506f491 5455 {
02280659 5456 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
5457 compute_pre_data ();
5458 changed |= pre_gcse ();
a42cd965 5459 free_edge_list (edge_list);
7506f491
DE
5460 free_pre_mem ();
5461 }
c4c81601 5462
a13d4ebf 5463 free_ldst_mems ();
a42cd965 5464 remove_fake_edges ();
02280659 5465 free_hash_table (&expr_hash_table);
7506f491
DE
5466
5467 if (gcse_file)
5468 {
c4c81601
RK
5469 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5470 current_function_name, pass, bytes_used);
5471 fprintf (gcse_file, "%d substs, %d insns created\n",
5472 gcse_subst_count, gcse_create_count);
7506f491
DE
5473 }
5474
5475 return changed;
5476}
aeb2f500
JW
5477\f
5478/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5479 If notes are added to an insn which references a CODE_LABEL, the
5480 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5481 because the following loop optimization pass requires them. */
aeb2f500
JW
5482
5483/* ??? This is very similar to the loop.c add_label_notes function. We
5484 could probably share code here. */
5485
5486/* ??? If there was a jump optimization pass after gcse and before loop,
5487 then we would not need to do this here, because jump would add the
5488 necessary REG_LABEL notes. */
5489
5490static void
5491add_label_notes (x, insn)
5492 rtx x;
5493 rtx insn;
5494{
5495 enum rtx_code code = GET_CODE (x);
5496 int i, j;
6f7d635c 5497 const char *fmt;
aeb2f500
JW
5498
5499 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5500 {
6b3603c2 5501 /* This code used to ignore labels that referred to dispatch tables to
ac7c5af5 5502	 avoid flow generating (slightly) worse code.
6b3603c2 5503
ac7c5af5
JL
5504 We no longer ignore such label references (see LABEL_REF handling in
5505 mark_jump_label for additional information). */
c4c81601 5506
6b8c9327 5507 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 5508 REG_NOTES (insn));
5b1ef594 5509 if (LABEL_P (XEXP (x, 0)))
589005ff 5510 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5511 return;
5512 }
5513
c4c81601 5514 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5515 {
5516 if (fmt[i] == 'e')
5517 add_label_notes (XEXP (x, i), insn);
5518 else if (fmt[i] == 'E')
5519 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5520 add_label_notes (XVECEXP (x, i, j), insn);
5521 }
5522}
a65f3558
JL
5523
5524/* Compute transparent outgoing information for each block.
5525
5526 An expression is transparent to an edge unless it is killed by
5527 the edge itself. This can only happen with abnormal control flow,
5528 when the edge is traversed through a call. This happens with
5529 non-local labels and exceptions.
5530
5531 This would not be necessary if we split the edge. While this is
5532 normally impossible for abnormal critical edges, with some effort
5533 it should be possible with exception handling, since we still have
5534 control over which handler should be invoked. But due to increased
5535 EH table sizes, this may not be worthwhile. */
5536
5537static void
5538compute_transpout ()
5539{
e0082a72 5540 basic_block bb;
2e653e39 5541 unsigned int i;
c4c81601 5542 struct expr *expr;
a65f3558 5543
d55bc081 5544 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 5545
e0082a72 5546 FOR_EACH_BB (bb)
a65f3558 5547 {
a65f3558
JL
 5548      /* Note that flow inserted a nop at the end of basic blocks that
5549 end in call instructions for reasons other than abnormal
5550 control flow. */
e0082a72 5551 if (GET_CODE (bb->end) != CALL_INSN)
a65f3558
JL
5552 continue;
5553
02280659
ZD
5554 for (i = 0; i < expr_hash_table.size; i++)
5555 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
c4c81601
RK
5556 if (GET_CODE (expr->expr) == MEM)
5557 {
5558 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5559 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5560 continue;
589005ff 5561
c4c81601
RK
5562 /* ??? Optimally, we would use interprocedural alias
5563 analysis to determine if this mem is actually killed
5564 by this call. */
e0082a72 5565 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 5566 }
a65f3558
JL
5567 }
5568}
dfdb644f
JL
5569
5570/* Removal of useless null pointer checks */
5571
dfdb644f 5572/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5573 invalidate nonnull_local and set nonnull_killed. DATA is really a
5574 `null_pointer_info *'.
dfdb644f
JL
5575
5576 We ignore hard registers. */
c4c81601 5577
dfdb644f 5578static void
84832317 5579invalidate_nonnull_info (x, setter, data)
dfdb644f
JL
5580 rtx x;
5581 rtx setter ATTRIBUTE_UNUSED;
0511851c 5582 void *data;
dfdb644f 5583{
770ae6cc
RK
5584 unsigned int regno;
5585 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5586
dfdb644f
JL
5587 while (GET_CODE (x) == SUBREG)
5588 x = SUBREG_REG (x);
5589
5590 /* Ignore anything that is not a register or is a hard register. */
5591 if (GET_CODE (x) != REG
0511851c
MM
5592 || REGNO (x) < npi->min_reg
5593 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5594 return;
5595
0511851c 5596 regno = REGNO (x) - npi->min_reg;
dfdb644f 5597
e0082a72
ZD
5598 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5599 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
dfdb644f
JL
5600}
5601
0511851c
MM
5602/* Do null-pointer check elimination for the registers indicated in
5603 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5604 they are not our responsibility to free. */
dfdb644f 5605
99a15921 5606static int
9cd56be1 5607delete_null_pointer_checks_1 (block_reg, nonnull_avin,
8e184d9c 5608 nonnull_avout, npi)
770ae6cc 5609 unsigned int *block_reg;
0511851c
MM
5610 sbitmap *nonnull_avin;
5611 sbitmap *nonnull_avout;
5612 struct null_pointer_info *npi;
dfdb644f 5613{
e0082a72 5614 basic_block bb, current_block;
0511851c
MM
5615 sbitmap *nonnull_local = npi->nonnull_local;
5616 sbitmap *nonnull_killed = npi->nonnull_killed;
99a15921 5617 int something_changed = 0;
589005ff 5618
dfdb644f
JL
5619 /* Compute local properties, nonnull and killed. A register will have
5620 the nonnull property if at the end of the current block its value is
5621 known to be nonnull. The killed property indicates that somewhere in
5622 the block any information we had about the register is killed.
5623
5624 Note that a register can have both properties in a single block. That
5625 indicates that it's killed, then later in the block a new value is
5626 computed. */
d55bc081
ZD
5627 sbitmap_vector_zero (nonnull_local, last_basic_block);
5628 sbitmap_vector_zero (nonnull_killed, last_basic_block);
c4c81601 5629
e0082a72 5630 FOR_EACH_BB (current_block)
dfdb644f
JL
5631 {
5632 rtx insn, stop_insn;
5633
0511851c
MM
5634 /* Set the current block for invalidate_nonnull_info. */
5635 npi->current_block = current_block;
5636
dfdb644f
JL
5637 /* Scan each insn in the basic block looking for memory references and
5638 register sets. */
e0082a72
ZD
5639 stop_insn = NEXT_INSN (current_block->end);
5640 for (insn = current_block->head;
dfdb644f
JL
5641 insn != stop_insn;
5642 insn = NEXT_INSN (insn))
5643 {
5644 rtx set;
0511851c 5645 rtx reg;
dfdb644f
JL
5646
5647 /* Ignore anything that is not a normal insn. */
2c3c49de 5648 if (! INSN_P (insn))
dfdb644f
JL
5649 continue;
5650
5651 /* Basically ignore anything that is not a simple SET. We do have
5652 to make sure to invalidate nonnull_local and set nonnull_killed
5653 for such insns though. */
5654 set = single_set (insn);
5655 if (!set)
5656 {
0511851c 5657 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5658 continue;
5659 }
5660
f63d1bf7 5661 /* See if we've got a usable memory load. We handle it first
dfdb644f
JL
5662 in case it uses its address register as a dest (which kills
5663 the nonnull property). */
5664 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
5665 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5666 && REGNO (reg) >= npi->min_reg
5667 && REGNO (reg) < npi->max_reg)
e0082a72 5668 SET_BIT (nonnull_local[current_block->index],
0511851c 5669 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5670
5671 /* Now invalidate stuff clobbered by this insn. */
0511851c 5672 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5673
5674 /* And handle stores, we do these last since any sets in INSN can
5675 not kill the nonnull property if it is derived from a MEM
5676 appearing in a SET_DEST. */
5677 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
5678 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5679 && REGNO (reg) >= npi->min_reg
5680 && REGNO (reg) < npi->max_reg)
e0082a72 5681 SET_BIT (nonnull_local[current_block->index],
0511851c 5682 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5683 }
5684 }
5685
5686 /* Now compute global properties based on the local properties. This
 5687     is a classic global availability algorithm.  */
ce724250
JL
5688 compute_available (nonnull_local, nonnull_killed,
5689 nonnull_avout, nonnull_avin);
dfdb644f
JL
5690
5691 /* Now look at each bb and see if it ends with a compare of a value
5692 against zero. */
e0082a72 5693 FOR_EACH_BB (bb)
dfdb644f 5694 {
e0082a72 5695 rtx last_insn = bb->end;
0511851c 5696 rtx condition, earliest;
dfdb644f
JL
5697 int compare_and_branch;
5698
0511851c
MM
5699 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5700 since BLOCK_REG[BB] is zero if this block did not end with a
5701 comparison against zero, this condition works. */
e0082a72
ZD
5702 if (block_reg[bb->index] < npi->min_reg
5703 || block_reg[bb->index] >= npi->max_reg)
dfdb644f
JL
5704 continue;
5705
5706 /* LAST_INSN is a conditional jump. Get its condition. */
5707 condition = get_condition (last_insn, &earliest);
5708
40d7a3fe
NB
5709 /* If we can't determine the condition then skip. */
5710 if (! condition)
5711 continue;
5712
dfdb644f 5713 /* Is the register known to have a nonzero value? */
e0082a72 5714 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
dfdb644f
JL
5715 continue;
5716
5717 /* Try to compute whether the compare/branch at the loop end is one or
5718 two instructions. */
5719 if (earliest == last_insn)
5720 compare_and_branch = 1;
5721 else if (earliest == prev_nonnote_insn (last_insn))
5722 compare_and_branch = 2;
5723 else
5724 continue;
5725
5726 /* We know the register in this comparison is nonnull at exit from
5727 this block. We can optimize this comparison. */
5728 if (GET_CODE (condition) == NE)
5729 {
5730 rtx new_jump;
5731
38c1593d
JH
5732 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5733 last_insn);
dfdb644f
JL
5734 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5735 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5736 emit_barrier_after (new_jump);
5737 }
8e184d9c 5738
99a15921 5739 something_changed = 1;
9cd56be1 5740 delete_insn (last_insn);
dfdb644f 5741 if (compare_and_branch == 2)
589005ff 5742 delete_insn (earliest);
e0082a72 5743 purge_dead_edges (bb);
0511851c
MM
5744
5745 /* Don't check this block again. (Note that BLOCK_END is
589005ff 5746 invalid here; we deleted the last instruction in the
0511851c 5747 block.) */
e0082a72 5748 block_reg[bb->index] = 0;
0511851c 5749 }
99a15921
JL
5750
5751 return something_changed;
0511851c
MM
5752}
5753
5754/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5755 at compile time.
5756
5757 This is conceptually similar to global constant/copy propagation and
5758 classic global CSE (it even uses the same dataflow equations as cprop).
5759
5760 If a register is used as memory address with the form (mem (reg)), then we
5761 know that REG can not be zero at that point in the program. Any instruction
5762 which sets REG "kills" this property.
5763
5764 So, if every path leading to a conditional branch has an available memory
5765 reference of that form, then we know the register can not have the value
589005ff 5766 zero at the conditional branch.
0511851c
MM
5767
 5768   So we merely need to compute the local properties and propagate that data
5769 around the cfg, then optimize where possible.
5770
5771 We run this pass two times. Once before CSE, then again after CSE. This
5772 has proven to be the most profitable approach. It is rare for new
5773 optimization opportunities of this nature to appear after the first CSE
5774 pass.
5775
5776 This could probably be integrated with global cprop with a little work. */
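/* A source-level sketch of what gets removed (a hypothetical example):

       *p = x;         <- P is used as an address, hence known nonnull
       ...             <- nothing on this path sets P
       if (p != 0)     <- NE test known true; branch made unconditional

   An EQ comparison against zero is instead deleted outright, letting
   control fall through.  */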
5777
99a15921 5778int
0511851c 5779delete_null_pointer_checks (f)
2e653e39 5780 rtx f ATTRIBUTE_UNUSED;
0511851c 5781{
0511851c 5782 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 5783 unsigned int *block_reg;
e0082a72 5784 basic_block bb;
0511851c
MM
5785 int reg;
5786 int regs_per_pass;
5787 int max_reg;
5788 struct null_pointer_info npi;
99a15921 5789 int something_changed = 0;
0511851c 5790
0511851c 5791 /* If we have only a single block, then there's nothing to do. */
0b17ab2f 5792 if (n_basic_blocks <= 1)
99a15921 5793 return 0;
0511851c
MM
5794
5795 /* Trying to perform global optimizations on flow graphs which have
5796 a high connectivity will take a long time and is unlikely to be
5797 particularly useful.
5798
43e72072 5799 In normal circumstances a cfg should have about twice as many edges
0511851c
MM
5800 as blocks. But we do not want to punish small functions which have
5801 a couple switch statements. So we require a relatively large number
5802 of basic blocks and the ratio of edges to blocks to be high. */
0b17ab2f 5803 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
99a15921 5804 return 0;
0511851c 5805
0511851c
MM
5806 /* We need four bitmaps, each with a bit for each register in each
5807 basic block. */
5808 max_reg = max_reg_num ();
d55bc081 5809 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
0511851c
MM
5810
5811 /* Allocate bitmaps to hold local and global properties. */
d55bc081
ZD
5812 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5813 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5814 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5815 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
0511851c
MM
5816
5817 /* Go through the basic blocks, seeing whether or not each block
5818 ends with a conditional branch whose condition is a comparison
5819 against zero. Record the register compared in BLOCK_REG. */
d55bc081 5820 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
e0082a72 5821 FOR_EACH_BB (bb)
0511851c 5822 {
e0082a72 5823 rtx last_insn = bb->end;
0511851c
MM
5824 rtx condition, earliest, reg;
5825
5826 /* We only want conditional branches. */
5827 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
5828 || !any_condjump_p (last_insn)
5829 || !onlyjump_p (last_insn))
0511851c
MM
5830 continue;
5831
5832 /* LAST_INSN is a conditional jump. Get its condition. */
5833 condition = get_condition (last_insn, &earliest);
5834
4fe9b91c 5835 /* If we were unable to get the condition, or it is not an equality
0511851c
MM
5836 comparison against zero then there's nothing we can do. */
5837 if (!condition
5838 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5839 || GET_CODE (XEXP (condition, 1)) != CONST_INT
589005ff 5840 || (XEXP (condition, 1)
0511851c
MM
5841 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5842 continue;
5843
5844 /* We must be checking a register against zero. */
5845 reg = XEXP (condition, 0);
5846 if (GET_CODE (reg) != REG)
5847 continue;
5848
e0082a72 5849 block_reg[bb->index] = REGNO (reg);
0511851c
MM
5850 }
5851
5852 /* Go through the algorithm for each block of registers. */
5853 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5854 {
5855 npi.min_reg = reg;
5856 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
99a15921
JL
5857 something_changed |= delete_null_pointer_checks_1 (block_reg,
5858 nonnull_avin,
5859 nonnull_avout,
5860 &npi);
dfdb644f
JL
5861 }
5862
0511851c
MM
5863 /* Free the table of registers compared at the end of every block. */
5864 free (block_reg);
5865
dfdb644f 5866 /* Free bitmaps. */
5a660bff
DB
5867 sbitmap_vector_free (npi.nonnull_local);
5868 sbitmap_vector_free (npi.nonnull_killed);
5869 sbitmap_vector_free (nonnull_avin);
5870 sbitmap_vector_free (nonnull_avout);
99a15921
JL
5871
5872 return something_changed;
dfdb644f 5873}
bb457bd9
JL
5874
5875/* Code Hoisting variables and subroutines. */
5876
5877/* Very busy expressions. */
5878static sbitmap *hoist_vbein;
5879static sbitmap *hoist_vbeout;
5880
5881/* Hoistable expressions. */
5882static sbitmap *hoist_exprs;
5883
5884/* Dominator bitmaps. */
355be0dc 5885dominance_info dominators;
bb457bd9
JL
5886
5887/* ??? We could compute post dominators and run this algorithm in
68e82b83 5888   reverse to perform tail merging; doing so would probably be
bb457bd9
JL
5889 more effective than the tail merging code in jump.c.
5890
5891 It's unclear if tail merging could be run in parallel with
5892 code hoisting. It would be nice. */
5893
5894/* Allocate vars used for code hoisting analysis. */
5895
5896static void
5897alloc_code_hoist_mem (n_blocks, n_exprs)
5898 int n_blocks, n_exprs;
5899{
5900 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5901 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5902 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5903
5904 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5905 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5906 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5907 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
5908}
5909
5910/* Free vars used for code hoisting analysis. */
5911
5912static void
5913free_code_hoist_mem ()
5914{
5a660bff
DB
5915 sbitmap_vector_free (antloc);
5916 sbitmap_vector_free (transp);
5917 sbitmap_vector_free (comp);
bb457bd9 5918
5a660bff
DB
5919 sbitmap_vector_free (hoist_vbein);
5920 sbitmap_vector_free (hoist_vbeout);
5921 sbitmap_vector_free (hoist_exprs);
5922 sbitmap_vector_free (transpout);
bb457bd9 5923
355be0dc 5924 free_dominance_info (dominators);
bb457bd9
JL
5925}
5926
5927/* Compute the very busy expressions at entry/exit from each block.
5928
5929 An expression is very busy if all paths from a given point
5930 compute the expression. */
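/* For reference, the fixpoint computed below corresponds to the usual
   backward dataflow equations

       VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
       VBEOUT(b) = intersection of VBEIN(s) over all successors s of b

   with VBEOUT left as the empty set for the block that precedes the
   exit block in the chain.  */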
5931
5932static void
5933compute_code_hoist_vbeinout ()
5934{
e0082a72
ZD
5935 int changed, passes;
5936 basic_block bb;
bb457bd9 5937
d55bc081
ZD
5938 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5939 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
5940
5941 passes = 0;
5942 changed = 1;
c4c81601 5943
bb457bd9
JL
5944 while (changed)
5945 {
5946 changed = 0;
c4c81601 5947
bb457bd9
JL
5948 /* We scan the blocks in the reverse order to speed up
5949 the convergence. */
e0082a72 5950 FOR_EACH_BB_REVERSE (bb)
bb457bd9 5951 {
e0082a72
ZD
5952 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5953 hoist_vbeout[bb->index], transp[bb->index]);
5954 if (bb->next_bb != EXIT_BLOCK_PTR)
5955 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 5956 }
c4c81601 5957
bb457bd9
JL
5958 passes++;
5959 }
5960
5961 if (gcse_file)
5962 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5963}
5964
5965/* Top level routine to do the dataflow analysis needed by code hoisting. */
5966
5967static void
5968compute_code_hoist_data ()
5969{
02280659 5970 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
5971 compute_transpout ();
5972 compute_code_hoist_vbeinout ();
355be0dc 5973 dominators = calculate_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
5974 if (gcse_file)
5975 fprintf (gcse_file, "\n");
5976}
5977
5978/* Determine if the expression identified by EXPR_INDEX would
 5979   reach BB unimpaired if it was placed at the end of EXPR_BB.
 5980
 5981   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
5982 to me that the expression must either be computed or transparent in
5983 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5984 would allow the expression to be hoisted out of loops, even if
5985 the expression wasn't a loop invariant.
5986
5987 Contrast this to reachability for PRE where an expression is
5988 considered reachable if *any* path reaches instead of *all*
5989 paths. */
5990
5991static int
5992hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
e2d2ed72 5993 basic_block expr_bb;
bb457bd9 5994 int expr_index;
e2d2ed72 5995 basic_block bb;
bb457bd9
JL
5996 char *visited;
5997{
5998 edge pred;
283a2545 5999 int visited_allocated_locally = 0;
589005ff 6000
bb457bd9
JL
6001
6002 if (visited == NULL)
6003 {
8e42ace1 6004 visited_allocated_locally = 1;
d55bc081 6005 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
6006 }
6007
e2d2ed72 6008 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 6009 {
e2d2ed72 6010 basic_block pred_bb = pred->src;
bb457bd9
JL
6011
6012 if (pred->src == ENTRY_BLOCK_PTR)
6013 break;
f305679f
JH
6014 else if (pred_bb == expr_bb)
6015 continue;
0b17ab2f 6016 else if (visited[pred_bb->index])
bb457bd9 6017 continue;
c4c81601 6018
bb457bd9 6019 /* Does this predecessor generate this expression? */
0b17ab2f 6020 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 6021 break;
0b17ab2f 6022 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 6023 break;
c4c81601 6024
bb457bd9
JL
6025 /* Not killed. */
6026 else
6027 {
0b17ab2f 6028 visited[pred_bb->index] = 1;
bb457bd9
JL
6029 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6030 pred_bb, visited))
6031 break;
6032 }
6033 }
589005ff 6034 if (visited_allocated_locally)
283a2545 6035 free (visited);
c4c81601 6036
bb457bd9
JL
6037 return (pred == NULL);
6038}
6039\f
6040/* Actually perform code hoisting. */
c4c81601 6041
bb457bd9
JL
6042static void
6043hoist_code ()
6044{
e0082a72 6045 basic_block bb, dominated;
c635a1ec
DB
6046 basic_block *domby;
6047 unsigned int domby_len;
6048 unsigned int i,j;
bb457bd9 6049 struct expr **index_map;
c4c81601 6050 struct expr *expr;
bb457bd9 6051
d55bc081 6052 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6053
6054 /* Compute a mapping from expression number (`bitmap_index') to
6055 hash table entry. */
6056
02280659
ZD
6057 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6058 for (i = 0; i < expr_hash_table.size; i++)
6059 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 6060 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6061
6062 /* Walk over each basic block looking for potentially hoistable
6063 expressions, nothing gets hoisted from the entry block. */
e0082a72 6064 FOR_EACH_BB (bb)
bb457bd9
JL
6065 {
6066 int found = 0;
6067 int insn_inserted_p;
6068
c635a1ec 6069 domby_len = get_dominated_by (dominators, bb, &domby);
bb457bd9
JL
6070 /* Examine each expression that is very busy at the exit of this
6071 block. These are the potentially hoistable expressions. */
e0082a72 6072 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6073 {
6074 int hoistable = 0;
c4c81601 6075
c635a1ec
DB
6076 if (TEST_BIT (hoist_vbeout[bb->index], i)
6077 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6078 {
6079 /* We've found a potentially hoistable expression, now
6080 we look at every block BB dominates to see if it
6081 computes the expression. */
c635a1ec 6082 for (j = 0; j < domby_len; j++)
bb457bd9 6083 {
c635a1ec 6084 dominated = domby[j];
bb457bd9 6085 /* Ignore self dominance. */
c635a1ec 6086 if (bb == dominated)
bb457bd9 6087 continue;
bb457bd9
JL
6088 /* We've found a dominated block, now see if it computes
6089 the busy expression and whether or not moving that
6090 expression to the "beginning" of that block is safe. */
e0082a72 6091 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6092 continue;
6093
6094 /* Note if the expression would reach the dominated block
589005ff 6095		 unimpaired if it was placed at the end of BB.
bb457bd9
JL
6096
6097 Keep track of how many times this expression is hoistable
6098 from a dominated block into BB. */
e0082a72 6099 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6100 hoistable++;
6101 }
6102
ff7cc307 6103 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6104 expression, then note it in the bitmap of expressions to
6105 hoist. It makes no sense to hoist things which are computed
6106 in only one BB, and doing so tends to pessimize register
6107 allocation. One could increase this value to try harder
6108 to avoid any possible code expansion due to register
6109 allocation issues; however experiments have shown that
6110 the vast majority of hoistable expressions are only movable
 6111	     from two successors, so raising this threshold is likely
6112 to nullify any benefit we get from code hoisting. */
6113 if (hoistable > 1)
6114 {
e0082a72 6115 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6116 found = 1;
6117 }
6118 }
6119 }
bb457bd9
JL
6120 /* If we found nothing to hoist, then quit now. */
6121 if (! found)
c635a1ec
DB
6122 {
6123 free (domby);
bb457bd9 6124 continue;
c635a1ec 6125 }
bb457bd9
JL
6126
6127 /* Loop over all the hoistable expressions. */
e0082a72 6128 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6129 {
6130 /* We want to insert the expression into BB only once, so
6131 note when we've inserted it. */
6132 insn_inserted_p = 0;
6133
6134 /* These tests should be the same as the tests above. */
e0082a72 6135 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6136 {
6137 /* We've found a potentially hoistable expression, now
6138 we look at every block BB dominates to see if it
6139 computes the expression. */
c635a1ec 6140 for (j = 0; j < domby_len; j++)
bb457bd9 6141 {
c635a1ec 6142 dominated = domby[j];
bb457bd9 6143 /* Ignore self dominance. */
c635a1ec 6144 if (bb == dominated)
bb457bd9
JL
6145 continue;
6146
6147 /* We've found a dominated block, now see if it computes
6148 the busy expression and whether or not moving that
6149 expression to the "beginning" of that block is safe. */
e0082a72 6150 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6151 continue;
6152
6153 /* The expression is computed in the dominated block and
6154 it would be safe to compute it at the start of the
6155 dominated block. Now we have to determine if the
ff7cc307 6156 expression would reach the dominated block if it was
bb457bd9 6157 placed at the end of BB. */
e0082a72 6158 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6159 {
6160 struct expr *expr = index_map[i];
6161 struct occr *occr = expr->antic_occr;
6162 rtx insn;
6163 rtx set;
6164
ff7cc307 6165 /* Find the right occurrence of this expression. */
e0082a72 6166		  while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
6167 occr = occr->next;
6168
6169 /* Should never happen. */
6170 if (!occr)
6171 abort ();
6172
6173 insn = occr->insn;
589005ff 6174
bb457bd9
JL
6175 set = single_set (insn);
6176 if (! set)
6177 abort ();
6178
6179 /* Create a pseudo-reg to store the result of reaching
6180 expressions into. Get the mode for the new pseudo
6181 from the mode of the original destination pseudo. */
6182 if (expr->reaching_reg == NULL)
6183 expr->reaching_reg
6184 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6185
10d1bb36
JH
6186 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6187 delete_insn (insn);
6188 occr->deleted_p = 1;
6189 if (!insn_inserted_p)
bb457bd9 6190 {
10d1bb36
JH
6191 insert_insn_end_bb (index_map[i], bb, 0);
6192 insn_inserted_p = 1;
bb457bd9
JL
6193 }
6194 }
6195 }
6196 }
6197 }
c635a1ec 6198 free (domby);
bb457bd9 6199 }
c4c81601 6200
8e42ace1 6201 free (index_map);
bb457bd9
JL
6202}
6203
 6204/* Top level routine to perform one code hoisting (aka unification) pass.
6205
cc2902df 6206 Return nonzero if a change was made. */
bb457bd9
JL
6207
6208static int
6209one_code_hoisting_pass ()
6210{
6211 int changed = 0;
6212
02280659
ZD
6213 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6214 compute_hash_table (&expr_hash_table);
bb457bd9 6215 if (gcse_file)
02280659 6216    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 6217
02280659 6218 if (expr_hash_table.n_elems > 0)
bb457bd9 6219 {
02280659 6220 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
6221 compute_code_hoist_data ();
6222 hoist_code ();
6223 free_code_hoist_mem ();
6224 }
c4c81601 6225
02280659 6226 free_hash_table (&expr_hash_table);
bb457bd9
JL
6227
6228 return changed;
6229}
a13d4ebf
AM
6230\f
6231/* Here we provide the things required to do store motion towards
6232 the exit. In order for this to be effective, gcse also needed to
 6233   be taught how to move a load when it is killed only by a store to itself.
6234
6235 int i;
6236 float a[10];
6237
6238 void foo(float scale)
6239 {
6240 for (i=0; i<10; i++)
6241 a[i] *= scale;
6242 }
6243
6244 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
 6245   the load out since it's live around the loop, and stored at the bottom
6246 of the loop.
a13d4ebf 6247
589005ff 6248 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
 6249   an enhancement to gcse which, when using edge based lcm, recognizes
6250 this situation and allows gcse to move the load out of the loop.
6251
6252 Once gcse has hoisted the load, store motion can then push this
6253 load towards the exit, and we end up with no loads or stores of 'i'
6254 in the loop. */
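/* After both transformations, the loop above would look roughly like
   this (an informal sketch of the intended result, not actual compiler
   output):

       for (reg = 0; reg < 10; reg++)   <- the load of 'i' is gone
         a[reg] *= scale;
       i = reg;                         <- the store is sunk past the loop
   */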
6255
ff7cc307 6256/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
6257 doesn't find one, we create one and initialize it. */
6258
6259static struct ls_expr *
6260ldst_entry (x)
6261 rtx x;
6262{
6263 struct ls_expr * ptr;
6264
 6265  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6266 if (expr_equiv_p (ptr->pattern, x))
6267 break;
6268
6269 if (!ptr)
6270 {
6271 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6272
6273 ptr->next = pre_ldst_mems;
6274 ptr->expr = NULL;
6275 ptr->pattern = x;
6276 ptr->loads = NULL_RTX;
6277 ptr->stores = NULL_RTX;
6278 ptr->reaching_reg = NULL_RTX;
6279 ptr->invalid = 0;
6280 ptr->index = 0;
6281 ptr->hash_index = 0;
6282 pre_ldst_mems = ptr;
6283 }
589005ff 6284
a13d4ebf
AM
6285 return ptr;
6286}
6287
6288/* Free up an individual ldst entry. */
6289
589005ff 6290static void
a13d4ebf
AM
6291free_ldst_entry (ptr)
6292 struct ls_expr * ptr;
6293{
aaa4ca30
AJ
6294 free_INSN_LIST_list (& ptr->loads);
6295 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
6296
6297 free (ptr);
6298}
6299
6300/* Free up all memory associated with the ldst list. */
6301
6302static void
6303free_ldst_mems ()
6304{
589005ff 6305 while (pre_ldst_mems)
a13d4ebf
AM
6306 {
6307 struct ls_expr * tmp = pre_ldst_mems;
6308
6309 pre_ldst_mems = pre_ldst_mems->next;
6310
6311 free_ldst_entry (tmp);
6312 }
6313
6314 pre_ldst_mems = NULL;
6315}
6316
6317/* Dump debugging info about the ldst list. */
6318
6319static void
6320print_ldst_list (file)
6321 FILE * file;
6322{
6323 struct ls_expr * ptr;
6324
6325 fprintf (file, "LDST list: \n");
6326
 6327  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6328 {
6329 fprintf (file, " Pattern (%3d): ", ptr->index);
6330
6331 print_rtl (file, ptr->pattern);
6332
6333 fprintf (file, "\n Loads : ");
6334
6335 if (ptr->loads)
6336 print_rtl (file, ptr->loads);
6337 else
6338 fprintf (file, "(nil)");
6339
6340 fprintf (file, "\n Stores : ");
6341
6342 if (ptr->stores)
6343 print_rtl (file, ptr->stores);
6344 else
6345 fprintf (file, "(nil)");
6346
6347 fprintf (file, "\n\n");
6348 }
6349
6350 fprintf (file, "\n");
6351}
6352
 6353/* Return the entry matching X in the list of ldst only expressions,
      or NULL if X is not in the list.  */
6354
6355static struct ls_expr *
6356find_rtx_in_ldst (x)
6357 rtx x;
6358{
6359 struct ls_expr * ptr;
589005ff 6360
a13d4ebf
AM
6361 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6362 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6363 return ptr;
6364
6365 return NULL;
6366}
6367
6368/* Assign each element of the list of mems a monotonically increasing value. */
6369
6370static int
6371enumerate_ldsts ()
6372{
6373 struct ls_expr * ptr;
6374 int n = 0;
6375
6376 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6377 ptr->index = n++;
6378
6379 return n;
6380}
6381
6382/* Return first item in the list. */
6383
6384static inline struct ls_expr *
6385first_ls_expr ()
6386{
6387 return pre_ldst_mems;
6388}
6389
 6390/* Return the next item in the list after the specified one.  */
6391
6392static inline struct ls_expr *
6393next_ls_expr (ptr)
6394 struct ls_expr * ptr;
6395{
6396 return ptr->next;
6397}
6398\f
6399/* Load Motion for loads which only kill themselves. */
6400
6401/* Return true if x is a simple MEM operation, with no registers or
6402 side effects. These are the types of loads we consider for the
6403 ld_motion list, otherwise we let the usual aliasing take care of it. */
6404
589005ff 6405static int
a13d4ebf
AM
6406simple_mem (x)
6407 rtx x;
6408{
6409 if (GET_CODE (x) != MEM)
6410 return 0;
589005ff 6411
a13d4ebf
AM
6412 if (MEM_VOLATILE_P (x))
6413 return 0;
589005ff 6414
a13d4ebf
AM
6415 if (GET_MODE (x) == BLKmode)
6416 return 0;
aaa4ca30
AJ
6417
6418 if (!rtx_varies_p (XEXP (x, 0), 0))
a13d4ebf 6419 return 1;
589005ff 6420
a13d4ebf
AM
6421 return 0;
6422}
6423
589005ff
KH
6424/* Make sure there isn't a buried reference in this pattern anywhere.
6425 If there is, invalidate the entry for it since we're not capable
 6426   of fixing it up just yet.  We have to be sure we know about ALL
a13d4ebf
AM
6427 loads since the aliasing code will allow all entries in the
 6428   ld_motion list to not-alias one another.  If we miss a load, we will get
589005ff 6429 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
6430 fix it up. */
6431
6432static void
6433invalidate_any_buried_refs (x)
6434 rtx x;
6435{
6436 const char * fmt;
8e42ace1 6437 int i, j;
a13d4ebf
AM
6438 struct ls_expr * ptr;
6439
6440 /* Invalidate it in the list. */
6441 if (GET_CODE (x) == MEM && simple_mem (x))
6442 {
6443 ptr = ldst_entry (x);
6444 ptr->invalid = 1;
6445 }
6446
6447 /* Recursively process the insn. */
6448 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6449
a13d4ebf
AM
6450 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6451 {
6452 if (fmt[i] == 'e')
6453 invalidate_any_buried_refs (XEXP (x, i));
6454 else if (fmt[i] == 'E')
6455 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6456 invalidate_any_buried_refs (XVECEXP (x, i, j));
6457 }
6458}
6459
6460/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6461 being defined as MEM loads and stores to symbols, with no
589005ff 6462 side effects and no registers in the expression. If there are any
f63d1bf7 6463   uses/defs which don't match these criteria, it is invalidated and
a13d4ebf
AM
6464 trimmed out later. */
6465
589005ff 6466static void
a13d4ebf
AM
6467compute_ld_motion_mems ()
6468{
6469 struct ls_expr * ptr;
e0082a72 6470 basic_block bb;
a13d4ebf 6471 rtx insn;
589005ff 6472
a13d4ebf
AM
6473 pre_ldst_mems = NULL;
6474
e0082a72 6475 FOR_EACH_BB (bb)
a13d4ebf 6476 {
e0082a72
ZD
6477 for (insn = bb->head;
6478 insn && insn != NEXT_INSN (bb->end);
a13d4ebf
AM
6479 insn = NEXT_INSN (insn))
6480 {
6481 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6482 {
6483 if (GET_CODE (PATTERN (insn)) == SET)
6484 {
6485 rtx src = SET_SRC (PATTERN (insn));
6486 rtx dest = SET_DEST (PATTERN (insn));
6487
6488 /* Check for a simple LOAD... */
6489 if (GET_CODE (src) == MEM && simple_mem (src))
6490 {
6491 ptr = ldst_entry (src);
6492 if (GET_CODE (dest) == REG)
6493 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6494 else
6495 ptr->invalid = 1;
6496 }
6497 else
6498 {
6499 /* Make sure there isn't a buried load somewhere. */
6500 invalidate_any_buried_refs (src);
6501 }
589005ff 6502
a13d4ebf
AM
6503 /* Check for stores. Don't worry about aliased ones, they
6504 will block any movement we might do later. We only care
6505 about this exact pattern since those are the only
6506 circumstance that we will ignore the aliasing info. */
6507 if (GET_CODE (dest) == MEM && simple_mem (dest))
6508 {
6509 ptr = ldst_entry (dest);
589005ff 6510
f54104df
AO
6511 if (GET_CODE (src) != MEM
6512 && GET_CODE (src) != ASM_OPERANDS)
a13d4ebf
AM
6513 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6514 else
6515 ptr->invalid = 1;
6516 }
6517 }
6518 else
6519 invalidate_any_buried_refs (PATTERN (insn));
6520 }
6521 }
6522 }
6523}
6524
589005ff 6525/* Remove any references that have been either invalidated or are not in the
a13d4ebf
AM
6526 expression list for pre gcse. */
6527
6528static void
6529trim_ld_motion_mems ()
6530{
6531 struct ls_expr * last = NULL;
6532 struct ls_expr * ptr = first_ls_expr ();
6533
6534 while (ptr != NULL)
6535 {
6536 int del = ptr->invalid;
6537 struct expr * expr = NULL;
589005ff 6538
a13d4ebf 6539 /* Delete if entry has been made invalid. */
589005ff 6540 if (!del)
a13d4ebf
AM
6541 {
6542 unsigned int i;
589005ff 6543
a13d4ebf
AM
6544 del = 1;
6545 /* Delete if we cannot find this mem in the expression list. */
02280659 6546 for (i = 0; i < expr_hash_table.size && del; i++)
a13d4ebf 6547 {
02280659 6548 for (expr = expr_hash_table.table[i];
589005ff 6549 expr != NULL;
a13d4ebf
AM
6550 expr = expr->next_same_hash)
6551 if (expr_equiv_p (expr->expr, ptr->pattern))
6552 {
6553 del = 0;
6554 break;
6555 }
6556 }
6557 }
589005ff 6558
a13d4ebf
AM
6559 if (del)
6560 {
6561 if (last != NULL)
6562 {
6563 last->next = ptr->next;
6564 free_ldst_entry (ptr);
6565 ptr = last->next;
6566 }
6567 else
6568 {
6569 pre_ldst_mems = pre_ldst_mems->next;
6570 free_ldst_entry (ptr);
6571 ptr = pre_ldst_mems;
6572 }
6573 }
6574 else
6575 {
6576 /* Set the expression field if we are keeping it. */
6577 last = ptr;
6578 ptr->expr = expr;
6579 ptr = ptr->next;
6580 }
6581 }
6582
6583 /* Show the world what we've found. */
6584 if (gcse_file && pre_ldst_mems != NULL)
6585 print_ldst_list (gcse_file);
6586}
6587
6588/* This routine will take an expression which we are replacing with
6589 a reaching register, and update any stores that are needed if
6590 that expression is in the ld_motion list. Stores are updated by
 6591   copying their SRC to the reaching register, and then storing
 6592   the reaching register into the store location.  This keeps the
6593 correct value in the reaching register for the loads. */
6594
6595static void
6596update_ld_motion_stores (expr)
6597 struct expr * expr;
6598{
6599 struct ls_expr * mem_ptr;
6600
6601 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6602 {
589005ff
KH
6603 /* We can try to find just the REACHED stores, but is shouldn't
6604 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
6605 dead and should be eliminated later. */
6606
6607 /* We replace SET mem = expr with
6608 SET reg = expr
589005ff 6609 SET mem = reg , where reg is the
a13d4ebf
AM
6610 reaching reg used in the load. */
6611 rtx list = mem_ptr->stores;
589005ff 6612
a13d4ebf
AM
6613 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6614 {
6615 rtx insn = XEXP (list, 0);
6616 rtx pat = PATTERN (insn);
6617 rtx src = SET_SRC (pat);
6618 rtx reg = expr->reaching_reg;
c57718d3 6619 rtx copy, new;
a13d4ebf
AM
6620
6621 /* If we've already copied it, continue. */
6622 if (expr->reaching_reg == src)
6623 continue;
589005ff 6624
a13d4ebf
AM
6625 if (gcse_file)
6626 {
6627 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6628 print_rtl (gcse_file, expr->reaching_reg);
6629 fprintf (gcse_file, ":\n ");
6630 print_inline_rtx (gcse_file, insn, 8);
6631 fprintf (gcse_file, "\n");
6632 }
589005ff 6633
a13d4ebf 6634 copy = gen_move_insn ( reg, SET_SRC (pat));
c57718d3
RK
6635 new = emit_insn_before (copy, insn);
6636 record_one_set (REGNO (reg), new);
a13d4ebf
AM
6637 SET_SRC (pat) = reg;
6638
6639 /* un-recognize this pattern since it's probably different now. */
6640 INSN_CODE (insn) = -1;
6641 gcse_create_count++;
6642 }
6643 }
6644}
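
/* A sketch (with invented pseudo numbers) of the rewrite performed
   above, assuming reg 100 is EXPR's reaching register:

     before:  (set (mem (symbol_ref "x")) (plus (reg 50) (reg 51)))

     after:   (set (reg 100) (plus (reg 50) (reg 51)))
	      (set (mem (symbol_ref "x")) (reg 100))

   The copy is emitted before the store and the store's SET_SRC is
   redirected to the reaching register, so loads of "x" that were
   rewritten to use reg 100 still observe the stored value.  */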
\f
/* Store motion code.  */

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static sbitmap * regvec;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Checks to see if we need to mark a register set.  Called from
   note_stores.  */

static void
reg_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void * data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    SET_BIT (*regvec, REGNO (dest));
}

/* Return nonzero if the register operands of expression X are still
   valid in basic block BB, i.e. none of them is killed there.  */

static int
store_ops_ok (x, bb)
     rtx x;
     basic_block bb;
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* If a reg has changed after us in this
	 block, the operand has been killed.  */
      return ! TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  if (! store_ops_ok (tem, bb))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! store_ops_ok (XVECEXP (x, i, j), bb))
		return 0;
	    }
	}
    }

  return 1;
}

/* Determine whether INSN is a MEM store pattern that we will consider
   moving.  */

static void
find_moveable_store (insn)
     rtx insn;
{
  struct ls_expr * ptr;
  rtx dest = PATTERN (insn);

  if (GET_CODE (dest) != SET
      || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
    return;

  dest = SET_DEST (dest);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
    return;

  if (rtx_varies_p (XEXP (dest, 0), 0))
    return;

  ptr = ldst_entry (dest);
  ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
}
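
/* Invented examples of the filtering above:

     (set (mem:SI (symbol_ref "g")) (reg 100))      accepted
     (set (mem:BLK (symbol_ref "g")) (reg 100))     rejected: BLKmode
     (set (mem/v:SI (symbol_ref "g")) (reg 100))    rejected: volatile
     (set (mem:SI (reg 200)) (reg 100))             rejected: address is
						    not a SYMBOL_REF
     (set (mem:SI (symbol_ref "g"))
	  (asm_operands ...))                       rejected: ASM source  */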

/* Build the store table.  Record which registers are set in each basic
   block, and collect the MEM store patterns that are candidates for
   store motion.  Returns the number of store expressions found.  */

static int
compute_store_table ()
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
						       max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = 0;

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      regvec = & (reg_set_in_block[bb->index]);
      for (insn = bb->end;
	   insn && insn != PREV_INSN (bb->head);
	   insn = PREV_INSN (insn))
	{
	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  SET_BIT (reg_set_in_block[bb->index], regno);
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  if (GET_CODE (pat) == SET)
	    find_moveable_store (insn);
	}
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "Store Motion Expressions.\n");
      print_ldst_list (gcse_file);
    }

  return ret;
}

/* Check to see if the load X is aliased with STORE_PATTERN.  */

static int
load_kills_store (x, store_pattern)
     rtx x, store_pattern;
{
  if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
    return 1;
  return 0;
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return 1 if found.  */

static int
find_loads (x, store_pattern)
     rtx x, store_pattern;
{
  const char * fmt;
  int i, j;
  int ret = 0;

  if (!x)
    return 0;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern))
	return 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern);
    }
  return ret;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   Return 1 if it does.  */

static int
store_killed_in_insn (x, insn)
     rtx x, insn;
{
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    return 0;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
	/* Pretend it's a load and check for aliasing.  */
	if (find_loads (SET_DEST (pat), x))
	  return 1;
      return find_loads (SET_SRC (pat), x);
    }
  else
    return find_loads (PATTERN (insn), x);
}
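
/* Note for the CALL_INSN case above: a const call neither reads nor
   writes memory, so it can never kill the store; a pure call may read
   memory, and could thus observe the old value, so it (like any other
   call) must be treated as a kill.  */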

/* Returns 1 if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  */

static int
store_killed_after (x, insn, bb)
     rtx x, insn;
     basic_block bb;
{
  rtx last = bb->end;

  if (insn == last)
    return 0;

  /* Check if the register operands of the store are OK in this block.
     Note that if registers are changed ANYWHERE in the block, we'll
     decide we can't move it, regardless of whether it changed above
     or below the store.  This could be improved by checking the register
     operands while looking for aliasing in each insn.  */
  if (!store_ops_ok (XEXP (x, 0), bb))
    return 1;

  for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
    if (store_killed_in_insn (x, insn))
      return 1;

  return 0;
}

/* Returns 1 if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  */

static int
store_killed_before (x, insn, bb)
     rtx x, insn;
     basic_block bb;
{
  rtx first = bb->head;

  if (insn == first)
    return store_killed_in_insn (x, insn);

  /* Check if the register operands of the store are OK in this block.
     Note that if registers are changed ANYWHERE in the block, we'll
     decide we can't move it, regardless of whether it changed above
     or below the store.  This could be improved by checking the register
     operands while looking for aliasing in each insn.  */
  if (!store_ops_ok (XEXP (x, 0), bb))
    return 1;

  for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, insn))
      return 1;

  return 0;
}

#define ANTIC_STORE_LIST(x)	((x)->loads)
#define AVAIL_STORE_LIST(x)	((x)->stores)

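/* For store motion the two insn lists in an ls_expr are reused: the
   `loads' field holds the stores that are anticipatable (candidates for
   motion upwards), and the `stores' field holds the stores that are
   available (candidates for motion downwards).  The macros above make
   that reuse explicit at the use sites.  */
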
/* Given the table of available store insns at the end of blocks,
   determine which ones are not killed by aliasing, and generate
   the appropriate vectors for gen and killed.  */
static void
build_store_vectors ()
{
  basic_block bb, b;
  rtx insn, st;
  struct ls_expr * ptr;

  /* Build the gen_vector.  This is any store in the table which is not
     killed by aliasing later in its block.  */
  ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      /* Put all the stores into either the antic list, or the avail list,
	 or both.  */
      rtx store_list = ptr->stores;
      ptr->stores = NULL_RTX;

      for (st = store_list; st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  if (!store_killed_after (ptr->pattern, insn, bb))
	    {
	      /* If we've already seen an available expression in this
		 block, we can delete the one we saw already (it occurs
		 earlier in the block) and replace it with this one.
		 We'll copy the old SRC expression to an unused register
		 in case there are any side effects.  */
	      if (TEST_BIT (ae_gen[bb->index], ptr->index))
		{
		  /* Find previous store.  */
		  rtx st;
		  for (st = AVAIL_STORE_LIST (ptr); st; st = XEXP (st, 1))
		    if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
		      break;
		  if (st)
		    {
		      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
		      if (gcse_file)
			fprintf (gcse_file, "Removing redundant store:\n");
		      replace_store_insn (r, XEXP (st, 0), bb);
		      XEXP (st, 0) = insn;
		      continue;
		    }
		}
	      SET_BIT (ae_gen[bb->index], ptr->index);
	      AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
							AVAIL_STORE_LIST (ptr));
	    }

	  if (!store_killed_before (ptr->pattern, insn, bb))
	    {
	      SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
	      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
							ANTIC_STORE_LIST (ptr));
	    }
	}

      /* Free the original list of store insns.  */
      free_INSN_LIST_list (&store_list);
    }

  ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    FOR_EACH_BB (b)
      {
	if (store_killed_after (ptr->pattern, b->head, b))
	  {
	    /* The anticipatable expression is not killed if it's gen'd.
	       We leave this check out for now.  If we have a code sequence
	       in a block which looks like:
		 ST MEMa = x
		 L  y = MEMa
		 ST MEMa = z
	       we should flag this as having an ANTIC expression, NOT
	       transparent, NOT killed, and AVAIL.
	       Unfortunately, since we haven't re-written all loads to
	       use the reaching reg, we'll end up doing an incorrect
	       load in the middle here if we push the store down.  It
	       happens in gcc.c-torture/execute/960311-1.c with -O3.
	       If we always kill it in this case, we'll sometimes do
	       unnecessary work, but it shouldn't actually hurt anything.
	       if (!TEST_BIT (ae_gen[b], ptr->index)).  */
	    SET_BIT (ae_kill[b->index], ptr->index);
	  }
	else
	  SET_BIT (transp[b->index], ptr->index);
      }

  /* Any block with no exits calls some non-returning function, so
     we better mark the store killed here, or we might not store to
     it at all.  If we knew it was abort, we wouldn't have to store,
     but we don't know that for sure.  */
  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
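
/* A small invented example of the vectors computed above, for a single
   expression MEMa:

     block B1:  ST MEMa = x	st_antloc[B1] and ae_gen[B1] set;
				transp[B1] set (the store itself does
				not kill the expression)
     block B2:  L  y = MEMa	the load aliases the store, so
				ae_kill[B2] is set and transp[B2] clear
     block B3:  (no reference)	transp[B3] set  */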

/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (insn, bb)
     rtx insn;
     basic_block bb;
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (expr, e)
     struct ls_expr * expr;
     edge e;
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (expr->pattern, reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the
     other edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
    {
      int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	abort ();
      if (! TEST_BIT (pre_insert_map[index], expr->index))
	break;
    }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
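
/* Sketch of the insertion decision above, on an invented CFG: if block
   B3 has exactly two predecessor edges E1 (B1->B3) and E2 (B2->B3) and
   pre_insert_map requests the store on both, the loop finds no unmarked
   edge, so both bits are cleared and the store is emitted once at the
   start of B3.  If only E1 were marked, the store would be inserted on
   E1 alone and committed later by commit_edge_insertions.  */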

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (reg, del, bb)
     rtx reg, del;
     basic_block bb;
{
  rtx insn;

  insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION  replaced with insn:\n      ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  delete_insn (del);
}

/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (expr, bb)
     struct ls_expr * expr;
     basic_block bb;
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  /* If there is more than 1 store, the earlier ones will be dead,
     but it doesn't hurt to replace them here.  */
  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb);
	  break;
	}
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion ()
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the stores that are live to the end of their block.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute what's actually available to move.  */
  add_noreturn_fake_exit_edges ();
  build_store_vectors ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
	  delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
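
/* An invented source-level view of the overall transformation.  Given

     for (i = 0; i < n; i++)
       g = i * 2;

   the loop body's store to the global g is deleted in favor of
   `reaching_reg = i * 2', and a single `g = reaching_reg' is inserted
   on the loop's exit edge, so memory is written once instead of on
   every iteration.  */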

\f
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (file)
     FILE *file;
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }
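
  /* For a sense of scale (figures invented): with 4000 basic blocks,
     40000 pseudos and 64-bit sbitmap elements, each block's bitmap row
     needs 40000/64 = 625 elements, so the cprop bitmaps would take
     roughly 4000 * 625 * 8 bytes = 20 MB; allocations of that order
     are what this check guards against.  */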

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}

#include "gt-gcse.h"