/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
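
/* For illustration only -- a hypothetical source-level sketch of the PRE
   steps above; every name here is made up and appears nowhere else in
   this file.  Given

       if (cond)
         x = a + b;
       y = a + b;     -- partially redundant: a + b reaches this
                         point on only one path

   the pass deletes the redundant computation in favor of a new pseudo
   (step 3) and inserts a copy on the path where the expression was
   missing so the deletion is safe (steps 4 and 5):

       if (cond)
         {
           t = a + b; -- existing computation, result kept in t
           x = t;
         }
       else
         t = a + b;   -- inserted: makes the use below fully redundant
       y = t;         -- redundant computation replaced by t

   Here t plays the role of `reaching_reg' in the code that follows.  */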
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether this is the expression hash table or the copy propagation
     (set) hash table; nonzero for the latter.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
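
/* Illustration (hypothetical numbers): if the insn stream carries UIDs
   1, 2, 5, 9 and only UIDs 2 and 9 are real insns, then alloc_gcse_mem
   below assigns uid_cuid[2] == 0 and uid_cuid[9] == 1, and CUID_INSN (0)
   and CUID_INSN (1) map back to those insns.  CUIDs are thus a dense
   renumbering that lets insn order be compared with a plain integer
   comparison.  */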

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
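
/* Illustration (hypothetical helper, not part of this file): walking all
   recorded sets of pseudo REGNO is a simple list traversal,

       static int
       count_sets_of (int regno)
       {
         struct reg_set *r;
         int n = 0;

         for (r = reg_set_table[regno]; r != NULL; r = r->next)
           n++;       -- r->insn is the insn doing the set
         return n;
       }

   which is why compute_transp can iterate per set rather than per basic
   block when handling pseudos.  */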

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself
   (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (All stores copy the same value to the reaching reg used
   for the loads.)  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy (void);
static void *gmalloc (unsigned int);
static void *grealloc (void *, unsigned int);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void replace_store_insn (rtx, rtx, basic_block);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
	      canon_modify_mem_list
		= gmalloc (last_basic_block * sizeof (rtx));
	      memset (modify_mem_list, 0, last_basic_block * sizeof (rtx));
	      memset (canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
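
/* Illustration (hypothetical caller): a pass that wants to emit a plain
   register-to-register copy first asks

	if (can_copy_p (GET_MODE (dest)))
	  emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   The lazy initialization above means only the first call pays for
   compute_can_copy; subsequent calls are a table lookup.  */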
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (unsigned int size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, unsigned int size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = gmalloc (n);
  memset (uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = gmalloc (n);
  memset (cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
  memset (modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset (canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
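
/* Worked example (hypothetical numbers, assuming an 8-byte
   SBITMAP_ELT_TYPE): for n == 2 bitmaps over x == 4000 blocks,
   column_size == 2 * 4000 * 8 == 64000 bytes.  With y == 100
   expressions, column_size * SBITMAP_SET_SIZE (100) is roughly 128000
   bytes, far under the 10MB budget, so the function returns y and all
   equations are solved in parallel.  Only a very large x * y falls
   through to the chunked return value.  */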
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
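
/* Illustration (hypothetical block): given a block containing only

	r5 = r1 + r2
	r1 = ...

   the expression (plus r1 r2) is locally anticipatable (its first
   occurrence precedes any modification of its operands) but not
   computed/available, since moving the computation to the end of the
   block would see the clobbered r1; TRANSP is likewise cleared for
   this block because an operand is modified in it.  */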
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = gmalloc (n);
  memset (reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
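
/* Illustration (hypothetical): for an insn
   (set (reg 100) (plus (reg 101) (reg 102))) we call want_to_gcse_p on
   the SET_SRC; the PLUS is not a general_operand, so it is vetted by
   recognizing a scratch (set (reg) (plus ...)) insn, which succeeds on
   targets with a matching add pattern.  A bare (reg 101) or
   (const_int 4) source returns 0 up front -- those are left to the
   copy/constant propagation passes instead.  */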
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
1503
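/* Editorial sketch, not part of the original file: per the header comment
   of load_killed_in_block_p, a whole-block kill query is just a call with
   UID_LIMIT past every insn and AVAIL_P zero (expr_killed_p below does
   exactly this for MEMs).  The wrapper name is hypothetical.  */

#if 0 /* illustration only */
static int
load_killed_anywhere_in_block_p (basic_block bb, rtx x)
{
  /* UID_LIMIT beyond max_uid, AVAIL_P == 0: every store recorded
     for BB is considered.  */
  return load_killed_in_block_p (bb, get_max_uid () + 1, x, 0);
}
#endif
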
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      /* We used alias set for hashing, but this is not good, since the alias
         set may differ in -fprofile-arcs and -fbranch-probabilities compilation
         causing the profiles to fail to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p)
                           + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                            (x, i)));
                }

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }
          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }

      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}

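/* Editorial aside: a standalone toy model of the hashing scheme above
   (compile it separately; the toy_* names are hypothetical and not GCC's).
   Leaf codes are folded in shifted left by 7 bits, operand hashes are
   simply accumulated, and hash_expr reduces the sum modulo the table size
   to pick a bucket.  */

#if 0 /* illustration only */
#include <stdio.h>

enum toy_code { TOY_REG, TOY_CONST_INT, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  int value;                    /* REGNO or INTVAL for the leaves.  */
  struct toy_expr *op0, *op1;   /* Operands of TOY_PLUS.  */
};

static unsigned int
toy_hash (const struct toy_expr *x)
{
  switch (x->code)
    {
    case TOY_REG:
    case TOY_CONST_INT:
      /* Leaf: fold in the code shifted by 7, plus the value.  */
      return ((unsigned int) x->code << 7) + (unsigned int) x->value;
    default:
      /* General case: add the code, then accumulate operand hashes.  */
      return (unsigned int) x->code + toy_hash (x->op0) + toy_hash (x->op1);
    }
}

int
main (void)
{
  struct toy_expr r100 = { TOY_REG, 100, 0, 0 };
  struct toy_expr c4 = { TOY_CONST_INT, 4, 0, 0 };
  struct toy_expr sum = { TOY_PLUS, 0, &r100, &c4 };

  /* Bucket index as hash_expr would compute it for an 11-bucket table.  */
  printf ("bucket = %u\n", toy_hash (&sum) % 11);
  return 0;
}
#endif
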
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
      return 0;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
         decide that the expression is transparent in a block when it isn't,
         due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
        return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
                                ASM_OPERANDS_INPUT (y, i))
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}

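/* Editorial note: the commutative cases above pair up with hash_expr_1,
   which combines operand hashes with '+'.  Addition being commutative,
   both operand orders land in the same bucket, so the both-orders check
   here can actually fire during a chain lookup.  R1 and R2 below are
   hypothetical pseudo-reg rtxes.  */

#if 0 /* illustration only */
  int dnr;
  rtx a = gen_rtx_PLUS (SImode, r1, r2);
  rtx b = gen_rtx_PLUS (SImode, r2, r1);

  if (hash_expr (a, SImode, &dnr, 11) == hash_expr (b, SImode, &dnr, 11)
      && expr_equiv_p (a, b))
    /* A and B are treated as the same expression by the table.  */;
#endif
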
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = antic_occr;
          antic_occr = antic_occr->next;
        }

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          /* First occurrence of this expression in any block?  */
          if (cur_expr->antic_occr == NULL)
            cur_expr->antic_occr = antic_occr;
          else
            last_occr->next = antic_occr;

          antic_occr->insn = insn;
          antic_occr->next = NULL;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = avail_occr;
          avail_occr = avail_occr->next;
        }

      if (avail_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer this occurrence to the currently recorded one.  We want
           the last one in the block and the block is scanned from start
           to end.  */
        avail_occr->insn = insn;
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);

          /* First occurrence of this expression in any block?  */
          if (cur_expr->avail_occr == NULL)
            cur_expr->avail_occr = avail_occr;
          else
            last_occr->next = avail_occr;

          avail_occr->insn = insn;
          avail_occr->next = NULL;
        }
    }
}

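/* Editorial illustration (hypothetical insn numbers): for a block that
   computes the same expression twice, say

       insn 10:  r5 <- r1 + r2
       insn 14:  r6 <- r1 + r2

   the loops above leave antic_occr pointing at insn 10 -- the first
   occurrence in the block, which is what anticipatability needs -- and
   avail_occr at insn 14 -- the last occurrence, which is what
   availability needs.  */
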
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = cur_occr;
      else
        last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == REG
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  if (GET_CODE (x) == CONSTANT_P_RTX)
    return false;

  return CONSTANT_P (x);
}

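/* Editorial illustration (hypothetical RTL): under the predicate above,
   (compare (const_int 2) (const_int 3)) and (compare (reg:SI 65)
   (reg:SI 65)) both count as constants for propagation, while
   (compare (reg:DF 66) (reg:DF 66)) does not, since a floating point
   register can compare unequal to itself (NaN).  */
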
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (GET_CODE (dest) == REG)
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && gcse_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has an attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes, and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || GET_CODE (XEXP (note, 0)) != MEM))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p,
                                table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
}

static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
                   void *v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (GET_CODE (dest) != MEM)
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
  bitmap_set_bit (canon_modify_mem_list_set, bb);
}

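/* Editorial sketch, not from the original file: since each store is
   recorded as two consecutive nodes -- the MEM first in list order, then
   its canonicalized address -- a walker consumes the list two nodes at a
   time and stops at a bare CALL_INSN node (see record_last_mem_set_info
   below).  The function name is hypothetical.  */

#if 0 /* illustration only */
static void
walk_canon_modify_mem_list (int bb)
{
  rtx list = canon_modify_mem_list[bb];

  while (list)
    {
      if (GET_CODE (list) == INSN_LIST)
        /* A CALL_INSN entry: all memory is clobbered from here on.  */
        break;

      {
        rtx dest = XEXP (list, 0);                  /* The MEM itself.  */
        rtx dest_addr = XEXP (XEXP (list, 1), 0);   /* Its address.  */
        /* ... use DEST and DEST_ADDR ... */
        list = XEXP (XEXP (list, 1), 1);            /* Step past the pair.  */
      }
    }
}
#endif
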
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* Note that traversals of this loop (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (canon_modify_mem_list_set, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* re-Cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.
         ??? hard-reg reg_set_in_block computation
         could be moved to compute_sets since they currently don't change.  */

      for (insn = current_bb->head;
           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);
        }

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
          && implicit_sets[current_bb->index] != NULL_RTX)
        hash_scan_set (implicit_sets[current_bb->index],
                       current_bb->head, table);

      /* The next pass builds the hash table.  */

      for (insn = current_bb->head, in_libcall_block = 0;
           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
              in_libcall_block = 1;
            else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
            hash_scan_insn (insn, table, in_libcall_block);
            if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
          }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}

/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether the set or the expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}

/* Compute the hash TABLE for doing copy/const propagation or the
   expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
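
/* Editorial sketch of the expected call sequence (hedged; the real
   drivers live elsewhere in this file): allocate, compute, optionally
   dump, then free.  */

#if 0 /* illustration only */
  struct hash_table expr_table;

  alloc_hash_table (max_cuid, &expr_table, 0);  /* 0: expression table.  */
  compute_hash_table (&expr_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_table);
  /* ... use the table ... */
  free_hash_table (&expr_table);
#endif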
\f
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression TABLE.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (rtx pat, struct hash_table *table)
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 table->size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = table->table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

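/* Editorial sketch (hedged): all recorded sets of a given pseudo hang off
   one hash chain, so callers walk them with lookup_set plus next_set.
   SET_HASH_TABLE here stands for whichever set table is live.  */

#if 0 /* illustration only */
  struct expr *set;

  for (set = lookup_set (regno, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    /* ... examine SET_SRC (set->expr) ... */;
#endif
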
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list ; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
        free_EXPR_LIST_node (list);
      else
        free_INSN_LIST_node (list);
    }

  *listp = NULL;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables (void)
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
  bitmap_clear (modify_mem_list_set);

  EXECUTE_IF_SET_IN_BITMAP
    (canon_modify_mem_list_set, 0, i,
     free_insn_expr_list_list (canon_modify_mem_list + i));
  bitmap_clear (canon_modify_mem_list_set);
}

/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}

/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (rtx x, rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                  INSN_CUID (insn), x, 0))
        return 0;
      else
        return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return oprs_not_set_p (XEXP (x, i), insn);

          if (! oprs_not_set_p (XEXP (x, i), insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
            return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          mark_set (x, insn);
        else if (GET_CODE (x) == CLOBBER)
          mark_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

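/* Editorial sketch (hedged; the real loop is in the classic GCSE driver):
   the protocol for the tables above is to reset at block entry and mark
   each insn after it is processed, so that oprs_not_set_p reflects
   everything set so far in the block.  */

#if 0 /* illustration only */
  reset_opr_set_tables ();
  for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* ... query oprs_not_set_p (x, insn) for candidate exprs ... */
        mark_oprs_set (insn);
      }
#endif
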
\f
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (int n_blocks, int n_insns)
{
  rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_blocks);

  rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_blocks);

  reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_blocks);

  rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem (void)
{
  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
}

/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (rtx insn, int regno, basic_block bb)
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
}

/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd (void)
{
  int cuid;
  unsigned int regno;
  int i;
  basic_block bb;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
           generates a definition in the block)
         Call the reg set by the insn corresponding to that bit regx
         Look at the linked list starting at reg_set_table[regx]
         For each setting of regx in the linked list, which is not in
             this block
           Set the bit in `kill' corresponding to that insn.  */
  FOR_EACH_BB (bb)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb->index], cuid))
        {
          rtx insn = CUID_INSN (cuid);
          rtx pat = PATTERN (insn);

          if (GET_CODE (insn) == CALL_INSN)
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  handle_rd_kill_set (insn, regno, bb);
            }

          if (GET_CODE (pat) == PARALLEL)
            {
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

                  if ((code == SET || code == CLOBBER)
                      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                    handle_rd_kill_set (insn,
                                        REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
                                        bb);
                }
            }
          else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
            /* Each setting of this register outside of this block
               must be marked in the set of kills in this block.  */
            handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
        }
}

/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd (void)
{
  int changed, passes;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      FOR_EACH_BB (bb)
        {
          sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
          changed |= sbitmap_union_of_diff_cg (rd_out[bb->index],
                                               rd_gen[bb->index],
                                               reaching_defs[bb->index],
                                               rd_kill[bb->index]);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
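
/* Editorial note: the loop above iterates the classic forward dataflow
   equations from the reference cited, expressed here over bitmaps:

     reaching_defs[b] = union over predecessors p of rd_out[p]
     rd_out[b]        = rd_gen[b] | (reaching_defs[b] & ~rd_kill[b])

   sbitmap_union_of_diff_cg computes the second line and reports whether
   rd_out[b] changed, which drives the convergence test.  */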
\f
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (int n_blocks, int n_exprs)
{
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_blocks);

  ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_blocks);

  ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_blocks);

  ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_blocks);
}

static void
free_avail_expr_mem (void)
{
  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen (struct hash_table *expr_hash_table)
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table->size; i++)
    for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
        SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}

/* Return nonzero if expression X is killed in BB.  */

static int
expr_killed_p (rtx x, basic_block bb)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
        return 1;
      else
        return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return expr_killed_p (XEXP (x, i), bb);
          else if (expr_killed_p (XEXP (x, i), bb))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (expr_killed_p (XVECEXP (x, i, j), bb))
            return 1;
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
                 struct hash_table *expr_hash_table)
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  FOR_EACH_BB (bb)
    for (i = 0; i < expr_hash_table->size; i++)
      for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
        {
          /* Skip EXPR if generated in this block.  */
          if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
            continue;

          if (expr_killed_p (expr->expr, bb))
            SET_BIT (ae_kill[bb->index], expr->bitmap_index);
        }
}
\f
/* Actually perform the Classic GCSE optimizations.  */

/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
                          basic_block bb, int check_self_loop, char *visited)
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (visited[pred_bb->index])
        /* This predecessor has already been visited.  Nothing to do.  */
        ;
      else if (pred_bb == bb)
        {
          /* BB loops on itself.  */
          if (check_self_loop
              && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
              && BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
                                        visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
                     int check_self_loop)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}

7506f491
DE
3179/* Return the instruction that computes EXPR that reaches INSN's basic block.
3180 If there is more than one such instruction, return NULL.
3181
3182 Called only by handle_avail_expr. */
3183
3184static rtx
1d088dee 3185computing_insn (struct expr *expr, rtx insn)
7506f491 3186{
e2d2ed72 3187 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
3188
3189 if (expr->avail_occr->next == NULL)
589005ff 3190 {
e2d2ed72 3191 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
c4c81601
RK
3192 /* The available expression is actually itself
3193 (i.e. a loop in the flow graph) so do nothing. */
3194 return NULL;
3195
7506f491
DE
3196 /* (FIXME) Case where we found a pattern that was created by
3197 a substitution that took place. */
3198 return expr->avail_occr->insn;
3199 }
3200 else
3201 {
3202 /* Pattern is computed more than once.
589005ff 3203 Search backwards from this insn to see how many of these
7506f491
DE
3204 computations actually reach this insn. */
3205 struct occr *occr;
3206 rtx insn_computes_expr = NULL;
3207 int can_reach = 0;
3208
3209 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3210 {
e2d2ed72 3211 if (BLOCK_FOR_INSN (occr->insn) == bb)
7506f491
DE
3212 {
3213 /* The expression is generated in this block.
3214 The only time we care about this is when the expression
3215 is generated later in the block [and thus there's a loop].
3216 We let the normal cse pass handle the other cases. */
c4c81601
RK
3217 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3218 && expr_reaches_here_p (occr, expr, bb, 1))
7506f491
DE
3219 {
3220 can_reach++;
3221 if (can_reach > 1)
3222 return NULL;
c4c81601 3223
7506f491
DE
3224 insn_computes_expr = occr->insn;
3225 }
3226 }
c4c81601
RK
3227 else if (expr_reaches_here_p (occr, expr, bb, 0))
3228 {
3229 can_reach++;
3230 if (can_reach > 1)
3231 return NULL;
3232
3233 insn_computes_expr = occr->insn;
3234 }
7506f491
DE
3235 }
3236
3237 if (insn_computes_expr == NULL)
3238 abort ();
c4c81601 3239
7506f491
DE
3240 return insn_computes_expr;
3241 }
3242}
3243
cc2902df 3244/* Return nonzero if the definition in DEF_INSN can reach INSN.
7506f491
DE
3245 Only called by can_disregard_other_sets. */
3246
3247static int
1d088dee 3248def_reaches_here_p (rtx insn, rtx def_insn)
7506f491
DE
3249{
3250 rtx reg;
3251
3252 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3253 return 1;
3254
3255 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3256 {
3257 if (INSN_CUID (def_insn) < INSN_CUID (insn))
ac7c5af5 3258 {
7506f491
DE
3259 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3260 return 1;
c4c81601 3261 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
7506f491
DE
3262 reg = XEXP (PATTERN (def_insn), 0);
3263 else if (GET_CODE (PATTERN (def_insn)) == SET)
3264 reg = SET_DEST (PATTERN (def_insn));
3265 else
3266 abort ();
c4c81601 3267
7506f491
DE
3268 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3269 }
3270 else
3271 return 0;
3272 }
3273
3274 return 0;
3275}
3276
cc2902df 3277/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
c4c81601
RK
3278 value returned is the number of definitions that reach INSN. Returning a
3279 value of zero means that [maybe] more than one definition reaches INSN and
3280 the caller can't perform whatever optimization it is trying to do. I.e. it is
3281 always safe to return zero. */
7506f491
DE
3282
3283static int
1d088dee 3284can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
7506f491
DE
3285{
3286 int number_of_reaching_defs = 0;
c4c81601 3287 struct reg_set *this_reg;
7506f491 3288
c4c81601
RK
3289 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3290 if (def_reaches_here_p (insn, this_reg->insn))
3291 {
3292 number_of_reaching_defs++;
3293 /* Ignore parallels for now. */
3294 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3295 return 0;
3296
3297 if (!for_combine
3298 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3299 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3300 SET_SRC (PATTERN (insn)))))
3301 /* A setting of the reg to a different value reaches INSN. */
3302 return 0;
3303
3304 if (number_of_reaching_defs > 1)
3305 {
3306 /* If in this setting the value the register is being set to is
3307 equal to the previous value the register was set to and this
3308 setting reaches the insn we are trying to do the substitution
3309 on then we are ok. */
3310 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
7506f491 3311 return 0;
c4c81601
RK
3312 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3313 SET_SRC (PATTERN (insn))))
3314 return 0;
3315 }
7506f491 3316
589005ff 3317 *addr_this_reg = this_reg;
c4c81601 3318 }
7506f491
DE
3319
3320 return number_of_reaching_defs;
3321}
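
/* Illustrative sketch (register numbers are made up, not taken from any
   real dump): if INSN is (set (reg 8) (plus (reg 3) (reg 4))) and two
   different predecessor blocks each contain
   (set (reg 5) (plus (reg 3) (reg 4))), then both definitions of reg 5
   reach INSN, yet can_disregard_other_sets may still return 2: every
   reaching SET has a source equal to INSN's own source, so substituting
   reg 5 into INSN remains safe.  A reaching PARALLEL, a CLOBBER, or a
   SET of a different value forces the conservative answer of zero.  */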
3322
3323/* Expression computed by insn is available and the substitution is legal,
3324 so try to perform the substitution.
3325
cc2902df 3326 The result is nonzero if any changes were made. */
7506f491
DE
3327
3328static int
1d088dee 3329handle_avail_expr (rtx insn, struct expr *expr)
7506f491 3330{
0631e0bf 3331 rtx pat, insn_computes_expr, expr_set;
7506f491
DE
3332 rtx to;
3333 struct reg_set *this_reg;
3334 int found_setting, use_src;
3335 int changed = 0;
3336
3337 /* We only handle the case where one computation of the expression
3338 reaches this instruction. */
3339 insn_computes_expr = computing_insn (expr, insn);
3340 if (insn_computes_expr == NULL)
3341 return 0;
0631e0bf
JH
3342 expr_set = single_set (insn_computes_expr);
3343 if (!expr_set)
3344 abort ();
7506f491
DE
3345
3346 found_setting = 0;
3347 use_src = 0;
3348
3349 /* At this point we know only one computation of EXPR outside of this
3350 block reaches this insn. Now try to find a register that the
3351 expression is computed into. */
0631e0bf 3352 if (GET_CODE (SET_SRC (expr_set)) == REG)
7506f491
DE
3353 {
3354 /* This is the case when the available expression that reaches
3355 here has already been handled as an available expression. */
770ae6cc 3356 unsigned int regnum_for_replacing
0631e0bf 3357 = REGNO (SET_SRC (expr_set));
c4c81601 3358
7506f491
DE
3359 /* If the register was created by GCSE we can't use `reg_set_table',
3360 however we know it's set only once. */
3361 if (regnum_for_replacing >= max_gcse_regno
3362 /* If the register the expression is computed into is set only once,
3363 or only one set reaches this insn, we can use it. */
3364 || (((this_reg = reg_set_table[regnum_for_replacing]),
3365 this_reg->next == NULL)
3366 || can_disregard_other_sets (&this_reg, insn, 0)))
8e42ace1
KH
3367 {
3368 use_src = 1;
3369 found_setting = 1;
3370 }
7506f491
DE
3371 }
3372
3373 if (!found_setting)
3374 {
770ae6cc 3375 unsigned int regnum_for_replacing
0631e0bf 3376 = REGNO (SET_DEST (expr_set));
c4c81601 3377
7506f491
DE
3378 /* This shouldn't happen. */
3379 if (regnum_for_replacing >= max_gcse_regno)
3380 abort ();
c4c81601 3381
7506f491 3382 this_reg = reg_set_table[regnum_for_replacing];
c4c81601 3383
7506f491
DE
3384 /* If the register the expression is computed into is set only once,
3385 or only one set reaches this insn, use it. */
3386 if (this_reg->next == NULL
3387 || can_disregard_other_sets (&this_reg, insn, 0))
3388 found_setting = 1;
3389 }
3390
3391 if (found_setting)
3392 {
3393 pat = PATTERN (insn);
3394 if (use_src)
0631e0bf 3395 to = SET_SRC (expr_set);
7506f491 3396 else
0631e0bf 3397 to = SET_DEST (expr_set);
7506f491
DE
3398 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3399
3400 /* We should be able to ignore the return code from validate_change but
3401 to play it safe we check. */
3402 if (changed)
3403 {
3404 gcse_subst_count++;
3405 if (gcse_file != NULL)
3406 {
c4c81601
RK
3407 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3408 INSN_UID (insn));
3409 fprintf (gcse_file, " reg %d %s insn %d\n",
3410 REGNO (to), use_src ? "from" : "set in",
7506f491
DE
3411 INSN_UID (insn_computes_expr));
3412 }
7506f491
DE
3413 }
3414 }
c4c81601 3415
7506f491
DE
3416 /* The register that the expr is computed into is set more than once. */
3417 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3418 {
3419 /* Insert an insn after insnx that copies the reg set in insnx
3420 into a new pseudo register; call this new register REGN.
3421 From insnx until the end of the basic block, or until REGB is set,
3422 replace all uses of REGB with REGN. */
3423 rtx new_insn;
3424
0631e0bf 3425 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
7506f491
DE
3426
3427 /* Generate the new insn. */
3428 /* ??? If the change fails, we return 0, even though we created
3429 an insn. I think this is ok. */
9e6a5703
JC
3430 new_insn
3431 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
0631e0bf 3432 SET_DEST (expr_set)),
c4c81601
RK
3433 insn_computes_expr);
3434
7506f491
DE
3435 /* Keep register set table up to date. */
3436 record_one_set (REGNO (to), new_insn);
3437
3438 gcse_create_count++;
3439 if (gcse_file != NULL)
ac7c5af5 3440 {
c4c81601 3441 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
7506f491 3442 INSN_UID (NEXT_INSN (insn_computes_expr)),
c4c81601
RK
3443 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3444 fprintf (gcse_file, ", computed in insn %d,\n",
7506f491 3445 INSN_UID (insn_computes_expr));
c4c81601
RK
3446 fprintf (gcse_file, " into newly allocated reg %d\n",
3447 REGNO (to));
ac7c5af5 3448 }
7506f491
DE
3449
3450 pat = PATTERN (insn);
3451
3452 /* Do register replacement for INSN. */
3453 changed = validate_change (insn, &SET_SRC (pat),
c4c81601
RK
3454 SET_DEST (PATTERN
3455 (NEXT_INSN (insn_computes_expr))),
7506f491
DE
3456 0);
3457
3458 /* We should be able to ignore the return code from validate_change but
3459 to play it safe we check. */
3460 if (changed)
3461 {
3462 gcse_subst_count++;
3463 if (gcse_file != NULL)
3464 {
c4c81601
RK
3465 fprintf (gcse_file,
3466 "GCSE: Replacing the source in insn %d with reg %d ",
7506f491 3467 INSN_UID (insn),
c4c81601
RK
3468 REGNO (SET_DEST (PATTERN (NEXT_INSN
3469 (insn_computes_expr)))));
3470 fprintf (gcse_file, "set in insn %d\n",
589005ff 3471 INSN_UID (insn_computes_expr));
7506f491 3472 }
7506f491
DE
3473 }
3474 }
3475
3476 return changed;
3477}
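
/* Illustrative sketch of the transformation (the names below are made
   up, not identifiers from this file): at the source level, when
   exactly one computation of `a + b' reaches INSN,

     x = a + b;   ...   y = a + b;

   becomes

     x = a + b;   ...   y = x;

   provided `x' still holds that value at INSN.  If the register holding
   the result is set more than once, the second arm above instead emits
   a copy into a fresh pseudo right after the computing insn and uses
   that pseudo, as described in the comments within the function.  */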
3478
c4c81601
RK
3479/* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3480 the dataflow analysis has been done.
7506f491 3481
cc2902df 3482 The result is nonzero if a change was made. */
7506f491
DE
3483
3484static int
1d088dee 3485classic_gcse (void)
7506f491 3486{
e0082a72 3487 int changed;
7506f491 3488 rtx insn;
e0082a72 3489 basic_block bb;
7506f491
DE
3490
3491 /* Note we start at block 1. */
3492
e0082a72
ZD
3493 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3494 return 0;
3495
7506f491 3496 changed = 0;
e0082a72 3497 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3498 {
3499 /* Reset tables used to keep track of what's still valid [since the
3500 start of the block]. */
3501 reset_opr_set_tables ();
3502
e0082a72
ZD
3503 for (insn = bb->head;
3504 insn != NULL && insn != NEXT_INSN (bb->end);
7506f491
DE
3505 insn = NEXT_INSN (insn))
3506 {
3507 /* Is insn of form (set (pseudo-reg) ...)? */
7506f491
DE
3508 if (GET_CODE (insn) == INSN
3509 && GET_CODE (PATTERN (insn)) == SET
3510 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3511 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3512 {
3513 rtx pat = PATTERN (insn);
3514 rtx src = SET_SRC (pat);
3515 struct expr *expr;
3516
3517 if (want_to_gcse_p (src)
3518 /* Is the expression recorded? */
02280659 3519 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
7506f491
DE
3520 /* Is the expression available [at the start of the
3521 block]? */
e0082a72 3522 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
7506f491
DE
3523 /* Are the operands unchanged since the start of the
3524 block? */
3525 && oprs_not_set_p (src, insn))
3526 changed |= handle_avail_expr (insn, expr);
3527 }
3528
3529 /* Keep track of everything modified by this insn. */
3530 /* ??? Need to be careful w.r.t. mods done to INSN. */
2c3c49de 3531 if (INSN_P (insn))
7506f491 3532 mark_oprs_set (insn);
ac7c5af5 3533 }
7506f491
DE
3534 }
3535
3536 return changed;
3537}
3538
3539/* Top level routine to perform one classic GCSE pass.
3540
cc2902df 3541 Return nonzero if a change was made. */
7506f491
DE
3542
3543static int
1d088dee 3544one_classic_gcse_pass (int pass)
7506f491
DE
3545{
3546 int changed = 0;
3547
3548 gcse_subst_count = 0;
3549 gcse_create_count = 0;
3550
02280659 3551 alloc_hash_table (max_cuid, &expr_hash_table, 0);
d55bc081 3552 alloc_rd_mem (last_basic_block, max_cuid);
02280659 3553 compute_hash_table (&expr_hash_table);
7506f491 3554 if (gcse_file)
02280659 3555 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 3556
02280659 3557 if (expr_hash_table.n_elems > 0)
7506f491
DE
3558 {
3559 compute_kill_rd ();
3560 compute_rd ();
02280659
ZD
3561 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3562 compute_ae_gen (&expr_hash_table);
3563 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
bd0eaec2 3564 compute_available (ae_gen, ae_kill, ae_out, ae_in);
7506f491
DE
3565 changed = classic_gcse ();
3566 free_avail_expr_mem ();
3567 }
c4c81601 3568
7506f491 3569 free_rd_mem ();
02280659 3570 free_hash_table (&expr_hash_table);
7506f491
DE
3571
3572 if (gcse_file)
3573 {
3574 fprintf (gcse_file, "\n");
c4c81601
RK
3575 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3576 current_function_name, pass, bytes_used, gcse_subst_count);
3577 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
7506f491
DE
3578 }
3579
3580 return changed;
3581}
3582\f
3583/* Compute copy/constant propagation working variables. */
3584
3585/* Local properties of assignments. */
7506f491
DE
3586static sbitmap *cprop_pavloc;
3587static sbitmap *cprop_absaltered;
3588
3589/* Global properties of assignments (computed from the local properties). */
7506f491
DE
3590static sbitmap *cprop_avin;
3591static sbitmap *cprop_avout;
3592
c4c81601
RK
3593/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3594 basic blocks. N_SETS is the number of sets. */
7506f491
DE
3595
3596static void
1d088dee 3597alloc_cprop_mem (int n_blocks, int n_sets)
7506f491
DE
3598{
3599 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3600 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3601
3602 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3603 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3604}
3605
3606/* Free vars used by copy/const propagation. */
3607
3608static void
1d088dee 3609free_cprop_mem (void)
7506f491 3610{
5a660bff
DB
3611 sbitmap_vector_free (cprop_pavloc);
3612 sbitmap_vector_free (cprop_absaltered);
3613 sbitmap_vector_free (cprop_avin);
3614 sbitmap_vector_free (cprop_avout);
7506f491
DE
3615}
3616
c4c81601
RK
3617/* For each block, compute whether X is transparent. X is either an
3618 expression or an assignment [though we don't care which, for this context
3619 an assignment is treated as an expression]. For each block where an
3620 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3621 bit in BMAP. */
7506f491
DE
3622
3623static void
1d088dee 3624compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
7506f491 3625{
e0082a72
ZD
3626 int i, j;
3627 basic_block bb;
7506f491 3628 enum rtx_code code;
c4c81601 3629 reg_set *r;
6f7d635c 3630 const char *fmt;
7506f491 3631
c4c81601
RK
3632 /* repeat is used to turn tail-recursion into iteration since GCC
3633 can't do it when there's no return value. */
7506f491
DE
3634 repeat:
3635
3636 if (x == 0)
3637 return;
3638
3639 code = GET_CODE (x);
3640 switch (code)
3641 {
3642 case REG:
c4c81601
RK
3643 if (set_p)
3644 {
3645 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3646 {
e0082a72
ZD
3647 FOR_EACH_BB (bb)
3648 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3649 SET_BIT (bmap[bb->index], indx);
c4c81601
RK
3650 }
3651 else
3652 {
3653 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3654 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3655 }
3656 }
3657 else
3658 {
3659 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3660 {
e0082a72
ZD
3661 FOR_EACH_BB (bb)
3662 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3663 RESET_BIT (bmap[bb->index], indx);
c4c81601
RK
3664 }
3665 else
3666 {
3667 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3668 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3669 }
3670 }
7506f491 3671
c4c81601 3672 return;
7506f491
DE
3673
3674 case MEM:
e0082a72 3675 FOR_EACH_BB (bb)
a13d4ebf 3676 {
e0082a72 3677 rtx list_entry = canon_modify_mem_list[bb->index];
a13d4ebf
AM
3678
3679 while (list_entry)
3680 {
3681 rtx dest, dest_addr;
3682
3683 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3684 {
3685 if (set_p)
e0082a72 3686 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3687 else
e0082a72 3688 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3689 break;
3690 }
3691 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3692 Examine each hunk of memory that is modified. */
3693
3694 dest = XEXP (list_entry, 0);
3695 list_entry = XEXP (list_entry, 1);
3696 dest_addr = XEXP (list_entry, 0);
589005ff 3697
a13d4ebf
AM
3698 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3699 x, rtx_addr_varies_p))
3700 {
3701 if (set_p)
e0082a72 3702 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3703 else
e0082a72 3704 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3705 break;
3706 }
3707 list_entry = XEXP (list_entry, 1);
3708 }
3709 }
c4c81601 3710
7506f491
DE
3711 x = XEXP (x, 0);
3712 goto repeat;
3713
3714 case PC:
3715 case CC0: /*FIXME*/
3716 case CONST:
3717 case CONST_INT:
3718 case CONST_DOUBLE:
69ef87e2 3719 case CONST_VECTOR:
7506f491
DE
3720 case SYMBOL_REF:
3721 case LABEL_REF:
3722 case ADDR_VEC:
3723 case ADDR_DIFF_VEC:
3724 return;
3725
3726 default:
3727 break;
3728 }
3729
c4c81601 3730 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3731 {
3732 if (fmt[i] == 'e')
3733 {
7506f491
DE
3734 /* If we are about to do the last recursive call
3735 needed at this level, change it into iteration.
3736 This function is called enough to be worth it. */
3737 if (i == 0)
3738 {
c4c81601 3739 x = XEXP (x, i);
7506f491
DE
3740 goto repeat;
3741 }
c4c81601
RK
3742
3743 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
3744 }
3745 else if (fmt[i] == 'E')
c4c81601
RK
3746 for (j = 0; j < XVECLEN (x, i); j++)
3747 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
3748 }
3749}
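
/* Example (a sketch, not taken from this file): an expression such as
   (plus (reg 3) (reg 4)) is transparent in a block iff neither reg 3
   nor reg 4 is set there; a MEM is additionally killed by any store it
   may alias and by calls.  So in a block containing

     c = a + b;
     a = c;

   the assignment to `a' makes `a + b' non-transparent: with SET_P zero
   the INDX bit in BMAP is reset, and with SET_P nonzero the same
   condition sets the corresponding kill bit instead.  */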
3750
7506f491
DE
3751/* Top level routine to do the dataflow analysis needed by copy/const
3752 propagation. */
3753
3754static void
1d088dee 3755compute_cprop_data (void)
7506f491 3756{
02280659 3757 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
ce724250
JL
3758 compute_available (cprop_pavloc, cprop_absaltered,
3759 cprop_avout, cprop_avin);
7506f491
DE
3760}
3761\f
3762/* Copy/constant propagation. */
3763
7506f491
DE
3764/* Maximum number of register uses in an insn that we handle. */
3765#define MAX_USES 8
3766
3767/* Table of uses found in an insn.
3768 Allocated statically to avoid alloc/free complexity and overhead. */
3769static struct reg_use reg_use_table[MAX_USES];
3770
3771/* Index into `reg_use_table' while building it. */
3772static int reg_use_count;
3773
c4c81601
RK
3774/* Set up a list of register numbers used in INSN. The found uses are stored
3775 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3776 and contains the number of uses in the table upon exit.
7506f491 3777
c4c81601
RK
3778 ??? If a register appears multiple times we will record it multiple times.
3779 This doesn't hurt anything but it will slow things down. */
7506f491
DE
3780
3781static void
1d088dee 3782find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
7506f491 3783{
c4c81601 3784 int i, j;
7506f491 3785 enum rtx_code code;
6f7d635c 3786 const char *fmt;
9e71c818 3787 rtx x = *xptr;
7506f491 3788
c4c81601
RK
3789 /* repeat is used to turn tail-recursion into iteration since GCC
3790 can't do it when there's no return value. */
7506f491 3791 repeat:
7506f491
DE
3792 if (x == 0)
3793 return;
3794
3795 code = GET_CODE (x);
9e71c818 3796 if (REG_P (x))
7506f491 3797 {
7506f491
DE
3798 if (reg_use_count == MAX_USES)
3799 return;
c4c81601 3800
7506f491
DE
3801 reg_use_table[reg_use_count].reg_rtx = x;
3802 reg_use_count++;
7506f491
DE
3803 }
3804
3805 /* Recursively scan the operands of this expression. */
3806
c4c81601 3807 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3808 {
3809 if (fmt[i] == 'e')
3810 {
3811 /* If we are about to do the last recursive call
3812 needed at this level, change it into iteration.
3813 This function is called enough to be worth it. */
3814 if (i == 0)
3815 {
3816 x = XEXP (x, 0);
3817 goto repeat;
3818 }
c4c81601 3819
9e71c818 3820 find_used_regs (&XEXP (x, i), data);
7506f491
DE
3821 }
3822 else if (fmt[i] == 'E')
c4c81601 3823 for (j = 0; j < XVECLEN (x, i); j++)
9e71c818 3824 find_used_regs (&XVECEXP (x, i, j), data);
7506f491
DE
3825 }
3826}
3827
3828/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
cc2902df 3829 Returns nonzero if successful. */
7506f491
DE
3830
3831static int
1d088dee 3832try_replace_reg (rtx from, rtx to, rtx insn)
7506f491 3833{
172890a2 3834 rtx note = find_reg_equal_equiv_note (insn);
fb0c0a12 3835 rtx src = 0;
172890a2
RK
3836 int success = 0;
3837 rtx set = single_set (insn);
833fc3ad 3838
2b773ee2
JH
3839 validate_replace_src_group (from, to, insn);
3840 if (num_changes_pending () && apply_change_group ())
3841 success = 1;
9e71c818 3842
9feff114
JDA
3843 /* Try to simplify SET_SRC if we have substituted a constant. */
3844 if (success && set && CONSTANT_P (to))
3845 {
3846 src = simplify_rtx (SET_SRC (set));
3847
3848 if (src)
3849 validate_change (insn, &SET_SRC (set), src, 0);
3850 }
3851
f305679f 3852 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
833fc3ad 3853 {
f305679f
JH
3854 /* If the above failed and this is a single set, try to simplify the source of
3855 the set given our substitution. We could perhaps try this for multiple
3856 SETs, but it probably won't buy us anything. */
172890a2
RK
3857 src = simplify_replace_rtx (SET_SRC (set), from, to);
3858
9e71c818
JH
3859 if (!rtx_equal_p (src, SET_SRC (set))
3860 && validate_change (insn, &SET_SRC (set), src, 0))
172890a2 3861 success = 1;
833fc3ad 3862
bbd288a4
FS
3863 /* If we've failed to do replacement, have a single SET, don't already
3864 have a note, and have no special SET, add a REG_EQUAL note to not
3865 lose information. */
3866 if (!success && note == 0 && set != 0
3867 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3868 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
f305679f
JH
3869 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3870 }
e251e2a2 3871
172890a2
RK
3872 /* If there is already a NOTE, update the expression in it with our
3873 replacement. */
3874 else if (note != 0)
3875 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
833fc3ad 3876
172890a2
RK
3877 /* REG_EQUAL may get simplified into a register.
3878 We don't allow that. Remove that note. This case ought
fbe5a4a6 3879 not to happen, because previous code ought to synthesize
172890a2
RK
3880 a reg-reg move, but be on the safe side. */
3881 if (note && REG_P (XEXP (note, 0)))
3882 remove_note (insn, note);
833fc3ad 3883
833fc3ad
JH
3884 return success;
3885}
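
/* Illustrative sketch (register numbers are hypothetical): suppose INSN
   is (set (reg 100) (plus (reg 90) (reg 91))), FROM is (reg 91) and TO
   is (const_int 4), but the target has no matching add-immediate
   pattern.  The in-place replacement then fails, and a REG_EQUAL note
   (plus (reg 90) (const_int 4)) is attached instead, so the discovered
   equivalence is not lost for later passes.  */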
c4c81601
RK
3886
3887/* Find a set of register REGNO that is available on entry to INSN's block.
3888 Returns NULL if no such set is found. */
7506f491
DE
3889
3890static struct expr *
1d088dee 3891find_avail_set (int regno, rtx insn)
7506f491 3892{
cafba495
BS
3893 /* SET1 contains the last set found that can be returned to the caller for
3894 use in a substitution. */
3895 struct expr *set1 = 0;
589005ff 3896
cafba495
BS
3897 /* Loops are not possible here. To get a loop we would need two sets
3898 available at the start of the block containing INSN. I.e. we would
3899 need two sets like this available at the start of the block:
3900
3901 (set (reg X) (reg Y))
3902 (set (reg Y) (reg X))
3903
3904 This cannot happen since the set of (reg Y) would have killed the
3905 set of (reg X) making it unavailable at the start of this block. */
3906 while (1)
8e42ace1 3907 {
cafba495 3908 rtx src;
ceda50e9 3909 struct expr *set = lookup_set (regno, &set_hash_table);
cafba495
BS
3910
3911 /* Find a set that is available at the start of the block
3912 which contains INSN. */
3913 while (set)
3914 {
3915 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3916 break;
3917 set = next_set (regno, set);
3918 }
7506f491 3919
cafba495
BS
3920 /* If no available set was found we've reached the end of the
3921 (possibly empty) copy chain. */
3922 if (set == 0)
589005ff 3923 break;
cafba495
BS
3924
3925 if (GET_CODE (set->expr) != SET)
3926 abort ();
3927
3928 src = SET_SRC (set->expr);
3929
3930 /* We know the set is available.
3931 Now check that SRC is ANTLOC (i.e. none of the source operands
589005ff 3932 have changed since the start of the block).
cafba495
BS
3933
3934 If the source operand changed, we may still use it for the next
3935 iteration of this loop, but we may not use it for substitutions. */
c4c81601 3936
6b2d1c9e 3937 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
cafba495
BS
3938 set1 = set;
3939
3940 /* If the source of the set is anything except a register, then
3941 we have reached the end of the copy chain. */
3942 if (GET_CODE (src) != REG)
7506f491 3943 break;
7506f491 3944
cafba495
BS
3945 /* Follow the copy chain, ie start another iteration of the loop
3946 and see if we have an available copy into SRC. */
3947 regno = REGNO (src);
8e42ace1 3948 }
cafba495
BS
3949
3950 /* SET1 holds the last set that was available and anticipatable at
3951 INSN. */
3952 return set1;
7506f491
DE
3953}
3954
abd535b6 3955/* Subroutine of cprop_insn that tries to propagate constants into
0e3f0221 3956 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
fbe5a4a6 3957 it is the instruction that immediately precedes JUMP, and must be a
818b6b7f 3958 single SET of a register. FROM is what we will try to replace,
0e3f0221 3959 SRC is the constant we will try to substitute for it. Returns nonzero
589005ff 3960 if a change was made. */
c4c81601 3961
abd535b6 3962static int
1d088dee 3963cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
abd535b6 3964{
bc6688b4 3965 rtx new, set_src, note_src;
0e3f0221 3966 rtx set = pc_set (jump);
bc6688b4 3967 rtx note = find_reg_equal_equiv_note (jump);
0e3f0221 3968
bc6688b4
RS
3969 if (note)
3970 {
3971 note_src = XEXP (note, 0);
3972 if (GET_CODE (note_src) == EXPR_LIST)
3973 note_src = NULL_RTX;
3974 }
3975 else note_src = NULL_RTX;
3976
3977 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3978 set_src = note_src ? note_src : SET_SRC (set);
3979
3980 /* First substitute the SETCC condition into the JUMP instruction,
3981 then substitute that given values into this expanded JUMP. */
3982 if (setcc != NULL_RTX
48ddd46c
JH
3983 && !modified_between_p (from, setcc, jump)
3984 && !modified_between_p (src, setcc, jump))
b2f02503 3985 {
bc6688b4 3986 rtx setcc_src;
b2f02503 3987 rtx setcc_set = single_set (setcc);
bc6688b4
RS
3988 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3989 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3990 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3991 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3992 setcc_src);
b2f02503 3993 }
0e3f0221 3994 else
bc6688b4 3995 setcc = NULL_RTX;
0e3f0221 3996
bc6688b4 3997 new = simplify_replace_rtx (set_src, from, src);
abd535b6 3998
bc6688b4
RS
3999 /* If no simplification can be made, then try the next register. */
4000 if (rtx_equal_p (new, SET_SRC (set)))
9e48c409 4001 return 0;
589005ff 4002
7d5ab30e 4003 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
172890a2 4004 if (new == pc_rtx)
0e3f0221 4005 delete_insn (jump);
7d5ab30e 4006 else
abd535b6 4007 {
48ddd46c
JH
4008 /* Ensure the value computed inside the jump insn to be equivalent
4009 to one computed by setcc. */
bc6688b4 4010 if (setcc && modified_in_p (new, setcc))
48ddd46c 4011 return 0;
0e3f0221 4012 if (! validate_change (jump, &SET_SRC (set), new, 0))
bc6688b4
RS
4013 {
4014 /* When (some) constants are not valid in a comparison, and there
4015 are two registers to be replaced by constants before the entire
4016 comparison can be folded into a constant, we need to keep
4017 intermediate information in REG_EQUAL notes. For targets with
4018 separate compare insns, such notes are added by try_replace_reg.
4019 When we have a combined compare-and-branch instruction, however,
4020 we need to attach a note to the branch itself to make this
4021 optimization work. */
4022
4023 if (!rtx_equal_p (new, note_src))
4024 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4025 return 0;
4026 }
4027
4028 /* Remove REG_EQUAL note after simplification. */
4029 if (note_src)
4030 remove_note (jump, note);
abd535b6 4031
7d5ab30e
JH
4032 /* If this has turned into an unconditional jump,
4033 then put a barrier after it so that the unreachable
4034 code will be deleted. */
4035 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
0e3f0221 4036 emit_barrier_after (jump);
7d5ab30e 4037 }
abd535b6 4038
0e3f0221
RS
4039#ifdef HAVE_cc0
4040 /* Delete the cc0 setter. */
818b6b7f 4041 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
0e3f0221
RS
4042 delete_insn (setcc);
4043#endif
4044
172890a2 4045 run_jump_opt_after_gcse = 1;
c4c81601 4046
172890a2
RK
4047 const_prop_count++;
4048 if (gcse_file != NULL)
4049 {
4050 fprintf (gcse_file,
818b6b7f 4051 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
0e3f0221 4052 REGNO (from), INSN_UID (jump));
172890a2
RK
4053 print_rtl (gcse_file, src);
4054 fprintf (gcse_file, "\n");
abd535b6 4055 }
0005550b 4056 purge_dead_edges (bb);
172890a2
RK
4057
4058 return 1;
abd535b6
BS
4059}
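
/* Illustrative sketch (register numbers are made up): if constant
   propagation has proved that reg 6 holds zero on entry, a jump of the
   form

     (set (pc) (if_then_else (ne (reg 6) (const_int 0)) ...))

   simplifies to pc_rtx, i.e. a jump to nowhere, and is deleted; had the
   condition simplified to a LABEL_REF instead, the jump would become
   unconditional and a barrier would be emitted after it so the dead
   fall-through code can be removed.  */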
4060
ae860ff7 4061static bool
1d088dee 4062constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
ae860ff7
JH
4063{
4064 rtx sset;
4065
4066 /* Check for reg or cc0 setting instructions followed by
4067 conditional branch instructions first. */
4068 if (alter_jumps
4069 && (sset = single_set (insn)) != NULL
244d05fb 4070 && NEXT_INSN (insn)
ae860ff7
JH
4071 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4072 {
4073 rtx dest = SET_DEST (sset);
4074 if ((REG_P (dest) || CC0_P (dest))
4075 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4076 return 1;
4077 }
4078
4079 /* Handle normal insns next. */
4080 if (GET_CODE (insn) == INSN
4081 && try_replace_reg (from, to, insn))
4082 return 1;
4083
4084 /* Try to propagate a CONST_INT into a conditional jump.
4085 We're pretty specific about what we will handle in this
4086 code, we can extend this as necessary over time.
4087
4088 Right now the insn in question must look like
4089 (set (pc) (if_then_else ...)) */
4090 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4091 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4092 return 0;
4093}
4094
7506f491 4095/* Perform constant and copy propagation on INSN.
cc2902df 4096 The result is nonzero if a change was made. */
7506f491
DE
4097
4098static int
1d088dee 4099cprop_insn (rtx insn, int alter_jumps)
7506f491
DE
4100{
4101 struct reg_use *reg_used;
4102 int changed = 0;
833fc3ad 4103 rtx note;
7506f491 4104
9e71c818 4105 if (!INSN_P (insn))
7506f491
DE
4106 return 0;
4107
4108 reg_use_count = 0;
9e71c818 4109 note_uses (&PATTERN (insn), find_used_regs, NULL);
589005ff 4110
172890a2 4111 note = find_reg_equal_equiv_note (insn);
833fc3ad 4112
dc297297 4113 /* We may win even when propagating constants into notes. */
833fc3ad 4114 if (note)
9e71c818 4115 find_used_regs (&XEXP (note, 0), NULL);
7506f491 4116
c4c81601
RK
4117 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4118 reg_used++, reg_use_count--)
7506f491 4119 {
770ae6cc 4120 unsigned int regno = REGNO (reg_used->reg_rtx);
7506f491
DE
4121 rtx pat, src;
4122 struct expr *set;
7506f491
DE
4123
4124 /* Ignore registers created by GCSE.
dc297297 4125 We do this because ... */
7506f491
DE
4126 if (regno >= max_gcse_regno)
4127 continue;
4128
4129 /* If the register has already been set in this block, there's
4130 nothing we can do. */
4131 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4132 continue;
4133
4134 /* Find an assignment that sets reg_used and is available
4135 at the start of the block. */
4136 set = find_avail_set (regno, insn);
4137 if (! set)
4138 continue;
589005ff 4139
7506f491
DE
4140 pat = set->expr;
4141 /* ??? We might be able to handle PARALLELs. Later. */
4142 if (GET_CODE (pat) != SET)
4143 abort ();
c4c81601 4144
7506f491
DE
4145 src = SET_SRC (pat);
4146
e78d9500 4147 /* Constant propagation. */
6b2d1c9e 4148 if (gcse_constant_p (src))
7506f491 4149 {
ae860ff7 4150 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
7506f491
DE
4151 {
4152 changed = 1;
4153 const_prop_count++;
4154 if (gcse_file != NULL)
4155 {
ae860ff7
JH
4156 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4157 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
e78d9500 4158 print_rtl (gcse_file, src);
7506f491
DE
4159 fprintf (gcse_file, "\n");
4160 }
bc6688b4
RS
4161 if (INSN_DELETED_P (insn))
4162 return 1;
7506f491
DE
4163 }
4164 }
4165 else if (GET_CODE (src) == REG
4166 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4167 && REGNO (src) != regno)
4168 {
cafba495 4169 if (try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491 4170 {
cafba495
BS
4171 changed = 1;
4172 copy_prop_count++;
4173 if (gcse_file != NULL)
7506f491 4174 {
ae860ff7 4175 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
c4c81601
RK
4176 regno, INSN_UID (insn));
4177 fprintf (gcse_file, " with reg %d\n", REGNO (src));
7506f491 4178 }
cafba495
BS
4179
4180 /* The original insn setting reg_used may or may not now be
4181 deletable. We leave the deletion to flow. */
4182 /* FIXME: If it turns out that the insn isn't deletable,
4183 then we may have unnecessarily extended register lifetimes
4184 and made things worse. */
7506f491
DE
4185 }
4186 }
4187 }
4188
4189 return changed;
4190}
4191
710ee3ed
RH
4192/* Like find_used_regs, but avoid recording uses that appear in
4193 input-output contexts such as zero_extract or pre_dec. This
4194 restricts the cases we consider to those for which local cprop
4195 can legitimately make replacements. */
4196
4197static void
1d088dee 4198local_cprop_find_used_regs (rtx *xptr, void *data)
710ee3ed
RH
4199{
4200 rtx x = *xptr;
4201
4202 if (x == 0)
4203 return;
4204
4205 switch (GET_CODE (x))
4206 {
4207 case ZERO_EXTRACT:
4208 case SIGN_EXTRACT:
4209 case STRICT_LOW_PART:
4210 return;
4211
4212 case PRE_DEC:
4213 case PRE_INC:
4214 case POST_DEC:
4215 case POST_INC:
4216 case PRE_MODIFY:
4217 case POST_MODIFY:
4218 /* Can only legitimately appear this early in the context of
4219 stack pushes for function arguments, but handle all of the
4220 codes nonetheless. */
4221 return;
4222
4223 case SUBREG:
4224 /* Setting a subreg of a register larger than word_mode leaves
4225 the non-written words unchanged. */
4226 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4227 return;
4228 break;
4229
4230 default:
4231 break;
4232 }
4233
4234 find_used_regs (xptr, data);
4235}
1d088dee 4236
8ba46434
R
4237/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4238 their REG_EQUAL notes need updating. */
e197b6fc 4239
ae860ff7 4240static bool
1d088dee 4241do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
ae860ff7
JH
4242{
4243 rtx newreg = NULL, newcnst = NULL;
4244
e197b6fc
RH
4245 /* Rule out USE instructions and ASM statements as we don't want to
4246 change the hard registers mentioned. */
ae860ff7
JH
4247 if (GET_CODE (x) == REG
4248 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
e197b6fc
RH
4249 || (GET_CODE (PATTERN (insn)) != USE
4250 && asm_noperands (PATTERN (insn)) < 0)))
ae860ff7
JH
4251 {
4252 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4253 struct elt_loc_list *l;
4254
4255 if (!val)
4256 return false;
4257 for (l = val->locs; l; l = l->next)
4258 {
4259 rtx this_rtx = l->loc;
46690369
JH
4260 rtx note;
4261
9635cfad
JH
4262 if (l->in_libcall)
4263 continue;
4264
6b2d1c9e 4265 if (gcse_constant_p (this_rtx))
ae860ff7 4266 newcnst = this_rtx;
46690369
JH
4267 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4268 /* Don't copy propagate if it has attached REG_EQUIV note.
4269 At this point this only function parameters should have
4270 REG_EQUIV notes and if the argument slot is used somewhere
4271 explicitly, it means address of parameter has been taken,
4272 so we should not extend the lifetime of the pseudo. */
4273 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4274 || GET_CODE (XEXP (note, 0)) != MEM))
ae860ff7
JH
4275 newreg = this_rtx;
4276 }
4277 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4278 {
8ba46434 4279 /* If we find a case where we can't fix the retval REG_EQUAL notes
fbe5a4a6 4280 to match the new register, we either have to abandon this replacement
8ba46434
R
4281 or fix delete_trivially_dead_insns to preserve the setting insn,
4282 or make it delete the REG_EQUAL note, and fix up all passes that
4283 require the REG_EQUAL note there. */
4284 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4285 abort ();
ae860ff7
JH
4286 if (gcse_file != NULL)
4287 {
4288 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4289 REGNO (x));
4290 fprintf (gcse_file, "insn %d with constant ",
4291 INSN_UID (insn));
4292 print_rtl (gcse_file, newcnst);
4293 fprintf (gcse_file, "\n");
4294 }
4295 const_prop_count++;
4296 return true;
4297 }
4298 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4299 {
8ba46434 4300 adjust_libcall_notes (x, newreg, insn, libcall_sp);
ae860ff7
JH
4301 if (gcse_file != NULL)
4302 {
4303 fprintf (gcse_file,
4304 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4305 REGNO (x), INSN_UID (insn));
4306 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4307 }
4308 copy_prop_count++;
4309 return true;
4310 }
4311 }
4312 return false;
4313}
4314
8ba46434
R
4315/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4316 their REG_EQUAL notes need updating to reflect that OLDREG has been
f4e3e618
RH
4317 replaced with NEWVAL in INSN. Return true if all substitutions could
4318 be made. */
8ba46434 4319static bool
1d088dee 4320adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
8ba46434 4321{
f4e3e618 4322 rtx end;
8ba46434
R
4323
4324 while ((end = *libcall_sp++))
4325 {
f4e3e618 4326 rtx note = find_reg_equal_equiv_note (end);
8ba46434
R
4327
4328 if (! note)
4329 continue;
4330
4331 if (REG_P (newval))
4332 {
4333 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4334 {
4335 do
4336 {
4337 note = find_reg_equal_equiv_note (end);
4338 if (! note)
4339 continue;
4340 if (reg_mentioned_p (newval, XEXP (note, 0)))
4341 return false;
4342 }
4343 while ((end = *libcall_sp++));
4344 return true;
4345 }
4346 }
4347 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4348 insn = end;
4349 }
4350 return true;
4351}
4352
4353#define MAX_NESTED_LIBCALLS 9
4354
ae860ff7 4355static void
1d088dee 4356local_cprop_pass (int alter_jumps)
ae860ff7
JH
4357{
4358 rtx insn;
4359 struct reg_use *reg_used;
8ba46434 4360 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
1649d92f 4361 bool changed = false;
ae860ff7
JH
4362
4363 cselib_init ();
8ba46434
R
4364 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4365 *libcall_sp = 0;
ae860ff7
JH
4366 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4367 {
4368 if (INSN_P (insn))
4369 {
8ba46434 4370 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
ae860ff7 4371
8ba46434
R
4372 if (note)
4373 {
4374 if (libcall_sp == libcall_stack)
4375 abort ();
4376 *--libcall_sp = XEXP (note, 0);
4377 }
4378 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4379 if (note)
4380 libcall_sp++;
4381 note = find_reg_equal_equiv_note (insn);
ae860ff7
JH
4382 do
4383 {
4384 reg_use_count = 0;
710ee3ed 4385 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
ae860ff7 4386 if (note)
710ee3ed 4387 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
ae860ff7
JH
4388
4389 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4390 reg_used++, reg_use_count--)
8ba46434
R
4391 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4392 libcall_sp))
1649d92f
JH
4393 {
4394 changed = true;
4395 break;
4396 }
bc6688b4
RS
4397 if (INSN_DELETED_P (insn))
4398 break;
ae860ff7
JH
4399 }
4400 while (reg_use_count);
4401 }
4402 cselib_process_insn (insn);
4403 }
4404 cselib_finish ();
1649d92f
JH
4405 /* Global analysis may get into infinite loops for unreachable blocks. */
4406 if (changed && alter_jumps)
5f0bea72
JH
4407 {
4408 delete_unreachable_blocks ();
4409 free_reg_set_mem ();
4410 alloc_reg_set_mem (max_reg_num ());
4411 compute_sets (get_insns ());
4412 }
ae860ff7
JH
4413}
4414
c4c81601 4415/* Forward propagate copies. This includes copies and constants. Return
cc2902df 4416 nonzero if a change was made. */
7506f491
DE
4417
4418static int
1d088dee 4419cprop (int alter_jumps)
7506f491 4420{
e0082a72
ZD
4421 int changed;
4422 basic_block bb;
7506f491
DE
4423 rtx insn;
4424
4425 /* Note we start at block 1. */
e0082a72
ZD
4426 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4427 {
4428 if (gcse_file != NULL)
4429 fprintf (gcse_file, "\n");
4430 return 0;
4431 }
7506f491
DE
4432
4433 changed = 0;
e0082a72 4434 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
4435 {
4436 /* Reset tables used to keep track of what's still valid [since the
4437 start of the block]. */
4438 reset_opr_set_tables ();
4439
e0082a72
ZD
4440 for (insn = bb->head;
4441 insn != NULL && insn != NEXT_INSN (bb->end);
7506f491 4442 insn = NEXT_INSN (insn))
172890a2
RK
4443 if (INSN_P (insn))
4444 {
ae860ff7 4445 changed |= cprop_insn (insn, alter_jumps);
7506f491 4446
172890a2
RK
4447 /* Keep track of everything modified by this insn. */
4448 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4449 call mark_oprs_set if we turned the insn into a NOTE. */
4450 if (GET_CODE (insn) != NOTE)
4451 mark_oprs_set (insn);
8e42ace1 4452 }
7506f491
DE
4453 }
4454
4455 if (gcse_file != NULL)
4456 fprintf (gcse_file, "\n");
4457
4458 return changed;
4459}
4460
fbef91d8
RS
4461/* Similar to get_condition, only the resulting condition must be
4462 valid at JUMP, instead of at EARLIEST.
4463
4464 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4465 settle for the condition variable in the jump instruction being integral.
4466 We prefer to be able to record the value of a user variable, rather than
4467 the value of a temporary used in a condition. This could be solved by
4468 recording the value of *every* register scanned by canonicalize_condition,
4469 but this would require some code reorganization. */
4470
2fa4a849 4471rtx
1d088dee 4472fis_get_condition (rtx jump)
fbef91d8
RS
4473{
4474 rtx cond, set, tmp, insn, earliest;
4475 bool reverse;
4476
4477 if (! any_condjump_p (jump))
4478 return NULL_RTX;
4479
4480 set = pc_set (jump);
4481 cond = XEXP (SET_SRC (set), 0);
4482
4483 /* If this branches to JUMP_LABEL when the condition is false,
4484 reverse the condition. */
4485 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4486 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4487
4488 /* Use canonicalize_condition to do the dirty work of manipulating
4489 MODE_CC values and COMPARE rtx codes. */
4490 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4491 if (!tmp)
4492 return NULL_RTX;
4493
4494 /* Verify that the given condition is valid at JUMP by virtue of not
4495 having been modified since EARLIEST. */
4496 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4497 if (INSN_P (insn) && modified_in_p (tmp, insn))
4498 break;
4499 if (insn == jump)
4500 return tmp;
4501
4502 /* The condition was modified. See if we can get a partial result
4503 that doesn't follow all the reversals. Perhaps combine can fold
4504 them together later. */
4505 tmp = XEXP (tmp, 0);
4506 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4507 return NULL_RTX;
4508 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4509 if (!tmp)
4510 return NULL_RTX;
4511
4512 /* For sanity's sake, re-validate the new result. */
4513 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4514 if (INSN_P (insn) && modified_in_p (tmp, insn))
4515 return NULL_RTX;
4516
4517 return tmp;
4518}
4519
4520/* Find the implicit sets of a function. An "implicit set" is a constraint
4521 on the value of a variable, implied by a conditional jump. For example,
4522 following "if (x == 2)", the then branch may be optimized as though the
4523 conditional performed an "explicit set", in this example, "x = 2". This
4524 function records the set patterns that are implicit at the start of each
4525 basic block. */
4526
4527static void
1d088dee 4528find_implicit_sets (void)
fbef91d8
RS
4529{
4530 basic_block bb, dest;
4531 unsigned int count;
4532 rtx cond, new;
4533
4534 count = 0;
4535 FOR_EACH_BB (bb)
4536 /* Check for more than one successor. */
4537 if (bb->succ && bb->succ->succ_next)
4538 {
4539 cond = fis_get_condition (bb->end);
4540
4541 if (cond
4542 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4543 && GET_CODE (XEXP (cond, 0)) == REG
4544 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
6b2d1c9e 4545 && gcse_constant_p (XEXP (cond, 1)))
fbef91d8
RS
4546 {
4547 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4548 : FALLTHRU_EDGE (bb)->dest;
4549
4550 if (dest && ! dest->pred->pred_next
4551 && dest != EXIT_BLOCK_PTR)
4552 {
4553 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4554 XEXP (cond, 1));
4555 implicit_sets[dest->index] = new;
4556 if (gcse_file)
4557 {
4558 fprintf(gcse_file, "Implicit set of reg %d in ",
4559 REGNO (XEXP (cond, 0)));
4560 fprintf(gcse_file, "basic block %d\n", dest->index);
4561 }
4562 count++;
4563 }
4564 }
4565 }
4566
4567 if (gcse_file)
4568 fprintf (gcse_file, "Found %d implicit sets\n", count);
4569}
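
/* Worked example (a sketch): for source code such as

     if (x == 2)
       y = x + 1;

   the then-block is entered only with x == 2, so the pattern
   (set (reg x) (const_int 2)) is recorded for it in implicit_sets,
   provided that block has a single predecessor.  The following
   copy/constant propagation pass can then rewrite the block as if it
   began with an explicit `x = 2', turning the assignment into `y = 3'.
   An NE comparison symmetrically constrains the fall-through block.  */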
4570
7506f491 4571/* Perform one copy/constant propagation pass.
a0134312
RS
4572 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4573 propagation into conditional jumps. If BYPASS_JUMPS is true,
4574 perform conditional jump bypassing optimizations. */
7506f491
DE
4575
4576static int
1d088dee 4577one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
7506f491
DE
4578{
4579 int changed = 0;
4580
4581 const_prop_count = 0;
4582 copy_prop_count = 0;
4583
a0134312 4584 local_cprop_pass (cprop_jumps);
ae860ff7 4585
fbef91d8 4586 /* Determine implicit sets. */
703ad42b 4587 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
fbef91d8
RS
4588 find_implicit_sets ();
4589
02280659
ZD
4590 alloc_hash_table (max_cuid, &set_hash_table, 1);
4591 compute_hash_table (&set_hash_table);
fbef91d8
RS
4592
4593 /* Free implicit_sets before peak usage. */
4594 free (implicit_sets);
4595 implicit_sets = NULL;
4596
7506f491 4597 if (gcse_file)
02280659
ZD
4598 dump_hash_table (gcse_file, "SET", &set_hash_table);
4599 if (set_hash_table.n_elems > 0)
7506f491 4600 {
02280659 4601 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 4602 compute_cprop_data ();
a0134312
RS
4603 changed = cprop (cprop_jumps);
4604 if (bypass_jumps)
0e3f0221 4605 changed |= bypass_conditional_jumps ();
7506f491
DE
4606 free_cprop_mem ();
4607 }
c4c81601 4608
02280659 4609 free_hash_table (&set_hash_table);
7506f491
DE
4610
4611 if (gcse_file)
4612 {
c4c81601
RK
4613 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4614 current_function_name, pass, bytes_used);
4615 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4616 const_prop_count, copy_prop_count);
7506f491 4617 }
1649d92f
JH
4618 /* Global analysis may get into infinite loops for unreachable blocks. */
4619 if (changed && cprop_jumps)
4620 delete_unreachable_blocks ();
7506f491
DE
4621
4622 return changed;
4623}
4624\f
0e3f0221
RS
4625/* Bypass conditional jumps. */
4626
7821bfc7
RS
4627/* The value of last_basic_block at the beginning of the jump_bypass
4628 pass. The use of redirect_edge_and_branch_force may introduce new
4629 basic blocks, but the data flow analysis is only valid for basic
4630 block indices less than bypass_last_basic_block. */
4631
4632static int bypass_last_basic_block;
4633
0e3f0221
RS
4634/* Find a set of REGNO to a constant that is available at the end of basic
4635 block BB. Returns NULL if no such set is found. Based heavily upon
4636 find_avail_set. */
4637
4638static struct expr *
1d088dee 4639find_bypass_set (int regno, int bb)
0e3f0221
RS
4640{
4641 struct expr *result = 0;
4642
4643 for (;;)
4644 {
4645 rtx src;
ceda50e9 4646 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
4647
4648 while (set)
4649 {
4650 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4651 break;
4652 set = next_set (regno, set);
4653 }
4654
4655 if (set == 0)
4656 break;
4657
4658 if (GET_CODE (set->expr) != SET)
4659 abort ();
4660
4661 src = SET_SRC (set->expr);
6b2d1c9e 4662 if (gcse_constant_p (src))
0e3f0221
RS
4663 result = set;
4664
4665 if (GET_CODE (src) != REG)
4666 break;
4667
4668 regno = REGNO (src);
4669 }
4670 return result;
4671}
4672
4673
e129b3f9
RS
4674/* Subroutine of bypass_block that checks whether a pseudo is killed by
4675 any of the instructions inserted on an edge. Jump bypassing places
4676 condition code setters on CFG edges using insert_insn_on_edge. This
4677 function is required to check that our data flow analysis is still
4678 valid prior to commit_edge_insertions. */
4679
4680static bool
1d088dee 4681reg_killed_on_edge (rtx reg, edge e)
e129b3f9
RS
4682{
4683 rtx insn;
4684
4685 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4686 if (INSN_P (insn) && reg_set_p (reg, insn))
4687 return true;
4688
4689 return false;
4690}
4691
0e3f0221
RS
4692/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4693 basic block BB which has more than one predecessor. If not NULL, SETCC
4694 is the first instruction of BB, which is immediately followed by JUMP_INSN
4695 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
4696 Returns nonzero if a change was made.
4697
e0bb17a8 4698 During the jump bypassing pass, we may place copies of SETCC instructions
e129b3f9
RS
4699 on CFG edges. The following routine must be careful to pay attention to
4700 these inserted insns when performing its transformations. */
0e3f0221
RS
4701
4702static int
1d088dee 4703bypass_block (basic_block bb, rtx setcc, rtx jump)
0e3f0221
RS
4704{
4705 rtx insn, note;
e129b3f9 4706 edge e, enext, edest;
818b6b7f 4707 int i, change;
72b8d451 4708 int may_be_loop_header;
0e3f0221
RS
4709
4710 insn = (setcc != NULL) ? setcc : jump;
4711
4712 /* Determine set of register uses in INSN. */
4713 reg_use_count = 0;
4714 note_uses (&PATTERN (insn), find_used_regs, NULL);
4715 note = find_reg_equal_equiv_note (insn);
4716 if (note)
4717 find_used_regs (&XEXP (note, 0), NULL);
4718
72b8d451
ZD
4719 may_be_loop_header = false;
4720 for (e = bb->pred; e; e = e->pred_next)
4721 if (e->flags & EDGE_DFS_BACK)
4722 {
4723 may_be_loop_header = true;
4724 break;
4725 }
4726
0e3f0221
RS
4727 change = 0;
4728 for (e = bb->pred; e; e = enext)
4729 {
4730 enext = e->pred_next;
7821bfc7
RS
4731 if (e->flags & EDGE_COMPLEX)
4732 continue;
4733
4734 /* We can't redirect edges from new basic blocks. */
4735 if (e->src->index >= bypass_last_basic_block)
4736 continue;
4737
72b8d451 4738 /* The irreducible loops created by redirecting edges entering the
e0bb17a8
KH
4739 loop from outside would decrease the effectiveness of some of the following
4740 optimizations, so prevent this. */
72b8d451
ZD
4741 if (may_be_loop_header
4742 && !(e->flags & EDGE_DFS_BACK))
4743 continue;
4744
0e3f0221
RS
4745 for (i = 0; i < reg_use_count; i++)
4746 {
4747 struct reg_use *reg_used = &reg_use_table[i];
589005ff 4748 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 4749 basic_block dest, old_dest;
589005ff
KH
4750 struct expr *set;
4751 rtx src, new;
0e3f0221 4752
589005ff
KH
4753 if (regno >= max_gcse_regno)
4754 continue;
0e3f0221 4755
589005ff 4756 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
4757
4758 if (! set)
4759 continue;
4760
e129b3f9
RS
4761 /* Check the data flow is valid after edge insertions. */
4762 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4763 continue;
4764
589005ff 4765 src = SET_SRC (pc_set (jump));
0e3f0221
RS
4766
4767 if (setcc != NULL)
4768 src = simplify_replace_rtx (src,
589005ff
KH
4769 SET_DEST (PATTERN (setcc)),
4770 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
4771
4772 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 4773 SET_SRC (set->expr));
0e3f0221 4774
1d088dee 4775 /* Jump bypassing may have already placed instructions on
e129b3f9
RS
4776 edges of the CFG. We can't bypass an outgoing edge that
4777 has instructions associated with it, as these insns won't
4778 get executed if the incoming edge is redirected. */
4779
589005ff 4780 if (new == pc_rtx)
e129b3f9
RS
4781 {
4782 edest = FALLTHRU_EDGE (bb);
4783 dest = edest->insns ? NULL : edest->dest;
4784 }
0e3f0221 4785 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
4786 {
4787 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4788 /* Don't bypass edges containing instructions. */
4789 for (edest = bb->succ; edest; edest = edest->succ_next)
4790 if (edest->dest == dest && edest->insns)
4791 {
4792 dest = NULL;
4793 break;
4794 }
4795 }
0e3f0221
RS
4796 else
4797 dest = NULL;
4798
818b6b7f 4799 old_dest = e->dest;
7821bfc7
RS
4800 if (dest != NULL
4801 && dest != old_dest
4802 && dest != EXIT_BLOCK_PTR)
4803 {
4804 redirect_edge_and_branch_force (e, dest);
4805
818b6b7f 4806 /* Copy the register setter to the redirected edge.
0e3f0221
RS
4807 Don't copy CC0 setters, as CC0 is dead after jump. */
4808 if (setcc)
4809 {
4810 rtx pat = PATTERN (setcc);
818b6b7f 4811 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
4812 insert_insn_on_edge (copy_insn (pat), e);
4813 }
4814
4815 if (gcse_file != NULL)
4816 {
818b6b7f
RH
4817 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4818 regno, INSN_UID (jump));
0e3f0221
RS
4819 print_rtl (gcse_file, SET_SRC (set->expr));
4820 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 4821 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
4822 }
4823 change = 1;
4824 break;
4825 }
4826 }
4827 }
4828 return change;
4829}
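
/* Worked example (a sketch; the block names are invented): let block B3
   contain only the jump `if (x == 0) goto L1'.  If predecessor B1 ends
   with the set `x = 0' available (found by find_bypass_set), the
   condition is known to be true along the B1->B3 edge, so that edge is
   redirected straight to L1 via redirect_edge_and_branch_force; an edge
   from a predecessor where the value of x is unknown is left alone.  */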
4830
4831/* Find basic blocks with more than one predecessor that only contain a
4832 single conditional jump. If the result of the comparison is known at
4833 compile-time from any incoming edge, redirect that edge to the
9a71ece1
RH
4834 appropriate target. Returns nonzero if a change was made.
4835
4836 This function is now mis-named, because we also handle indirect jumps. */
0e3f0221
RS
4837
4838static int
1d088dee 4839bypass_conditional_jumps (void)
0e3f0221
RS
4840{
4841 basic_block bb;
4842 int changed;
4843 rtx setcc;
4844 rtx insn;
4845 rtx dest;
4846
4847 /* Note we start at block 1. */
4848 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4849 return 0;
4850
7821bfc7 4851 bypass_last_basic_block = last_basic_block;
72b8d451 4852 mark_dfs_back_edges ();
7821bfc7 4853
0e3f0221
RS
4854 changed = 0;
4855 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 4856 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
4857 {
4858 /* Check for more than one predecessor. */
4859 if (bb->pred && bb->pred->pred_next)
4860 {
4861 setcc = NULL_RTX;
4862 for (insn = bb->head;
4863 insn != NULL && insn != NEXT_INSN (bb->end);
4864 insn = NEXT_INSN (insn))
4865 if (GET_CODE (insn) == INSN)
4866 {
9543a9d2 4867 if (setcc)
0e3f0221 4868 break;
ba4f7968 4869 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
4870 break;
4871
ba4f7968 4872 dest = SET_DEST (PATTERN (insn));
818b6b7f 4873 if (REG_P (dest) || CC0_P (dest))
0e3f0221 4874 setcc = insn;
0e3f0221
RS
4875 else
4876 break;
4877 }
4878 else if (GET_CODE (insn) == JUMP_INSN)
4879 {
9a71ece1
RH
4880 if ((any_condjump_p (insn) || computed_jump_p (insn))
4881 && onlyjump_p (insn))
0e3f0221
RS
4882 changed |= bypass_block (bb, setcc, insn);
4883 break;
4884 }
4885 else if (INSN_P (insn))
4886 break;
4887 }
4888 }
4889
818b6b7f 4890 /* If we bypassed any register setting insns, we inserted a
fbe5a4a6 4891 copy on the redirected edge. These need to be committed. */
0e3f0221
RS
4892 if (changed)
4893 commit_edge_insertions();
4894
4895 return changed;
4896}
4897\f
a65f3558 4898/* Compute PRE+LCM working variables. */
7506f491
DE
4899
4900/* Local properties of expressions. */
4901/* Nonzero for expressions that are transparent in the block. */
a65f3558 4902static sbitmap *transp;
7506f491 4903
5c35539b
RH
4904/* Nonzero for expressions that are transparent at the end of the block.
4905 This is only zero for expressions killed by an abnormal critical edge
4906 created by a call. */
a65f3558 4907static sbitmap *transpout;
5c35539b 4908
a65f3558
JL
4909/* Nonzero for expressions that are computed (available) in the block. */
4910static sbitmap *comp;
7506f491 4911
a65f3558
JL
4912/* Nonzero for expressions that are locally anticipatable in the block. */
4913static sbitmap *antloc;
7506f491 4914
a65f3558
JL
4915/* Nonzero for expressions where this block is an optimal computation
4916 point. */
4917static sbitmap *pre_optimal;
5c35539b 4918
a65f3558
JL
4919/* Nonzero for expressions which are redundant in a particular block. */
4920static sbitmap *pre_redundant;
7506f491 4921
a42cd965
AM
4922/* Nonzero for expressions which should be inserted on a specific edge. */
4923static sbitmap *pre_insert_map;
4924
4925/* Nonzero for expressions which should be deleted in a specific block. */
4926static sbitmap *pre_delete_map;
4927
4928/* Contains the edge_list returned by pre_edge_lcm. */
4929static struct edge_list *edge_list;
4930
a65f3558
JL
4931/* Redundant insns. */
4932static sbitmap pre_redundant_insns;
7506f491 4933
a65f3558 4934/* Allocate vars used for PRE analysis. */
7506f491
DE
4935
4936static void
1d088dee 4937alloc_pre_mem (int n_blocks, int n_exprs)
7506f491 4938{
a65f3558
JL
4939 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4940 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4941 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 4942
a42cd965
AM
4943 pre_optimal = NULL;
4944 pre_redundant = NULL;
4945 pre_insert_map = NULL;
4946 pre_delete_map = NULL;
4947 ae_in = NULL;
4948 ae_out = NULL;
a42cd965 4949 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 4950
a42cd965 4951 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
4952}
4953
a65f3558 4954/* Free vars used for PRE analysis. */
7506f491
DE
4955
4956static void
1d088dee 4957free_pre_mem (void)
7506f491 4958{
5a660bff
DB
4959 sbitmap_vector_free (transp);
4960 sbitmap_vector_free (comp);
bd3675fc
JL
4961
4962 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 4963
a42cd965 4964 if (pre_optimal)
5a660bff 4965 sbitmap_vector_free (pre_optimal);
a42cd965 4966 if (pre_redundant)
5a660bff 4967 sbitmap_vector_free (pre_redundant);
a42cd965 4968 if (pre_insert_map)
5a660bff 4969 sbitmap_vector_free (pre_insert_map);
a42cd965 4970 if (pre_delete_map)
5a660bff 4971 sbitmap_vector_free (pre_delete_map);
a42cd965 4972 if (ae_in)
5a660bff 4973 sbitmap_vector_free (ae_in);
a42cd965 4974 if (ae_out)
5a660bff 4975 sbitmap_vector_free (ae_out);
a42cd965 4976
bd3675fc 4977 transp = comp = NULL;
a42cd965 4978 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
55d3f917 4979 ae_in = ae_out = NULL;
7506f491
DE
4980}
4981
4982/* Top level routine to do the dataflow analysis needed by PRE. */
4983
4984static void
1d088dee 4985compute_pre_data (void)
7506f491 4986{
b614171e 4987 sbitmap trapping_expr;
e0082a72 4988 basic_block bb;
b614171e 4989 unsigned int ui;
c66e8ae9 4990
02280659 4991 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 4992 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 4993
b614171e 4994 /* Collect expressions which might trap. */
02280659 4995 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 4996 sbitmap_zero (trapping_expr);
02280659 4997 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
4998 {
4999 struct expr *e;
02280659 5000 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
5001 if (may_trap_p (e->expr))
5002 SET_BIT (trapping_expr, e->bitmap_index);
5003 }
5004
c66e8ae9
JL
5005 /* Compute ae_kill for each basic block using:
5006
5007 ~(TRANSP | COMP)
5008
a2e90653 5009 This is significantly faster than compute_ae_kill. */
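  /* A minimal sketch of why the identity holds (a hypothetical helper,
     not part of this file): an expression survives to the end of a
     block iff it is transparent through the block or computed in it,
     so for each word of the per-block bitmaps:

       static SBITMAP_ELT_TYPE
       ae_kill_word (SBITMAP_ELT_TYPE transp, SBITMAP_ELT_TYPE comp)
       {
         return ~(transp | comp);
       }
  */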
c66e8ae9 5010
e0082a72 5011 FOR_EACH_BB (bb)
c66e8ae9 5012 {
b614171e
MM
5013 edge e;
5014
5015 /* If the current block is the destination of an abnormal edge, we
5016 kill all trapping expressions because we won't be able to properly
5017 place the instruction on the edge. So make them neither
5018 anticipatable nor transparent. This is fairly conservative. */
e0082a72 5019 for (e = bb->pred; e ; e = e->pred_next)
b614171e
MM
5020 if (e->flags & EDGE_ABNORMAL)
5021 {
e0082a72
ZD
5022 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5023 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
5024 break;
5025 }
5026
e0082a72
ZD
5027 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5028 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
5029 }
5030
02280659 5031 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 5032 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 5033 sbitmap_vector_free (antloc);
bd3675fc 5034 antloc = NULL;
5a660bff 5035 sbitmap_vector_free (ae_kill);
589005ff 5036 ae_kill = NULL;
76ac938b 5037 sbitmap_free (trapping_expr);
7506f491
DE
5038}
5039\f
5040/* PRE utilities */
5041
cc2902df 5042/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 5043 block BB.
7506f491
DE
5044
5045 VISITED is a pointer to a working buffer for tracking which BB's have
5046 been visited. It is NULL for the top-level call.
5047
5048 We treat reaching expressions that go through blocks containing the same
5049 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5050 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5051 2 as not reaching. The intent is to improve the probability of finding
5052 only one reaching expression and to reduce register lifetimes by picking
5053 the closest such expression. */
5054
5055static int
1d088dee 5056pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
7506f491 5057{
36349f8b 5058 edge pred;
7506f491 5059
e2d2ed72 5060 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 5061 {
e2d2ed72 5062 basic_block pred_bb = pred->src;
7506f491 5063
36349f8b 5064 if (pred->src == ENTRY_BLOCK_PTR
7506f491 5065 /* Has this predecessor already been visited? */
0b17ab2f 5066 || visited[pred_bb->index])
c4c81601
RK
5067 ;/* Nothing to do. */
5068
7506f491 5069 /* Does this predecessor generate this expression? */
0b17ab2f 5070 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
5071 {
5072 /* Is this the occurrence we're looking for?
5073 Note that there's only one generating occurrence per block
5074 so we just need to check the block number. */
a65f3558 5075 if (occr_bb == pred_bb)
7506f491 5076 return 1;
c4c81601 5077
0b17ab2f 5078 visited[pred_bb->index] = 1;
7506f491
DE
5079 }
5080 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
5081 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5082 visited[pred_bb->index] = 1;
c4c81601 5083
7506f491
DE
5084 /* Neither gen nor kill. */
5085 else
ac7c5af5 5086 {
0b17ab2f 5087 visited[pred_bb->index] = 1;
89e606c9 5088 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 5089 return 1;
ac7c5af5 5090 }
7506f491
DE
5091 }
5092
5093 /* All paths have been checked. */
5094 return 0;
5095}
283a2545
RL
5096
5097/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 5098 memory allocated for that function is returned. */
283a2545
RL
5099
5100static int
1d088dee 5101pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
283a2545
RL
5102{
5103 int rval;
703ad42b 5104 char *visited = xcalloc (last_basic_block, 1);
283a2545 5105
8e42ace1 5106 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
5107
5108 free (visited);
c4c81601 5109 return rval;
283a2545 5110}
7506f491 5111\f
a42cd965
AM
5112
5113/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 5114 or on an edge. Set the block number of any insns generated to
a42cd965
AM
5115 the value of BB. */
5116
5117static rtx
1d088dee 5118process_insert_insn (struct expr *expr)
a42cd965
AM
5119{
5120 rtx reg = expr->reaching_reg;
fb0c0a12
RK
5121 rtx exp = copy_rtx (expr->expr);
5122 rtx pat;
a42cd965
AM
5123
5124 start_sequence ();
fb0c0a12
RK
5125
5126 /* If the expression is something that's an operand, like a constant,
5127 just copy it to a register. */
5128 if (general_operand (exp, GET_MODE (reg)))
5129 emit_move_insn (reg, exp);
5130
5131 /* Otherwise, make a new insn to compute this expression and make sure the
5132 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5133 expression to make sure we don't have any sharing issues. */
8d444206 5134 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
fb0c0a12 5135 abort ();
589005ff 5136
2f937369 5137 pat = get_insns ();
a42cd965
AM
5138 end_sequence ();
5139
5140 return pat;
5141}
589005ff 5142
a65f3558
JL
5143/* Add EXPR to the end of basic block BB.
5144
5145 This is used by both PRE and code hoisting.
5146
5147 For PRE, we want to verify that the expr is either transparent
5148 or locally anticipatable in the target block. This check makes
5149 no sense for code hoisting. */
7506f491
DE
5150
5151static void
1d088dee 5152insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
7506f491 5153{
e2d2ed72 5154 rtx insn = bb->end;
7506f491
DE
5155 rtx new_insn;
5156 rtx reg = expr->reaching_reg;
5157 int regno = REGNO (reg);
2f937369 5158 rtx pat, pat_end;
7506f491 5159
a42cd965 5160 pat = process_insert_insn (expr);
2f937369
DM
5161 if (pat == NULL_RTX || ! INSN_P (pat))
5162 abort ();
5163
5164 pat_end = pat;
5165 while (NEXT_INSN (pat_end) != NULL_RTX)
5166 pat_end = NEXT_INSN (pat_end);
7506f491
DE
5167
5168 /* If the last insn is a jump, insert EXPR in front [taking care to
4d6922ee 5169 handle cc0, etc. properly]. Similarly we need to take care of trapping
068473ec 5170 instructions in the presence of non-call exceptions. */
7506f491 5171
068473ec
JH
5172 if (GET_CODE (insn) == JUMP_INSN
5173 || (GET_CODE (insn) == INSN
5174 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
7506f491 5175 {
50b2596f 5176#ifdef HAVE_cc0
7506f491 5177 rtx note;
50b2596f 5178#endif
068473ec
JH
5179 /* It should always be the case that we can put these instructions
5180 anywhere in the basic block when performing PRE optimizations.
5181 Check this. */
3b25fbfe 5182 if (GET_CODE (insn) == INSN && pre
0b17ab2f 5183 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5184 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
068473ec 5185 abort ();
7506f491
DE
5186
5187 /* If this is a jump table, then we can't insert stuff here. Since
5188 we know the previous real insn must be the tablejump, we insert
5189 the new instruction just before the tablejump. */
5190 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5191 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5192 insn = prev_real_insn (insn);
5193
5194#ifdef HAVE_cc0
5195 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5196 if cc0 isn't set. */
5197 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5198 if (note)
5199 insn = XEXP (note, 0);
5200 else
5201 {
5202 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5203 if (maybe_cc0_setter
2c3c49de 5204 && INSN_P (maybe_cc0_setter)
7506f491
DE
5205 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5206 insn = maybe_cc0_setter;
5207 }
5208#endif
5209 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3c030e88 5210 new_insn = emit_insn_before (pat, insn);
3947e2f9 5211 }
c4c81601 5212
3947e2f9
RH
5213 /* Likewise if the last insn is a call, as will happen in the presence
5214 of exception handling. */
068473ec
JH
5215 else if (GET_CODE (insn) == CALL_INSN
5216 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
3947e2f9 5217 {
3947e2f9
RH
5218 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5219 we search backward and place the instructions before the first
5220 parameter is loaded. Do this in all cases for consistency, on the
fbe5a4a6 5221 presumption that we'll get better code elsewhere as well.
3947e2f9 5222
c4c81601 5223 It should always be the case that we can put these instructions
a65f3558
JL
5224 anywhere in the basic block when performing PRE optimizations.
5225 Check this. */
c4c81601 5226
a65f3558 5227 if (pre
0b17ab2f 5228 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5229 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
5230 abort ();
5231
5232 /* Since different machines initialize their parameter registers
5233 in different orders, assume nothing. Collect the set of all
5234 parameter registers. */
833366d6 5235 insn = find_first_parameter_load (insn, bb->head);
3947e2f9 5236
b1d26727
JL
5237 /* If we found all the parameter loads, then we want to insert
5238 before the first parameter load.
5239
5240 If we did not find all the parameter loads, then we might have
5241 stopped on the head of the block, which could be a CODE_LABEL.
5242 If we inserted before the CODE_LABEL, then we would be putting
5243 the insn in the wrong basic block. In that case, put the insn
b5229628 5244 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 5245 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 5246 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 5247 insn = NEXT_INSN (insn);
c4c81601 5248
3c030e88 5249 new_insn = emit_insn_before (pat, insn);
7506f491
DE
5250 }
5251 else
3c030e88 5252 new_insn = emit_insn_after (pat, insn);
7506f491 5253
2f937369 5254 while (1)
a65f3558 5255 {
2f937369 5256 if (INSN_P (pat))
a65f3558 5257 {
2f937369
DM
5258 add_label_notes (PATTERN (pat), new_insn);
5259 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 5260 }
2f937369
DM
5261 if (pat == pat_end)
5262 break;
5263 pat = NEXT_INSN (pat);
a65f3558 5264 }
3947e2f9 5265
7506f491
DE
5266 gcse_create_count++;
5267
5268 if (gcse_file)
5269 {
c4c81601 5270 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 5271 bb->index, INSN_UID (new_insn));
c4c81601
RK
5272 fprintf (gcse_file, "copying expression %d to reg %d\n",
5273 expr->bitmap_index, regno);
7506f491
DE
5274 }
5275}
5276
a42cd965
AM
5277/* Insert partially redundant expressions on edges in the CFG to make
5278 the expressions fully redundant. */
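/* An illustrative source-level view (hypothetical fragment) of what
   edge insertion accomplishes:

     if (c)
       x = a + b;
     y = a + b;

   Here a + b is available only on the then-path, so the second
   computation is partially redundant.  Inserting a + b on the edge
   around the conditional makes it fully redundant, after which the
   later occurrence can be satisfied by a copy from the reaching
   register.  */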
7506f491 5279
a42cd965 5280static int
1d088dee 5281pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
7506f491 5282{
c4c81601 5283 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
5284 sbitmap *inserted;
5285
a42cd965
AM
5286 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5287 if it reaches any of the deleted expressions. */
7506f491 5288
a42cd965
AM
5289 set_size = pre_insert_map[0]->size;
5290 num_edges = NUM_EDGES (edge_list);
02280659 5291 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 5292 sbitmap_vector_zero (inserted, num_edges);
7506f491 5293
a42cd965 5294 for (e = 0; e < num_edges; e++)
7506f491
DE
5295 {
5296 int indx;
e2d2ed72 5297 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 5298
a65f3558 5299 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 5300 {
a42cd965 5301 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 5302
02280659 5303 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
5304 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5305 {
5306 struct expr *expr = index_map[j];
5307 struct occr *occr;
a65f3558 5308
ff7cc307 5309 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
5310 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5311 {
5312 if (! occr->deleted_p)
5313 continue;
5314
5315 /* Insert this expression on this edge if it would
ff7cc307 5316 reach the deleted occurrence in BB. */
c4c81601
RK
5317 if (!TEST_BIT (inserted[e], j))
5318 {
5319 rtx insn;
5320 edge eg = INDEX_EDGE (edge_list, e);
5321
5322 /* We can't insert anything on an abnormal and
5323 critical edge, so we insert the insn at the end of
5324 the previous block. There are several alternatives
5325 detailed in Morgan's book P277 (sec 10.5) for
5326 handling this situation. This one is easiest for
5327 now. */
5328
5329 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5330 insert_insn_end_bb (index_map[j], bb, 0);
5331 else
5332 {
5333 insn = process_insert_insn (index_map[j]);
5334 insert_insn_on_edge (insn, eg);
5335 }
5336
5337 if (gcse_file)
5338 {
5339 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
5340 bb->index,
5341 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
c4c81601
RK
5342 fprintf (gcse_file, "copy expression %d\n",
5343 expr->bitmap_index);
5344 }
5345
a13d4ebf 5346 update_ld_motion_stores (expr);
c4c81601
RK
5347 SET_BIT (inserted[e], j);
5348 did_insert = 1;
5349 gcse_create_count++;
5350 }
5351 }
5352 }
7506f491
DE
5353 }
5354 }
5faf03ae 5355
5a660bff 5356 sbitmap_vector_free (inserted);
a42cd965 5357 return did_insert;
7506f491
DE
5358}
5359
c4c81601 5360/* Copy the result of INSN to REG. INDX is the expression number. */
7506f491
DE
5361
5362static void
1d088dee 5363pre_insert_copy_insn (struct expr *expr, rtx insn)
7506f491
DE
5364{
5365 rtx reg = expr->reaching_reg;
5366 int regno = REGNO (reg);
5367 int indx = expr->bitmap_index;
5368 rtx set = single_set (insn);
5369 rtx new_insn;
5370
5371 if (!set)
5372 abort ();
c4c81601 5373
47a3dae1 5374 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
c4c81601 5375
7506f491
DE
5376 /* Keep register set table up to date. */
5377 record_one_set (regno, new_insn);
5378
5379 gcse_create_count++;
5380
5381 if (gcse_file)
a42cd965
AM
5382 fprintf (gcse_file,
5383 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5384 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5385 INSN_UID (insn), regno);
222f7ba9 5386 update_ld_motion_stores (expr);
7506f491
DE
5387}
5388
5389/* Copy available expressions that reach the redundant expression
5390 to `reaching_reg'. */
5391
5392static void
1d088dee 5393pre_insert_copies (void)
7506f491 5394{
2e653e39 5395 unsigned int i;
c4c81601
RK
5396 struct expr *expr;
5397 struct occr *occr;
5398 struct occr *avail;
a65f3558 5399
7506f491
DE
5400 /* For each available expression in the table, copy the result to
5401 `reaching_reg' if the expression reaches a deleted one.
5402
5403 ??? The current algorithm is rather brute force.
5404 Need to do some profiling. */
5405
02280659
ZD
5406 for (i = 0; i < expr_hash_table.size; i++)
5407 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5408 {
5409 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5410 we don't want to insert a copy here because the expression may not
5411 really be redundant. So only insert an insn if the expression was
5412 deleted. This test also avoids further processing if the
5413 expression wasn't deleted anywhere. */
5414 if (expr->reaching_reg == NULL)
5415 continue;
5416
5417 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5418 {
5419 if (! occr->deleted_p)
5420 continue;
7506f491 5421
c4c81601
RK
5422 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5423 {
5424 rtx insn = avail->insn;
7506f491 5425
c4c81601
RK
5426 /* No need to handle this one if handled already. */
5427 if (avail->copied_p)
5428 continue;
7506f491 5429
c4c81601
RK
5430 /* Don't handle this one if it's a redundant one. */
5431 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5432 continue;
7506f491 5433
c4c81601 5434 /* Or if the expression doesn't reach the deleted one. */
589005ff 5435 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
5436 expr,
5437 BLOCK_FOR_INSN (occr->insn)))
c4c81601 5438 continue;
7506f491 5439
c4c81601
RK
5440 /* Copy the result of avail to reaching_reg. */
5441 pre_insert_copy_insn (expr, insn);
5442 avail->copied_p = 1;
5443 }
5444 }
5445 }
7506f491
DE
5446}
5447
10d1bb36
JH
5448/* Emit move from SRC to DEST noting the equivalence with expression computed
5449 in INSN. */
5450static rtx
1d088dee 5451gcse_emit_move_after (rtx src, rtx dest, rtx insn)
10d1bb36
JH
5452{
5453 rtx new;
6bdb8dd6 5454 rtx set = single_set (insn), set2;
10d1bb36
JH
5455 rtx note;
5456 rtx eqv;
5457
5458 /* This should never fail since we're creating a reg->reg copy
5459 we've verified to be valid. */
5460
6bdb8dd6 5461 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 5462
10d1bb36 5463 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
5464 set2 = single_set (new);
5465 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5466 return new;
10d1bb36
JH
5467 if ((note = find_reg_equal_equiv_note (insn)))
5468 eqv = XEXP (note, 0);
5469 else
5470 eqv = SET_SRC (set);
5471
a500466b 5472 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
5473
5474 return new;
5475}
5476
7506f491 5477/* Delete redundant computations.
7506f491
DE
5478 Deletion is done by changing the insn to copy the `reaching_reg' of
5479 the expression into the result of the SET. It is left to later passes
5480 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5481
cc2902df 5482 Returns nonzero if a change is made. */
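/* A sketch of the rewrite in RTL-ish pseudocode (the register numbers
   are made up for illustration):

     before:  (set (reg 70) (plus (reg 60) (reg 61)))  ;; redundant insn
     after:   (set (reg 70) (reg 90))                   ;; reaching_reg is reg 90

   The addition itself is supplied along every path, either by an
   earlier computation or by an insn inserted on an edge.  */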
7506f491
DE
5483
5484static int
1d088dee 5485pre_delete (void)
7506f491 5486{
2e653e39 5487 unsigned int i;
63bc1d05 5488 int changed;
c4c81601
RK
5489 struct expr *expr;
5490 struct occr *occr;
a65f3558 5491
7506f491 5492 changed = 0;
02280659
ZD
5493 for (i = 0; i < expr_hash_table.size; i++)
5494 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5495 {
5496 int indx = expr->bitmap_index;
7506f491 5497
c4c81601
RK
5498 /* We only need to search antic_occr since we require
5499 ANTLOC != 0. */
7506f491 5500
c4c81601
RK
5501 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5502 {
5503 rtx insn = occr->insn;
5504 rtx set;
e2d2ed72 5505 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5506
0b17ab2f 5507 if (TEST_BIT (pre_delete_map[bb->index], indx))
c4c81601
RK
5508 {
5509 set = single_set (insn);
5510 if (! set)
5511 abort ();
5512
5513 /* Create a pseudo-reg to store the result of reaching
5514 expressions into. Get the mode for the new pseudo from
5515 the mode of the original destination pseudo. */
5516 if (expr->reaching_reg == NULL)
5517 expr->reaching_reg
5518 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5519
10d1bb36
JH
5520 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5521 delete_insn (insn);
5522 occr->deleted_p = 1;
5523 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5524 changed = 1;
5525 gcse_subst_count++;
7506f491 5526
c4c81601
RK
5527 if (gcse_file)
5528 {
5529 fprintf (gcse_file,
5530 "PRE: redundant insn %d (expression %d) in ",
5531 INSN_UID (insn), indx);
5532 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
0b17ab2f 5533 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5534 }
5535 }
5536 }
5537 }
7506f491
DE
5538
5539 return changed;
5540}
5541
5542/* Perform GCSE optimizations using PRE.
5543 This is called by one_pre_gcse_pass after all the dataflow analysis
5544 has been done.
5545
c4c81601
RK
5546 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5547 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5548 Compiler Design and Implementation.
7506f491 5549
c4c81601
RK
5550 ??? A new pseudo reg is created to hold the reaching expression. The nice
5551 thing about the classical approach is that it would try to use an existing
5552 reg. If the register can't be adequately optimized [i.e. we introduce
5553 reload problems], one could add a pass here to propagate the new register
5554 through the block.
7506f491 5555
c4c81601
RK
5556 ??? We don't handle single sets in PARALLELs because we're [currently] not
5557 able to copy the rest of the parallel when we insert copies to create full
5558 redundancies from partial redundancies. However, there's no reason why we
5559 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5560 redundancies. */
5561
5562static int
1d088dee 5563pre_gcse (void)
7506f491 5564{
2e653e39
RK
5565 unsigned int i;
5566 int did_insert, changed;
7506f491 5567 struct expr **index_map;
c4c81601 5568 struct expr *expr;
7506f491
DE
5569
5570 /* Compute a mapping from expression number (`bitmap_index') to
5571 hash table entry. */
5572
703ad42b 5573 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
02280659
ZD
5574 for (i = 0; i < expr_hash_table.size; i++)
5575 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 5576 index_map[expr->bitmap_index] = expr;
7506f491
DE
5577
5578 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5579 pre_redundant_insns = sbitmap_alloc (max_cuid);
5580 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5581
5582 /* Delete the redundant insns first so that
5583 - we know what register to use for the new insns and for the other
5584 ones with reaching expressions
5585 - we know which insns are redundant when we go to create copies */
c4c81601 5586
7506f491
DE
5587 changed = pre_delete ();
5588
a42cd965 5589 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5590
7506f491 5591 /* In other places with reaching expressions, copy the expression to the
a42cd965 5592 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5593 pre_insert_copies ();
a42cd965
AM
5594 if (did_insert)
5595 {
5596 commit_edge_insertions ();
5597 changed = 1;
5598 }
7506f491 5599
283a2545 5600 free (index_map);
76ac938b 5601 sbitmap_free (pre_redundant_insns);
7506f491
DE
5602 return changed;
5603}
5604
5605/* Top level routine to perform one PRE GCSE pass.
5606
cc2902df 5607 Return nonzero if a change was made. */
7506f491
DE
5608
5609static int
1d088dee 5610one_pre_gcse_pass (int pass)
7506f491
DE
5611{
5612 int changed = 0;
5613
5614 gcse_subst_count = 0;
5615 gcse_create_count = 0;
5616
02280659 5617 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 5618 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5619 if (flag_gcse_lm)
5620 compute_ld_motion_mems ();
5621
02280659 5622 compute_hash_table (&expr_hash_table);
a13d4ebf 5623 trim_ld_motion_mems ();
7506f491 5624 if (gcse_file)
02280659 5625 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 5626
02280659 5627 if (expr_hash_table.n_elems > 0)
7506f491 5628 {
02280659 5629 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
5630 compute_pre_data ();
5631 changed |= pre_gcse ();
a42cd965 5632 free_edge_list (edge_list);
7506f491
DE
5633 free_pre_mem ();
5634 }
c4c81601 5635
a13d4ebf 5636 free_ldst_mems ();
a42cd965 5637 remove_fake_edges ();
02280659 5638 free_hash_table (&expr_hash_table);
7506f491
DE
5639
5640 if (gcse_file)
5641 {
c4c81601
RK
5642 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5643 current_function_name, pass, bytes_used);
5644 fprintf (gcse_file, "%d substs, %d insns created\n",
5645 gcse_subst_count, gcse_create_count);
7506f491
DE
5646 }
5647
5648 return changed;
5649}
aeb2f500
JW
5650\f
5651/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5652 If notes are added to an insn which references a CODE_LABEL, the
5653 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5654 because the following loop optimization pass requires them. */
aeb2f500
JW
5655
5656/* ??? This is very similar to the loop.c add_label_notes function. We
5657 could probably share code here. */
5658
5659/* ??? If there was a jump optimization pass after gcse and before loop,
5660 then we would not need to do this here, because jump would add the
5661 necessary REG_LABEL notes. */
5662
5663static void
1d088dee 5664add_label_notes (rtx x, rtx insn)
aeb2f500
JW
5665{
5666 enum rtx_code code = GET_CODE (x);
5667 int i, j;
6f7d635c 5668 const char *fmt;
aeb2f500
JW
5669
5670 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5671 {
6b3603c2 5672 /* This code used to ignore labels that referred to dispatch tables to
e0bb17a8 5673 avoid flow generating (slightly) worse code.
6b3603c2 5674
ac7c5af5
JL
5675 We no longer ignore such label references (see LABEL_REF handling in
5676 mark_jump_label for additional information). */
c4c81601 5677
6b8c9327 5678 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 5679 REG_NOTES (insn));
5b1ef594 5680 if (LABEL_P (XEXP (x, 0)))
589005ff 5681 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5682 return;
5683 }
5684
c4c81601 5685 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5686 {
5687 if (fmt[i] == 'e')
5688 add_label_notes (XEXP (x, i), insn);
5689 else if (fmt[i] == 'E')
5690 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5691 add_label_notes (XVECEXP (x, i, j), insn);
5692 }
5693}
a65f3558
JL
5694
5695/* Compute transparent outgoing information for each block.
5696
5697 An expression is transparent to an edge unless it is killed by
5698 the edge itself. This can only happen with abnormal control flow,
5699 when the edge is traversed through a call. This happens with
5700 non-local labels and exceptions.
5701
5702 This would not be necessary if we split the edge. While this is
5703 normally impossible for abnormal critical edges, with some effort
5704 it should be possible with exception handling, since we still have
5705 control over which handler should be invoked. But due to increased
5706 EH table sizes, this may not be worthwhile. */
5707
5708static void
1d088dee 5709compute_transpout (void)
a65f3558 5710{
e0082a72 5711 basic_block bb;
2e653e39 5712 unsigned int i;
c4c81601 5713 struct expr *expr;
a65f3558 5714
d55bc081 5715 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 5716
e0082a72 5717 FOR_EACH_BB (bb)
a65f3558 5718 {
a65f3558
JL
5719 /* Note that flow inserted a nop at the end of basic blocks that
5720 end in call instructions for reasons other than abnormal
5721 control flow. */
e0082a72 5722 if (GET_CODE (bb->end) != CALL_INSN)
a65f3558
JL
5723 continue;
5724
02280659
ZD
5725 for (i = 0; i < expr_hash_table.size; i++)
5726 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
c4c81601
RK
5727 if (GET_CODE (expr->expr) == MEM)
5728 {
5729 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5730 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5731 continue;
589005ff 5732
c4c81601
RK
5733 /* ??? Optimally, we would use interprocedural alias
5734 analysis to determine if this mem is actually killed
5735 by this call. */
e0082a72 5736 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 5737 }
a65f3558
JL
5738 }
5739}
dfdb644f
JL
5740
5741/* Removal of useless null pointer checks */
5742
dfdb644f 5743/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5744 invalidate nonnull_local and set nonnull_killed. DATA is really a
5745 `null_pointer_info *'.
dfdb644f
JL
5746
5747 We ignore hard registers. */
c4c81601 5748
dfdb644f 5749static void
1d088dee 5750invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
dfdb644f 5751{
770ae6cc
RK
5752 unsigned int regno;
5753 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5754
dfdb644f
JL
5755 while (GET_CODE (x) == SUBREG)
5756 x = SUBREG_REG (x);
5757
5758 /* Ignore anything that is not a register or is a hard register. */
5759 if (GET_CODE (x) != REG
0511851c
MM
5760 || REGNO (x) < npi->min_reg
5761 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5762 return;
5763
0511851c 5764 regno = REGNO (x) - npi->min_reg;
dfdb644f 5765
e0082a72
ZD
5766 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5767 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
dfdb644f
JL
5768}
5769
0511851c
MM
5770/* Do null-pointer check elimination for the registers indicated in
5771 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5772 they are not our responsibility to free. */
dfdb644f 5773
99a15921 5774static int
1d088dee
AJ
5775delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5776 sbitmap *nonnull_avout,
5777 struct null_pointer_info *npi)
dfdb644f 5778{
e0082a72 5779 basic_block bb, current_block;
0511851c
MM
5780 sbitmap *nonnull_local = npi->nonnull_local;
5781 sbitmap *nonnull_killed = npi->nonnull_killed;
99a15921 5782 int something_changed = 0;
589005ff 5783
dfdb644f
JL
5784 /* Compute local properties, nonnull and killed. A register will have
5785 the nonnull property if at the end of the current block its value is
5786 known to be nonnull. The killed property indicates that somewhere in
5787 the block any information we had about the register is killed.
5788
5789 Note that a register can have both properties in a single block. That
5790 indicates that it's killed, then later in the block a new value is
5791 computed. */
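  /* For reference, the global propagation performed below by
     compute_available solves the classic availability equations,
     with AVIN empty at the entry block:

       AVOUT(bb) = NONNULL_LOCAL(bb) | (AVIN(bb) & ~NONNULL_KILLED(bb))
       AVIN(bb)  = intersection of AVOUT(p) over all predecessors p  */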
d55bc081
ZD
5792 sbitmap_vector_zero (nonnull_local, last_basic_block);
5793 sbitmap_vector_zero (nonnull_killed, last_basic_block);
c4c81601 5794
e0082a72 5795 FOR_EACH_BB (current_block)
dfdb644f
JL
5796 {
5797 rtx insn, stop_insn;
5798
0511851c
MM
5799 /* Set the current block for invalidate_nonnull_info. */
5800 npi->current_block = current_block;
5801
dfdb644f
JL
5802 /* Scan each insn in the basic block looking for memory references and
5803 register sets. */
e0082a72
ZD
5804 stop_insn = NEXT_INSN (current_block->end);
5805 for (insn = current_block->head;
dfdb644f
JL
5806 insn != stop_insn;
5807 insn = NEXT_INSN (insn))
5808 {
5809 rtx set;
0511851c 5810 rtx reg;
dfdb644f
JL
5811
5812 /* Ignore anything that is not a normal insn. */
2c3c49de 5813 if (! INSN_P (insn))
dfdb644f
JL
5814 continue;
5815
5816 /* Basically ignore anything that is not a simple SET. We do have
5817 to make sure to invalidate nonnull_local and set nonnull_killed
5818 for such insns though. */
5819 set = single_set (insn);
5820 if (!set)
5821 {
0511851c 5822 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5823 continue;
5824 }
5825
f63d1bf7 5826 /* See if we've got a usable memory load. We handle it first
dfdb644f
JL
5827 in case it uses its address register as a dest (which kills
5828 the nonnull property). */
5829 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
5830 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5831 && REGNO (reg) >= npi->min_reg
5832 && REGNO (reg) < npi->max_reg)
e0082a72 5833 SET_BIT (nonnull_local[current_block->index],
0511851c 5834 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5835
5836 /* Now invalidate stuff clobbered by this insn. */
0511851c 5837 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5838
5839 /* And handle stores; we do these last since any sets in INSN can
5840 not kill the nonnull property if it is derived from a MEM
5841 appearing in a SET_DEST. */
5842 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
5843 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5844 && REGNO (reg) >= npi->min_reg
5845 && REGNO (reg) < npi->max_reg)
e0082a72 5846 SET_BIT (nonnull_local[current_block->index],
0511851c 5847 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5848 }
5849 }
5850
5851 /* Now compute global properties based on the local properties. This
fbe5a4a6 5852 is a classic global availability algorithm. */
ce724250
JL
5853 compute_available (nonnull_local, nonnull_killed,
5854 nonnull_avout, nonnull_avin);
dfdb644f
JL
5855
5856 /* Now look at each bb and see if it ends with a compare of a value
5857 against zero. */
e0082a72 5858 FOR_EACH_BB (bb)
dfdb644f 5859 {
e0082a72 5860 rtx last_insn = bb->end;
0511851c 5861 rtx condition, earliest;
dfdb644f
JL
5862 int compare_and_branch;
5863
0511851c
MM
5864 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5865 since BLOCK_REG[BB] is zero if this block did not end with a
5866 comparison against zero, this condition works. */
e0082a72
ZD
5867 if (block_reg[bb->index] < npi->min_reg
5868 || block_reg[bb->index] >= npi->max_reg)
dfdb644f
JL
5869 continue;
5870
5871 /* LAST_INSN is a conditional jump. Get its condition. */
5872 condition = get_condition (last_insn, &earliest);
5873
40d7a3fe
NB
5874 /* If we can't determine the condition then skip. */
5875 if (! condition)
5876 continue;
5877
dfdb644f 5878 /* Is the register known to have a nonzero value? */
e0082a72 5879 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
dfdb644f
JL
5880 continue;
5881
5882 /* Try to compute whether the compare/branch at the loop end is one or
5883 two instructions. */
5884 if (earliest == last_insn)
5885 compare_and_branch = 1;
5886 else if (earliest == prev_nonnote_insn (last_insn))
5887 compare_and_branch = 2;
5888 else
5889 continue;
5890
5891 /* We know the register in this comparison is nonnull at exit from
5892 this block. We can optimize this comparison. */
5893 if (GET_CODE (condition) == NE)
5894 {
5895 rtx new_jump;
5896
38c1593d
JH
5897 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5898 last_insn);
dfdb644f
JL
5899 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5900 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5901 emit_barrier_after (new_jump);
5902 }
8e184d9c 5903
99a15921 5904 something_changed = 1;
9cd56be1 5905 delete_insn (last_insn);
dfdb644f 5906 if (compare_and_branch == 2)
589005ff 5907 delete_insn (earliest);
e0082a72 5908 purge_dead_edges (bb);
0511851c
MM
5909
5910 /* Don't check this block again. (Note that BLOCK_END is
589005ff 5911 invalid here; we deleted the last instruction in the
0511851c 5912 block.) */
e0082a72 5913 block_reg[bb->index] = 0;
0511851c 5914 }
99a15921
JL
5915
5916 return something_changed;
0511851c
MM
5917}
5918
5919/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5920 at compile time.
5921
5922 This is conceptually similar to global constant/copy propagation and
5923 classic global CSE (it even uses the same dataflow equations as cprop).
5924
5925 If a register is used as memory address with the form (mem (reg)), then we
5926 know that REG can not be zero at that point in the program. Any instruction
5927 which sets REG "kills" this property.
5928
5929 So, if every path leading to a conditional branch has an available memory
5930 reference of that form, then we know the register can not have the value
589005ff 5931 zero at the conditional branch.
0511851c 5932
fbe5a4a6 5933 So we merely need to compute the local properties and propagate that data
0511851c
MM
5934 around the cfg, then optimize where possible.
5935
5936 We run this pass two times. Once before CSE, then again after CSE. This
5937 has proven to be the most profitable approach. It is rare for new
5938 optimization opportunities of this nature to appear after the first CSE
5939 pass.
5940
5941 This could probably be integrated with global cprop with a little work. */
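/* An illustrative source-level example (hypothetical, not from this
   file).  The load through P proves P is nonnull on that path, so the
   explicit test against zero is dead and its branch can be deleted:

     int f (int *p)
     {
       int x = *p;
       if (p == 0)
         return -1;
       return x;
     }
*/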
5942
99a15921 5943int
1d088dee 5944delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
0511851c 5945{
0511851c 5946 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 5947 unsigned int *block_reg;
e0082a72 5948 basic_block bb;
0511851c
MM
5949 int reg;
5950 int regs_per_pass;
5951 int max_reg;
5952 struct null_pointer_info npi;
99a15921 5953 int something_changed = 0;
0511851c 5954
0511851c 5955 /* If we have only a single block, then there's nothing to do. */
0b17ab2f 5956 if (n_basic_blocks <= 1)
99a15921 5957 return 0;
0511851c
MM
5958
5959 /* Trying to perform global optimizations on flow graphs which have
5960 a high connectivity will take a long time and is unlikely to be
5961 particularly useful.
5962
43e72072 5963 In normal circumstances a cfg should have about twice as many edges
0511851c
MM
5964 as blocks. But we do not want to punish small functions which have
5965 a couple switch statements. So we require a relatively large number
5966 of basic blocks and the ratio of edges to blocks to be high. */
0b17ab2f 5967 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
99a15921 5968 return 0;
0511851c 5969
0511851c
MM
5970 /* We need four bitmaps, each with a bit for each register in each
5971 basic block. */
5972 max_reg = max_reg_num ();
d55bc081 5973 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
0511851c
MM
5974
5975 /* Allocate bitmaps to hold local and global properties. */
d55bc081
ZD
5976 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5977 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5978 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5979 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
0511851c
MM
5980
5981 /* Go through the basic blocks, seeing whether or not each block
5982 ends with a conditional branch whose condition is a comparison
5983 against zero. Record the register compared in BLOCK_REG. */
703ad42b 5984 block_reg = xcalloc (last_basic_block, sizeof (int));
e0082a72 5985 FOR_EACH_BB (bb)
0511851c 5986 {
e0082a72 5987 rtx last_insn = bb->end;
0511851c
MM
5988 rtx condition, earliest, reg;
5989
5990 /* We only want conditional branches. */
5991 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
5992 || !any_condjump_p (last_insn)
5993 || !onlyjump_p (last_insn))
0511851c
MM
5994 continue;
5995
5996 /* LAST_INSN is a conditional jump. Get its condition. */
5997 condition = get_condition (last_insn, &earliest);
5998
4fe9b91c 5999 /* If we were unable to get the condition, or it is not an equality
0511851c
MM
6000 comparison against zero, then there's nothing we can do. */
6001 if (!condition
6002 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6003 || GET_CODE (XEXP (condition, 1)) != CONST_INT
589005ff 6004 || (XEXP (condition, 1)
0511851c
MM
6005 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6006 continue;
6007
6008 /* We must be checking a register against zero. */
6009 reg = XEXP (condition, 0);
6010 if (GET_CODE (reg) != REG)
6011 continue;
6012
e0082a72 6013 block_reg[bb->index] = REGNO (reg);
0511851c
MM
6014 }
6015
6016 /* Go through the algorithm for each block of registers. */
6017 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6018 {
6019 npi.min_reg = reg;
6020 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
99a15921
JL
6021 something_changed |= delete_null_pointer_checks_1 (block_reg,
6022 nonnull_avin,
6023 nonnull_avout,
6024 &npi);
dfdb644f
JL
6025 }
6026
0511851c
MM
6027 /* Free the table of registers compared at the end of every block. */
6028 free (block_reg);
6029
dfdb644f 6030 /* Free bitmaps. */
5a660bff
DB
6031 sbitmap_vector_free (npi.nonnull_local);
6032 sbitmap_vector_free (npi.nonnull_killed);
6033 sbitmap_vector_free (nonnull_avin);
6034 sbitmap_vector_free (nonnull_avout);
99a15921
JL
6035
6036 return something_changed;
dfdb644f 6037}
bb457bd9
JL
6038
6039/* Code Hoisting variables and subroutines. */
6040
6041/* Very busy expressions. */
6042static sbitmap *hoist_vbein;
6043static sbitmap *hoist_vbeout;
6044
6045/* Hoistable expressions. */
6046static sbitmap *hoist_exprs;
6047
6048/* Dominator bitmaps. */
355be0dc 6049dominance_info dominators;
bb457bd9
JL
6050
6051/* ??? We could compute post dominators and run this algorithm in
68e82b83 6052 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
6053 more effective than the tail merging code in jump.c.
6054
6055 It's unclear if tail merging could be run in parallel with
6056 code hoisting. It would be nice. */
6057
6058/* Allocate vars used for code hoisting analysis. */
6059
6060static void
1d088dee 6061alloc_code_hoist_mem (int n_blocks, int n_exprs)
bb457bd9
JL
6062{
6063 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6064 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6065 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6066
6067 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6068 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6069 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6070 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
6071}
6072
6073/* Free vars used for code hoisting analysis. */
6074
6075static void
1d088dee 6076free_code_hoist_mem (void)
bb457bd9 6077{
5a660bff
DB
6078 sbitmap_vector_free (antloc);
6079 sbitmap_vector_free (transp);
6080 sbitmap_vector_free (comp);
bb457bd9 6081
5a660bff
DB
6082 sbitmap_vector_free (hoist_vbein);
6083 sbitmap_vector_free (hoist_vbeout);
6084 sbitmap_vector_free (hoist_exprs);
6085 sbitmap_vector_free (transpout);
bb457bd9 6086
355be0dc 6087 free_dominance_info (dominators);
bb457bd9
JL
6088}
6089
6090/* Compute the very busy expressions at entry/exit from each block.
6091
6092 An expression is very busy if all paths from a given point
6093 compute the expression. */
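/* For reference, the dataflow equations solved below (the block
   adjacent to the exit keeps an empty VBEOUT):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN(s) over all successors s

   A hypothetical source-level illustration:

     if (c)
       x = a + b;
     else
       y = a + b;

   a + b is computed on every path leaving the test, so it is very
   busy at the exit of the test's block and is a candidate for
   hoisting ahead of the branch.  */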
6094
6095static void
1d088dee 6096compute_code_hoist_vbeinout (void)
bb457bd9 6097{
e0082a72
ZD
6098 int changed, passes;
6099 basic_block bb;
bb457bd9 6100
d55bc081
ZD
6101 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6102 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
6103
6104 passes = 0;
6105 changed = 1;
c4c81601 6106
bb457bd9
JL
6107 while (changed)
6108 {
6109 changed = 0;
c4c81601 6110
bb457bd9
JL
6111 /* We scan the blocks in the reverse order to speed up
6112 the convergence. */
e0082a72 6113 FOR_EACH_BB_REVERSE (bb)
bb457bd9 6114 {
e0082a72
ZD
6115 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6116 hoist_vbeout[bb->index], transp[bb->index]);
6117 if (bb->next_bb != EXIT_BLOCK_PTR)
6118 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 6119 }
c4c81601 6120
bb457bd9
JL
6121 passes++;
6122 }
6123
6124 if (gcse_file)
6125 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6126}
6127
6128/* Top level routine to do the dataflow analysis needed by code hoisting. */
6129
6130static void
1d088dee 6131compute_code_hoist_data (void)
bb457bd9 6132{
02280659 6133 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
6134 compute_transpout ();
6135 compute_code_hoist_vbeinout ();
355be0dc 6136 dominators = calculate_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
6137 if (gcse_file)
6138 fprintf (gcse_file, "\n");
6139}
6140
6141/* Determine if the expression identified by EXPR_INDEX would
6142 reach BB unimpaired if it was placed at the end of EXPR_BB.
6143
6144 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6145 to me that the expression must either be computed or transparent in
6146 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6147 would allow the expression to be hoisted out of loops, even if
6148 the expression wasn't a loop invariant.
6149
6150 Contrast this to reachability for PRE where an expression is
6151 considered reachable if *any* path reaches instead of *all*
6152 paths. */
6153
6154static int
1d088dee 6155hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
bb457bd9
JL
6156{
6157 edge pred;
283a2545 6158 int visited_allocated_locally = 0;
589005ff 6159
bb457bd9
JL
6160
6161 if (visited == NULL)
6162 {
8e42ace1 6163 visited_allocated_locally = 1;
d55bc081 6164 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
6165 }
6166
e2d2ed72 6167 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 6168 {
e2d2ed72 6169 basic_block pred_bb = pred->src;
bb457bd9
JL
6170
6171 if (pred->src == ENTRY_BLOCK_PTR)
6172 break;
f305679f
JH
6173 else if (pred_bb == expr_bb)
6174 continue;
0b17ab2f 6175 else if (visited[pred_bb->index])
bb457bd9 6176 continue;
c4c81601 6177
bb457bd9 6178 /* Does this predecessor generate this expression? */
0b17ab2f 6179 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 6180 break;
0b17ab2f 6181 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 6182 break;
c4c81601 6183
bb457bd9
JL
6184 /* Not killed. */
6185 else
6186 {
0b17ab2f 6187 visited[pred_bb->index] = 1;
bb457bd9
JL
6188 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6189 pred_bb, visited))
6190 break;
6191 }
6192 }
589005ff 6193 if (visited_allocated_locally)
283a2545 6194 free (visited);
c4c81601 6195
bb457bd9
JL
6196 return (pred == NULL);
6197}
6198\f
6199/* Actually perform code hoisting. */
c4c81601 6200
bb457bd9 6201static void
1d088dee 6202hoist_code (void)
bb457bd9 6203{
e0082a72 6204 basic_block bb, dominated;
c635a1ec
DB
6205 basic_block *domby;
6206 unsigned int domby_len;
6207 unsigned int i,j;
bb457bd9 6208 struct expr **index_map;
c4c81601 6209 struct expr *expr;
bb457bd9 6210
d55bc081 6211 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6212
6213 /* Compute a mapping from expression number (`bitmap_index') to
6214 hash table entry. */
6215
703ad42b 6216 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
02280659
ZD
6217 for (i = 0; i < expr_hash_table.size; i++)
6218 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 6219 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6220
6221 /* Walk over each basic block looking for potentially hoistable
6222 expressions, nothing gets hoisted from the entry block. */
e0082a72 6223 FOR_EACH_BB (bb)
bb457bd9
JL
6224 {
6225 int found = 0;
6226 int insn_inserted_p;
6227
c635a1ec 6228 domby_len = get_dominated_by (dominators, bb, &domby);
bb457bd9
JL
6229 /* Examine each expression that is very busy at the exit of this
6230 block. These are the potentially hoistable expressions. */
e0082a72 6231 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6232 {
6233 int hoistable = 0;
c4c81601 6234
c635a1ec
DB
6235 if (TEST_BIT (hoist_vbeout[bb->index], i)
6236 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6237 {
6238 /* We've found a potentially hoistable expression; now
6239 we look at every block BB dominates to see if it
6240 computes the expression. */
c635a1ec 6241 for (j = 0; j < domby_len; j++)
bb457bd9 6242 {
c635a1ec 6243 dominated = domby[j];
bb457bd9 6244 /* Ignore self dominance. */
c635a1ec 6245 if (bb == dominated)
bb457bd9 6246 continue;
bb457bd9
JL
6247 /* We've found a dominated block, now see if it computes
6248 the busy expression and whether or not moving that
6249 expression to the "beginning" of that block is safe. */
e0082a72 6250 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6251 continue;
6252
6253 /* Note if the expression would reach the dominated block
589005ff 6254 unimpaired if it was placed at the end of BB.
bb457bd9
JL
6255
6256 Keep track of how many times this expression is hoistable
6257 from a dominated block into BB. */
e0082a72 6258 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6259 hoistable++;
6260 }
6261
ff7cc307 6262 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6263 expression, then note it in the bitmap of expressions to
6264 hoist. It makes no sense to hoist things which are computed
6265 in only one BB, and doing so tends to pessimize register
6266 allocation. One could increase this value to try harder
6267 to avoid any possible code expansion due to register
6268 allocation issues; however experiments have shown that
6269 the vast majority of hoistable expressions are only movable
e0bb17a8 6270 from two successors, so raising this threshold is likely
bb457bd9
JL
6271 to nullify any benefit we get from code hoisting. */
6272 if (hoistable > 1)
6273 {
e0082a72 6274 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6275 found = 1;
6276 }
6277 }
6278 }
bb457bd9
JL
6279 /* If we found nothing to hoist, then quit now. */
6280 if (! found)
c635a1ec 6281 {
1d088dee 6282 free (domby);
bb457bd9 6283 continue;
c635a1ec 6284 }
bb457bd9
JL
6285
6286 /* Loop over all the hoistable expressions. */
e0082a72 6287 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6288 {
6289 /* We want to insert the expression into BB only once, so
6290 note when we've inserted it. */
6291 insn_inserted_p = 0;
6292
6293 /* These tests should be the same as the tests above. */
e0082a72 6294 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6295 {
6296 /* We've found a potentially hoistable expression; now
6297 we look at every block BB dominates to see if it
6298 computes the expression. */
c635a1ec 6299 for (j = 0; j < domby_len; j++)
bb457bd9 6300 {
c635a1ec 6301 dominated = domby[j];
bb457bd9 6302 /* Ignore self dominance. */
c635a1ec 6303 if (bb == dominated)
bb457bd9
JL
6304 continue;
6305
6306 /* We've found a dominated block, now see if it computes
6307 the busy expression and whether or not moving that
6308 expression to the "beginning" of that block is safe. */
e0082a72 6309 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6310 continue;
6311
6312 /* The expression is computed in the dominated block and
6313 it would be safe to compute it at the start of the
6314 dominated block. Now we have to determine if the
ff7cc307 6315 expression would reach the dominated block if it was
bb457bd9 6316 placed at the end of BB. */
e0082a72 6317 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6318 {
6319 struct expr *expr = index_map[i];
6320 struct occr *occr = expr->antic_occr;
6321 rtx insn;
6322 rtx set;
6323
ff7cc307 6324 /* Find the right occurrence of this expression. */
e0082a72 6325 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
6326 occr = occr->next;
6327
6328 /* Should never happen. */
6329 if (!occr)
6330 abort ();
6331
6332 insn = occr->insn;
589005ff 6333
bb457bd9
JL
6334 set = single_set (insn);
6335 if (! set)
6336 abort ();
6337
6338 /* Create a pseudo-reg to store the result of reaching
6339 expressions into. Get the mode for the new pseudo
6340 from the mode of the original destination pseudo. */
6341 if (expr->reaching_reg == NULL)
6342 expr->reaching_reg
6343 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6344
10d1bb36
JH
6345 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6346 delete_insn (insn);
6347 occr->deleted_p = 1;
6348 if (!insn_inserted_p)
bb457bd9 6349 {
10d1bb36
JH
6350 insert_insn_end_bb (index_map[i], bb, 0);
6351 insn_inserted_p = 1;
bb457bd9
JL
6352 }
6353 }
6354 }
6355 }
6356 }
c635a1ec 6357 free (domby);
bb457bd9 6358 }
c4c81601 6359
8e42ace1 6360 free (index_map);
bb457bd9
JL
6361}
6362
6363/* Top level routine to perform one code hoisting (aka unification) pass
6364
cc2902df 6365 Return nonzero if a change was made. */
bb457bd9
JL
6366
6367static int
1d088dee 6368one_code_hoisting_pass (void)
bb457bd9
JL
6369{
6370 int changed = 0;
6371
02280659
ZD
6372 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6373 compute_hash_table (&expr_hash_table);
bb457bd9 6374 if (gcse_file)
02280659 6375 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 6376
02280659 6377 if (expr_hash_table.n_elems > 0)
bb457bd9 6378 {
02280659 6379 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
6380 compute_code_hoist_data ();
6381 hoist_code ();
6382 free_code_hoist_mem ();
6383 }
c4c81601 6384
02280659 6385 free_hash_table (&expr_hash_table);
bb457bd9
JL
6386
6387 return changed;
6388}
a13d4ebf
AM
6389\f
6390/* Here we provide the things required to do store motion towards
6391 the exit. In order for this to be effective, gcse also needed to
6392 be taught how to move a load when it is killed only by a store to itself.
6393
6394 int i;
6395 float a[10];
6396
6397 void foo(float scale)
6398 {
6399 for (i=0; i<10; i++)
6400 a[i] *= scale;
6401 }
6402
6403 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
6404 the load out since it's live around the loop, and stored at the bottom
6405 of the loop.
a13d4ebf 6406
589005ff 6407 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
6408 an enhancement to gcse which, when using edge-based lcm, recognizes
6409 this situation and allows gcse to move the load out of the loop.
6410
6411 Once gcse has hoisted the load, store motion can then push this
6412 load towards the exit, and we end up with no loads or stores of 'i'
6413 in the loop. */
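
/* Schematically (an illustrative sketch, not the exact code the passes
   produce; 'i_tmp' is a hypothetical name), once load motion hoists the
   load of 'i' and store motion pushes the store towards the exit, the
   loop above becomes roughly:

     void foo (float scale)
     {
       int i_tmp;
       for (i_tmp = 0; i_tmp < 10; i_tmp++)
         a[i_tmp] *= scale;
       i = i_tmp;
     }

   leaving no loads or stores of 'i' inside the loop.  */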
6414
ff7cc307 6415/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
6416 doesn't find one, we create one and initialize it. */
6417
6418static struct ls_expr *
1d088dee 6419ldst_entry (rtx x)
a13d4ebf
AM
6420{
6421 struct ls_expr * ptr;
6422
6423 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6424 if (expr_equiv_p (ptr->pattern, x))
6425 break;
6426
6427 if (!ptr)
6428 {
703ad42b 6429 ptr = xmalloc (sizeof (struct ls_expr));
a13d4ebf
AM
6430
6431 ptr->next = pre_ldst_mems;
6432 ptr->expr = NULL;
6433 ptr->pattern = x;
47a3dae1 6434 ptr->pattern_regs = NULL_RTX;
a13d4ebf
AM
6435 ptr->loads = NULL_RTX;
6436 ptr->stores = NULL_RTX;
6437 ptr->reaching_reg = NULL_RTX;
6438 ptr->invalid = 0;
6439 ptr->index = 0;
6440 ptr->hash_index = 0;
6441 pre_ldst_mems = ptr;
6442 }
589005ff 6443
a13d4ebf
AM
6444 return ptr;
6445}
6446
6447/* Free up an individual ldst entry. */
6448
589005ff 6449static void
1d088dee 6450free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 6451{
aaa4ca30
AJ
6452 free_INSN_LIST_list (& ptr->loads);
6453 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
6454
6455 free (ptr);
6456}
6457
6458/* Free up all memory associated with the ldst list. */
6459
6460static void
1d088dee 6461free_ldst_mems (void)
a13d4ebf 6462{
589005ff 6463 while (pre_ldst_mems)
a13d4ebf
AM
6464 {
6465 struct ls_expr * tmp = pre_ldst_mems;
6466
6467 pre_ldst_mems = pre_ldst_mems->next;
6468
6469 free_ldst_entry (tmp);
6470 }
6471
6472 pre_ldst_mems = NULL;
6473}
6474
6475/* Dump debugging info about the ldst list. */
6476
6477static void
1d088dee 6478print_ldst_list (FILE * file)
a13d4ebf
AM
6479{
6480 struct ls_expr * ptr;
6481
6482 fprintf (file, "LDST list: \n");
6483
6484 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6485 {
6486 fprintf (file, " Pattern (%3d): ", ptr->index);
6487
6488 print_rtl (file, ptr->pattern);
6489
6490 fprintf (file, "\n Loads : ");
6491
6492 if (ptr->loads)
6493 print_rtl (file, ptr->loads);
6494 else
6495 fprintf (file, "(nil)");
6496
6497 fprintf (file, "\n Stores : ");
6498
6499 if (ptr->stores)
6500 print_rtl (file, ptr->stores);
6501 else
6502 fprintf (file, "(nil)");
6503
6504 fprintf (file, "\n\n");
6505 }
6506
6507 fprintf (file, "\n");
6508}
6509
 6510/* Return the entry matching X in the list of ldst only expressions, or NULL if there is no valid match. */
6511
6512static struct ls_expr *
1d088dee 6513find_rtx_in_ldst (rtx x)
a13d4ebf
AM
6514{
6515 struct ls_expr * ptr;
589005ff 6516
a13d4ebf
AM
6517 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6518 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6519 return ptr;
6520
6521 return NULL;
6522}
6523
 6524/* Assign each element of the list of mems a monotonically increasing index, and return the number of entries. */
6525
6526static int
1d088dee 6527enumerate_ldsts (void)
a13d4ebf
AM
6528{
6529 struct ls_expr * ptr;
6530 int n = 0;
6531
6532 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6533 ptr->index = n++;
6534
6535 return n;
6536}
6537
6538/* Return first item in the list. */
6539
6540static inline struct ls_expr *
1d088dee 6541first_ls_expr (void)
a13d4ebf
AM
6542{
6543 return pre_ldst_mems;
6544}
6545
0e8a66de 6546/* Return the next item in the list after the specified one. */
a13d4ebf
AM
6547
6548static inline struct ls_expr *
1d088dee 6549next_ls_expr (struct ls_expr * ptr)
a13d4ebf
AM
6550{
6551 return ptr->next;
6552}
6553\f
6554/* Load Motion for loads which only kill themselves. */
6555
 6556/* Return true if X is a simple MEM operation, with no side effects
 6557 and no reference to the stack pointer. These are the types of loads we
 6558 consider for the ld_motion list; otherwise we let the usual aliasing take care of it. */
6559
589005ff 6560static int
1d088dee 6561simple_mem (rtx x)
a13d4ebf
AM
6562{
6563 if (GET_CODE (x) != MEM)
6564 return 0;
589005ff 6565
a13d4ebf
AM
6566 if (MEM_VOLATILE_P (x))
6567 return 0;
589005ff 6568
a13d4ebf
AM
6569 if (GET_MODE (x) == BLKmode)
6570 return 0;
aaa4ca30 6571
47a3dae1
ZD
6572 /* If we are handling exceptions, we must be careful with memory references
6573 that may trap. If we are not, the behavior is undefined, so we may just
6574 continue. */
6575 if (flag_non_call_exceptions && may_trap_p (x))
98d3d336
RS
6576 return 0;
6577
47a3dae1
ZD
6578 if (side_effects_p (x))
6579 return 0;
589005ff 6580
47a3dae1
ZD
6581 /* Do not consider function arguments passed on stack. */
6582 if (reg_mentioned_p (stack_pointer_rtx, x))
6583 return 0;
6584
6585 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6586 return 0;
6587
6588 return 1;
a13d4ebf
AM
6589}
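
/* For illustration (hand-written RTL, not pass output): a reference
   such as (mem:SI (symbol_ref:SI ("i"))) passes the checks above,
   while a volatile MEM, a BLKmode MEM, or a MEM mentioning the stack
   pointer, e.g. (mem:SI (plus:SI (reg:SI sp) (const_int 4))), is
   rejected.  */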
6590
589005ff
KH
6591/* Make sure there isn't a buried reference in this pattern anywhere.
6592 If there is, invalidate the entry for it since we're not capable
 6593 of fixing it up just yet. We have to be sure we know about ALL
a13d4ebf
AM
 6594 loads since the aliasing code assumes that entries in the
 6595 ld_motion list do not alias one another. If we miss a load, we will get
589005ff 6596 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
6597 fix it up. */
6598
6599static void
1d088dee 6600invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
6601{
6602 const char * fmt;
8e42ace1 6603 int i, j;
a13d4ebf
AM
6604 struct ls_expr * ptr;
6605
6606 /* Invalidate it in the list. */
6607 if (GET_CODE (x) == MEM && simple_mem (x))
6608 {
6609 ptr = ldst_entry (x);
6610 ptr->invalid = 1;
6611 }
6612
6613 /* Recursively process the insn. */
6614 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6615
a13d4ebf
AM
6616 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6617 {
6618 if (fmt[i] == 'e')
6619 invalidate_any_buried_refs (XEXP (x, i));
6620 else if (fmt[i] == 'E')
6621 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6622 invalidate_any_buried_refs (XVECEXP (x, i, j));
6623 }
6624}
6625
4d3eb89a
HPN
6626/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6627 being defined as MEM loads and stores to symbols, with no side effects
6628 and no registers in the expression. For a MEM destination, we also
6629 check that the insn is still valid if we replace the destination with a
6630 REG, as is done in update_ld_motion_stores. If there are any uses/defs
 6631 which don't match these criteria, they are invalidated and trimmed out
6632 later. */
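
/* For example (illustrative RTL): an insn of the form
   (set (reg:SI 100) (mem:SI (symbol_ref:SI ("i")))) is recorded as a
   load of that MEM, (set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 100))
   is recorded as a store, and any other appearance of the MEM
   invalidates its entry.  */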
a13d4ebf 6633
589005ff 6634static void
1d088dee 6635compute_ld_motion_mems (void)
a13d4ebf
AM
6636{
6637 struct ls_expr * ptr;
e0082a72 6638 basic_block bb;
a13d4ebf 6639 rtx insn;
589005ff 6640
a13d4ebf
AM
6641 pre_ldst_mems = NULL;
6642
e0082a72 6643 FOR_EACH_BB (bb)
a13d4ebf 6644 {
e0082a72
ZD
6645 for (insn = bb->head;
6646 insn && insn != NEXT_INSN (bb->end);
a13d4ebf
AM
6647 insn = NEXT_INSN (insn))
6648 {
735e8085 6649 if (INSN_P (insn))
a13d4ebf
AM
6650 {
6651 if (GET_CODE (PATTERN (insn)) == SET)
6652 {
6653 rtx src = SET_SRC (PATTERN (insn));
6654 rtx dest = SET_DEST (PATTERN (insn));
6655
6656 /* Check for a simple LOAD... */
6657 if (GET_CODE (src) == MEM && simple_mem (src))
6658 {
6659 ptr = ldst_entry (src);
6660 if (GET_CODE (dest) == REG)
6661 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6662 else
6663 ptr->invalid = 1;
6664 }
6665 else
6666 {
6667 /* Make sure there isn't a buried load somewhere. */
6668 invalidate_any_buried_refs (src);
6669 }
589005ff 6670
a13d4ebf
AM
6671 /* Check for stores. Don't worry about aliased ones, they
6672 will block any movement we might do later. We only care
6673 about this exact pattern since those are the only
 6674 circumstances in which we will ignore the aliasing info. */
6675 if (GET_CODE (dest) == MEM && simple_mem (dest))
6676 {
6677 ptr = ldst_entry (dest);
589005ff 6678
f54104df 6679 if (GET_CODE (src) != MEM
4d3eb89a
HPN
6680 && GET_CODE (src) != ASM_OPERANDS
6681 /* Check for REG manually since want_to_gcse_p
6682 returns 0 for all REGs. */
6683 && (REG_P (src) || want_to_gcse_p (src)))
a13d4ebf
AM
6684 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6685 else
6686 ptr->invalid = 1;
6687 }
6688 }
6689 else
6690 invalidate_any_buried_refs (PATTERN (insn));
6691 }
6692 }
6693 }
6694}
6695
589005ff 6696/* Remove any references that have been invalidated or that are not in the
a13d4ebf
AM
6697 expression list for pre gcse. */
6698
6699static void
1d088dee 6700trim_ld_motion_mems (void)
a13d4ebf
AM
6701{
6702 struct ls_expr * last = NULL;
6703 struct ls_expr * ptr = first_ls_expr ();
6704
6705 while (ptr != NULL)
6706 {
6707 int del = ptr->invalid;
6708 struct expr * expr = NULL;
589005ff 6709
a13d4ebf 6710 /* Delete if entry has been made invalid. */
589005ff 6711 if (!del)
a13d4ebf
AM
6712 {
6713 unsigned int i;
589005ff 6714
a13d4ebf
AM
6715 del = 1;
6716 /* Delete if we cannot find this mem in the expression list. */
02280659 6717 for (i = 0; i < expr_hash_table.size && del; i++)
a13d4ebf 6718 {
02280659 6719 for (expr = expr_hash_table.table[i];
589005ff 6720 expr != NULL;
a13d4ebf
AM
6721 expr = expr->next_same_hash)
6722 if (expr_equiv_p (expr->expr, ptr->pattern))
6723 {
6724 del = 0;
6725 break;
6726 }
6727 }
6728 }
589005ff 6729
a13d4ebf
AM
6730 if (del)
6731 {
6732 if (last != NULL)
6733 {
6734 last->next = ptr->next;
6735 free_ldst_entry (ptr);
6736 ptr = last->next;
6737 }
6738 else
6739 {
6740 pre_ldst_mems = pre_ldst_mems->next;
6741 free_ldst_entry (ptr);
6742 ptr = pre_ldst_mems;
6743 }
6744 }
6745 else
6746 {
6747 /* Set the expression field if we are keeping it. */
6748 last = ptr;
6749 ptr->expr = expr;
6750 ptr = ptr->next;
6751 }
6752 }
6753
6754 /* Show the world what we've found. */
6755 if (gcse_file && pre_ldst_mems != NULL)
6756 print_ldst_list (gcse_file);
6757}
6758
6759/* This routine will take an expression which we are replacing with
6760 a reaching register, and update any stores that are needed if
6761 that expression is in the ld_motion list. Stores are updated by
 6762 copying their SRC to the reaching register, and then storing
 6763 the reaching register into the store location. This keeps the
6764 correct value in the reaching register for the loads. */
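
/* Schematically (illustrative only): if the loads of a MEM were given
   reaching register r, a store (set (mem) (expr)) on the list becomes
   the pair

     (set (reg r) (expr))
     (set (mem) (reg r))

   so that r always holds the value last stored to the MEM.  */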
6765
6766static void
1d088dee 6767update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
6768{
6769 struct ls_expr * mem_ptr;
6770
6771 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6772 {
589005ff
KH
 6773 /* We can try to find just the REACHED stores, but it shouldn't
6774 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
6775 dead and should be eliminated later. */
6776
4d3eb89a
HPN
6777 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6778 where reg is the reaching reg used in the load. We checked in
6779 compute_ld_motion_mems that we can replace (set mem expr) with
6780 (set reg expr) in that insn. */
a13d4ebf 6781 rtx list = mem_ptr->stores;
589005ff 6782
a13d4ebf
AM
6783 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6784 {
6785 rtx insn = XEXP (list, 0);
6786 rtx pat = PATTERN (insn);
6787 rtx src = SET_SRC (pat);
6788 rtx reg = expr->reaching_reg;
c57718d3 6789 rtx copy, new;
a13d4ebf
AM
6790
6791 /* If we've already copied it, continue. */
6792 if (expr->reaching_reg == src)
6793 continue;
589005ff 6794
a13d4ebf
AM
6795 if (gcse_file)
6796 {
6797 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6798 print_rtl (gcse_file, expr->reaching_reg);
6799 fprintf (gcse_file, ":\n ");
6800 print_inline_rtx (gcse_file, insn, 8);
6801 fprintf (gcse_file, "\n");
6802 }
589005ff 6803
47a3dae1 6804 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
c57718d3
RK
6805 new = emit_insn_before (copy, insn);
6806 record_one_set (REGNO (reg), new);
a13d4ebf
AM
6807 SET_SRC (pat) = reg;
6808
 6809 /* Un-recognize this pattern since it's probably different now. */
6810 INSN_CODE (insn) = -1;
6811 gcse_create_count++;
6812 }
6813 }
6814}
6815\f
6816/* Store motion code. */
6817
47a3dae1
ZD
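/* During store motion the ls_expr fields are reused: the 'loads' list
   holds the anticipatable stores, the 'stores' list holds the available
   stores, and 'reaching_reg' temporarily records the insn at which the
   last availability check failed (see find_moveable_store below).  */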
6818#define ANTIC_STORE_LIST(x) ((x)->loads)
6819#define AVAIL_STORE_LIST(x) ((x)->stores)
6820#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6821
589005ff 6822/* This is used to communicate the target array we want to use in the
aaa4ca30 6823 reg_set_info routine when called via the note_stores mechanism. */
47a3dae1
ZD
6824static int * regvec;
6825
6826/* And current insn, for the same routine. */
6827static rtx compute_store_table_current_insn;
aaa4ca30 6828
a13d4ebf
AM
6829/* Used in computing the reverse edge graph bit vectors. */
6830static sbitmap * st_antloc;
6831
6832/* Global holding the number of store expressions we are dealing with. */
6833static int num_stores;
6834
aaa4ca30 6835/* Record in REGVEC the UID of the insn that sets DEST, if DEST is a register. Called via note_stores. */
a13d4ebf 6836
aaa4ca30 6837static void
1d088dee
AJ
6838reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6839 void *data ATTRIBUTE_UNUSED)
a13d4ebf 6840{
aaa4ca30
AJ
6841 if (GET_CODE (dest) == SUBREG)
6842 dest = SUBREG_REG (dest);
adfcce61 6843
aaa4ca30 6844 if (GET_CODE (dest) == REG)
47a3dae1 6845 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
a13d4ebf
AM
6846}
6847
47a3dae1
ZD
 6848/* Return false if some of the registers in list X are killed
 6849 by a register set recorded in the array REGS_SET. */
1d088dee 6850
47a3dae1 6851static bool
1d088dee 6852store_ops_ok (rtx x, int *regs_set)
47a3dae1
ZD
6853{
6854 rtx reg;
6855
6856 for (; x; x = XEXP (x, 1))
6857 {
6858 reg = XEXP (x, 0);
6859 if (regs_set[REGNO(reg)])
1d088dee 6860 return false;
47a3dae1 6861 }
a13d4ebf 6862
47a3dae1
ZD
6863 return true;
6864}
6865
6866/* Returns a list of registers mentioned in X. */
6867static rtx
1d088dee 6868extract_mentioned_regs (rtx x)
47a3dae1
ZD
6869{
6870 return extract_mentioned_regs_helper (x, NULL_RTX);
6871}
6872
6873/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6874 registers. */
6875static rtx
1d088dee 6876extract_mentioned_regs_helper (rtx x, rtx accum)
a13d4ebf
AM
6877{
6878 int i;
6879 enum rtx_code code;
6880 const char * fmt;
6881
6882 /* Repeat is used to turn tail-recursion into iteration. */
6883 repeat:
6884
6885 if (x == 0)
47a3dae1 6886 return accum;
a13d4ebf
AM
6887
6888 code = GET_CODE (x);
6889 switch (code)
6890 {
6891 case REG:
47a3dae1 6892 return alloc_EXPR_LIST (0, x, accum);
a13d4ebf
AM
6893
6894 case MEM:
6895 x = XEXP (x, 0);
6896 goto repeat;
6897
6898 case PRE_DEC:
6899 case PRE_INC:
6900 case POST_DEC:
6901 case POST_INC:
47a3dae1
ZD
6902 /* We do not run this function with arguments having side effects. */
6903 abort ();
a13d4ebf
AM
6904
6905 case PC:
6906 case CC0: /*FIXME*/
6907 case CONST:
6908 case CONST_INT:
6909 case CONST_DOUBLE:
69ef87e2 6910 case CONST_VECTOR:
a13d4ebf
AM
6911 case SYMBOL_REF:
6912 case LABEL_REF:
6913 case ADDR_VEC:
6914 case ADDR_DIFF_VEC:
47a3dae1 6915 return accum;
a13d4ebf
AM
6916
6917 default:
6918 break;
6919 }
6920
6921 i = GET_RTX_LENGTH (code) - 1;
6922 fmt = GET_RTX_FORMAT (code);
589005ff 6923
a13d4ebf
AM
6924 for (; i >= 0; i--)
6925 {
6926 if (fmt[i] == 'e')
6927 {
6928 rtx tem = XEXP (x, i);
6929
6930 /* If we are about to do the last recursive call
47a3dae1 6931 needed at this level, change it into iteration. */
a13d4ebf
AM
6932 if (i == 0)
6933 {
6934 x = tem;
6935 goto repeat;
6936 }
589005ff 6937
47a3dae1 6938 accum = extract_mentioned_regs_helper (tem, accum);
a13d4ebf
AM
6939 }
6940 else if (fmt[i] == 'E')
6941 {
6942 int j;
589005ff 6943
a13d4ebf 6944 for (j = 0; j < XVECLEN (x, i); j++)
47a3dae1 6945 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
a13d4ebf
AM
6946 }
6947 }
6948
47a3dae1 6949 return accum;
a13d4ebf
AM
6950}
6951
47a3dae1
ZD
 6952/* Determine whether INSN is a MEM store pattern that we will consider moving.
 6953 REGS_SET_BEFORE is an array recording the registers set before (and including) the
 6954 current insn, REGS_SET_AFTER similarly records registers set after (and
6955 including) the insn in this basic block. We must be passing through BB from
6956 head to end, as we are using this fact to speed things up.
1d088dee 6957
47a3dae1
ZD
6958 The results are stored this way:
6959
6960 -- the first anticipatable expression is added into ANTIC_STORE_LIST
6961 -- if the processed expression is not anticipatable, NULL_RTX is added
6962 there instead, so that we can use it as indicator that no further
6963 expression of this type may be anticipatable
6964 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
6965 consequently, all of them but this head are dead and may be deleted.
 6966 -- if the expression is not available, the insn that causes it to fail to be
6967 available is stored in reaching_reg.
6968
 6969 Things are complicated a bit by the fact that there may already be stores
6970 to the same MEM from other blocks; also caller must take care of the
e0bb17a8 6971 necessary cleanup of the temporary markers after end of the basic block.
47a3dae1 6972 */
a13d4ebf
AM
6973
6974static void
1d088dee 6975find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
a13d4ebf
AM
6976{
6977 struct ls_expr * ptr;
47a3dae1
ZD
6978 rtx dest, set, tmp;
6979 int check_anticipatable, check_available;
6980 basic_block bb = BLOCK_FOR_INSN (insn);
a13d4ebf 6981
47a3dae1
ZD
6982 set = single_set (insn);
6983 if (!set)
a13d4ebf
AM
6984 return;
6985
47a3dae1 6986 dest = SET_DEST (set);
589005ff 6987
a13d4ebf
AM
6988 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6989 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
6990 return;
6991
47a3dae1
ZD
6992 if (side_effects_p (dest))
6993 return;
aaa4ca30 6994
47a3dae1
ZD
6995 /* If we are handling exceptions, we must be careful with memory references
6996 that may trap. If we are not, the behavior is undefined, so we may just
6997 continue. */
94f24ddc 6998 if (flag_non_call_exceptions && may_trap_p (dest))
47a3dae1 6999 return;
1d088dee 7000
a13d4ebf 7001 ptr = ldst_entry (dest);
47a3dae1
ZD
7002 if (!ptr->pattern_regs)
7003 ptr->pattern_regs = extract_mentioned_regs (dest);
7004
7005 /* Do not check for anticipatability if we either found one anticipatable
7006 store already, or tested for one and found out that it was killed. */
7007 check_anticipatable = 0;
7008 if (!ANTIC_STORE_LIST (ptr))
7009 check_anticipatable = 1;
7010 else
7011 {
7012 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7013 if (tmp != NULL_RTX
7014 && BLOCK_FOR_INSN (tmp) != bb)
7015 check_anticipatable = 1;
7016 }
7017 if (check_anticipatable)
7018 {
7019 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7020 tmp = NULL_RTX;
7021 else
7022 tmp = insn;
7023 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7024 ANTIC_STORE_LIST (ptr));
7025 }
a13d4ebf 7026
e0bb17a8 7027 /* It is not necessary to check whether the store is available if we did
47a3dae1
ZD
7028 it successfully before; if we failed before, do not bother to check
7029 until we reach the insn that caused us to fail. */
7030 check_available = 0;
7031 if (!AVAIL_STORE_LIST (ptr))
7032 check_available = 1;
7033 else
7034 {
7035 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7036 if (BLOCK_FOR_INSN (tmp) != bb)
7037 check_available = 1;
7038 }
7039 if (check_available)
7040 {
 7041 /* Check that we have already reached the insn at which the check
7042 failed last time. */
7043 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7044 {
7045 for (tmp = bb->end;
7046 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7047 tmp = PREV_INSN (tmp))
7048 continue;
7049 if (tmp == insn)
7050 check_available = 0;
7051 }
7052 else
7053 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7054 bb, regs_set_after,
7055 &LAST_AVAIL_CHECK_FAILURE (ptr));
7056 }
7057 if (!check_available)
7058 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7059}
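
/* For instance (an illustrative scenario, not pass output): if a basic
   block contains two stores to the same simple MEM and nothing between
   them kills the value, the first store ends up on ANTIC_STORE_LIST
   (it is anticipatable at the block entry), while the second becomes
   the head of AVAIL_STORE_LIST (its value is what is available at the
   block exit); per the comment above, the non-head entries on the
   available list are dead and may be deleted.  */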
1d088dee 7060
47a3dae1 7061/* Find available and anticipatable stores. */
a13d4ebf
AM
7062
7063static int
1d088dee 7064compute_store_table (void)
a13d4ebf 7065{
e0082a72
ZD
7066 int ret;
7067 basic_block bb;
aaa4ca30 7068 unsigned regno;
47a3dae1
ZD
7069 rtx insn, pat, tmp;
7070 int *last_set_in, *already_set;
7071 struct ls_expr * ptr, **prev_next_ptr_ptr;
aaa4ca30 7072
a13d4ebf
AM
7073 max_gcse_regno = max_reg_num ();
7074
703ad42b 7075 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
aaa4ca30 7076 max_gcse_regno);
d55bc081 7077 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 7078 pre_ldst_mems = 0;
47a3dae1
ZD
7079 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7080 already_set = xmalloc (sizeof (int) * max_gcse_regno);
aaa4ca30 7081
a13d4ebf 7082 /* Find all the stores we care about. */
e0082a72 7083 FOR_EACH_BB (bb)
a13d4ebf 7084 {
47a3dae1
ZD
7085 /* First compute the registers set in this block. */
7086 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7087 regvec = last_set_in;
7088
7089 for (insn = bb->head;
7090 insn != NEXT_INSN (bb->end);
7091 insn = NEXT_INSN (insn))
7092 {
7093 if (! INSN_P (insn))
7094 continue;
7095
7096 if (GET_CODE (insn) == CALL_INSN)
7097 {
7098 bool clobbers_all = false;
7099#ifdef NON_SAVING_SETJMP
7100 if (NON_SAVING_SETJMP
7101 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7102 clobbers_all = true;
7103#endif
7104
7105 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7106 if (clobbers_all
7107 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7108 last_set_in[regno] = INSN_UID (insn);
7109 }
7110
7111 pat = PATTERN (insn);
7112 compute_store_table_current_insn = insn;
7113 note_stores (pat, reg_set_info, NULL);
7114 }
7115
7116 /* Record the set registers. */
7117 for (regno = 0; regno < max_gcse_regno; regno++)
7118 if (last_set_in[regno])
7119 SET_BIT (reg_set_in_block[bb->index], regno);
7120
7121 /* Now find the stores. */
7122 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7123 regvec = already_set;
7124 for (insn = bb->head;
7125 insn != NEXT_INSN (bb->end);
7126 insn = NEXT_INSN (insn))
a13d4ebf 7127 {
19652adf 7128 if (! INSN_P (insn))
a13d4ebf
AM
7129 continue;
7130
aaa4ca30
AJ
7131 if (GET_CODE (insn) == CALL_INSN)
7132 {
19652adf 7133 bool clobbers_all = false;
589005ff 7134#ifdef NON_SAVING_SETJMP
19652adf
ZW
7135 if (NON_SAVING_SETJMP
7136 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7137 clobbers_all = true;
7138#endif
7139
aaa4ca30 7140 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
7141 if (clobbers_all
7142 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
47a3dae1 7143 already_set[regno] = 1;
aaa4ca30 7144 }
589005ff 7145
a13d4ebf 7146 pat = PATTERN (insn);
aaa4ca30 7147 note_stores (pat, reg_set_info, NULL);
589005ff 7148
a13d4ebf 7149 /* Now that we've marked regs, look for stores. */
47a3dae1
ZD
7150 find_moveable_store (insn, already_set, last_set_in);
7151
7152 /* Unmark regs that are no longer set. */
7153 for (regno = 0; regno < max_gcse_regno; regno++)
7154 if (last_set_in[regno] == INSN_UID (insn))
7155 last_set_in[regno] = 0;
7156 }
7157
7158 /* Clear temporary marks. */
7159 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7160 {
7161 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7162 if (ANTIC_STORE_LIST (ptr)
7163 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7164 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7165 }
7166 }
7167
7168 /* Remove the stores that are not available anywhere, as there will
7169 be no opportunity to optimize them. */
7170 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7171 ptr != NULL;
7172 ptr = *prev_next_ptr_ptr)
7173 {
7174 if (!AVAIL_STORE_LIST (ptr))
7175 {
7176 *prev_next_ptr_ptr = ptr->next;
7177 free_ldst_entry (ptr);
a13d4ebf 7178 }
47a3dae1
ZD
7179 else
7180 prev_next_ptr_ptr = &ptr->next;
a13d4ebf
AM
7181 }
7182
7183 ret = enumerate_ldsts ();
589005ff 7184
a13d4ebf
AM
7185 if (gcse_file)
7186 {
47a3dae1 7187 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
a13d4ebf
AM
7188 print_ldst_list (gcse_file);
7189 }
589005ff 7190
47a3dae1
ZD
7191 free (last_set_in);
7192 free (already_set);
a13d4ebf
AM
7193 return ret;
7194}
7195
3b14e3af
ZD
7196/* Check to see if the load X is aliased with STORE_PATTERN.
7197 AFTER is true if we are checking the case when STORE_PATTERN occurs
 7198 after X. */
a13d4ebf 7199
47a3dae1 7200static bool
3b14e3af 7201load_kills_store (rtx x, rtx store_pattern, int after)
a13d4ebf 7202{
3b14e3af
ZD
7203 if (after)
7204 return anti_dependence (x, store_pattern);
7205 else
7206 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7207 rtx_addr_varies_p);
a13d4ebf
AM
7208}
7209
589005ff 7210/* Go through the entire insn X, looking for any loads which might alias
3b14e3af
ZD
7211 STORE_PATTERN. Return true if found.
7212 AFTER is true if we are checking the case when STORE_PATTERN occurs
7213 after the insn X. */
a13d4ebf 7214
47a3dae1 7215static bool
3b14e3af 7216find_loads (rtx x, rtx store_pattern, int after)
a13d4ebf
AM
7217{
7218 const char * fmt;
8e42ace1 7219 int i, j;
47a3dae1 7220 int ret = false;
a13d4ebf 7221
24a28584 7222 if (!x)
47a3dae1 7223 return false;
24a28584 7224
589005ff 7225 if (GET_CODE (x) == SET)
a13d4ebf
AM
7226 x = SET_SRC (x);
7227
7228 if (GET_CODE (x) == MEM)
7229 {
3b14e3af 7230 if (load_kills_store (x, store_pattern, after))
47a3dae1 7231 return true;
a13d4ebf
AM
7232 }
7233
7234 /* Recursively process the insn. */
7235 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 7236
a13d4ebf
AM
7237 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7238 {
7239 if (fmt[i] == 'e')
3b14e3af 7240 ret |= find_loads (XEXP (x, i), store_pattern, after);
a13d4ebf
AM
7241 else if (fmt[i] == 'E')
7242 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3b14e3af 7243 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
a13d4ebf
AM
7244 }
7245 return ret;
7246}
7247
589005ff 7248/* Check if INSN kills the store pattern X (is aliased with it).
3b14e3af
ZD
7249 AFTER is true if we are checking the case when store X occurs
7250 after the insn. Return true if it it does. */
a13d4ebf 7251
47a3dae1 7252static bool
3b14e3af 7253store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
a13d4ebf 7254{
94f24ddc
ZD
7255 rtx reg, base;
7256
735e8085 7257 if (!INSN_P (insn))
47a3dae1 7258 return false;
589005ff 7259
a13d4ebf
AM
7260 if (GET_CODE (insn) == CALL_INSN)
7261 {
1218665b
JJ
7262 /* A normal or pure call might read from pattern,
7263 but a const call will not. */
47a3dae1
ZD
7264 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7265 return true;
7266
94f24ddc
ZD
7267 /* But even a const call reads its parameters. Check whether the
7268 base of some of registers used in mem is stack pointer. */
7269 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7270 {
bc083e18 7271 base = find_base_term (XEXP (reg, 0));
94f24ddc
ZD
7272 if (!base
7273 || (GET_CODE (base) == ADDRESS
7274 && GET_MODE (base) == Pmode
7275 && XEXP (base, 0) == stack_pointer_rtx))
7276 return true;
7277 }
47a3dae1
ZD
7278
7279 return false;
a13d4ebf 7280 }
589005ff 7281
a13d4ebf
AM
7282 if (GET_CODE (PATTERN (insn)) == SET)
7283 {
7284 rtx pat = PATTERN (insn);
3b14e3af
ZD
7285 rtx dest = SET_DEST (pat);
7286
7287 if (GET_CODE (dest) == SIGN_EXTRACT
7288 || GET_CODE (dest) == ZERO_EXTRACT)
7289 dest = XEXP (dest, 0);
7290
a13d4ebf 7291 /* Check for memory stores to aliased objects. */
3b14e3af
ZD
7292 if (GET_CODE (dest) == MEM
7293 && !expr_equiv_p (dest, x))
7294 {
7295 if (after)
7296 {
7297 if (output_dependence (dest, x))
7298 return true;
7299 }
7300 else
7301 {
7302 if (output_dependence (x, dest))
7303 return true;
7304 }
7305 }
7306 return find_loads (SET_SRC (pat), x, after);
a13d4ebf
AM
7307 }
7308 else
3b14e3af 7309 return find_loads (PATTERN (insn), x, after);
a13d4ebf
AM
7310}
7311
47a3dae1
ZD
7312/* Returns true if the expression X is loaded or clobbered on or after INSN
 7313 within basic block BB. REGS_SET_AFTER is an array recording registers set in
 7314 or after the insn. X_REGS is the list of registers mentioned in X. If the store
 7315 is killed, return in FAIL_INSN the last insn in which this occurs. */
a13d4ebf 7316
47a3dae1 7317static bool
1d088dee
AJ
7318store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7319 int *regs_set_after, rtx *fail_insn)
a13d4ebf 7320{
47a3dae1 7321 rtx last = bb->end, act;
aaa4ca30 7322
47a3dae1 7323 if (!store_ops_ok (x_regs, regs_set_after))
1d088dee 7324 {
47a3dae1
ZD
7325 /* We do not know where it will happen. */
7326 if (fail_insn)
7327 *fail_insn = NULL_RTX;
7328 return true;
7329 }
a13d4ebf 7330
47a3dae1
ZD
7331 /* Scan from the end, so that fail_insn is determined correctly. */
7332 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
3b14e3af 7333 if (store_killed_in_insn (x, x_regs, act, false))
47a3dae1
ZD
7334 {
7335 if (fail_insn)
7336 *fail_insn = act;
7337 return true;
7338 }
589005ff 7339
47a3dae1 7340 return false;
a13d4ebf 7341}
1d088dee 7342
47a3dae1
ZD
7343/* Returns true if the expression X is loaded or clobbered on or before INSN
 7344 within basic block BB. X_REGS is the list of registers mentioned in X.
 7345 REGS_SET_BEFORE is an array recording registers set before or in this insn. */
7346static bool
1d088dee
AJ
7347store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7348 int *regs_set_before)
a13d4ebf 7349{
8e42ace1 7350 rtx first = bb->head;
a13d4ebf 7351
47a3dae1
ZD
7352 if (!store_ops_ok (x_regs, regs_set_before))
7353 return true;
a13d4ebf 7354
47a3dae1 7355 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
3b14e3af 7356 if (store_killed_in_insn (x, x_regs, insn, true))
47a3dae1 7357 return true;
589005ff 7358
47a3dae1 7359 return false;
a13d4ebf 7360}
1d088dee 7361
47a3dae1
ZD
 7362/* Fill in the global available, anticipatable, transparent and kill
 7363 vectors, based on the lists of available and anticipatable stores. */
a13d4ebf 7364static void
1d088dee 7365build_store_vectors (void)
a13d4ebf 7366{
47a3dae1
ZD
7367 basic_block bb;
7368 int *regs_set_in_block;
a13d4ebf
AM
7369 rtx insn, st;
7370 struct ls_expr * ptr;
47a3dae1 7371 unsigned regno;
a13d4ebf
AM
7372
7373 /* Build the gen_vector. This is any store in the table which is not killed
7374 by aliasing later in its block. */
703ad42b 7375 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7376 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 7377
703ad42b 7378 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7379 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 7380
a13d4ebf 7381 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 7382 {
47a3dae1 7383 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
a13d4ebf
AM
7384 {
7385 insn = XEXP (st, 0);
e2d2ed72 7386 bb = BLOCK_FOR_INSN (insn);
589005ff 7387
47a3dae1
ZD
7388 /* If we've already seen an available expression in this block,
7389 we can delete this one (It occurs earlier in the block). We'll
7390 copy the SRC expression to an unused register in case there
7391 are any side effects. */
7392 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf 7393 {
47a3dae1
ZD
7394 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7395 if (gcse_file)
7396 fprintf (gcse_file, "Removing redundant store:\n");
7397 replace_store_insn (r, XEXP (st, 0), bb);
7398 continue;
a13d4ebf 7399 }
47a3dae1 7400 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf 7401 }
589005ff 7402
47a3dae1
ZD
7403 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7404 {
7405 insn = XEXP (st, 0);
7406 bb = BLOCK_FOR_INSN (insn);
7407 SET_BIT (st_antloc[bb->index], ptr->index);
7408 }
a13d4ebf 7409 }
589005ff 7410
703ad42b 7411 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7412 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 7413
703ad42b 7414 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7415 sbitmap_vector_zero (transp, last_basic_block);
47a3dae1 7416 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
a13d4ebf 7417
47a3dae1
ZD
7418 FOR_EACH_BB (bb)
7419 {
7420 for (regno = 0; regno < max_gcse_regno; regno++)
7421 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7422
7423 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7424 {
7425 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7426 bb, regs_set_in_block, NULL))
7427 {
e0bb17a8 7428 /* It should not be necessary to consider the expression
47a3dae1
ZD
7429 killed if it is both anticipatable and available. */
7430 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7431 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7432 SET_BIT (ae_kill[bb->index], ptr->index);
1d088dee
AJ
7433 }
7434 else
7435 SET_BIT (transp[bb->index], ptr->index);
7436 }
47a3dae1
ZD
7437 }
7438
7439 free (regs_set_in_block);
aaa4ca30 7440
589005ff 7441 if (gcse_file)
aaa4ca30 7442 {
d55bc081
ZD
7443 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7444 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7445 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7446 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
7447 }
7448}
7449
fbe5a4a6 7450/* Insert an instruction at the beginning of a basic block, and update
a13d4ebf
AM
7451 the BLOCK_HEAD if needed. */
7452
589005ff 7453static void
1d088dee 7454insert_insn_start_bb (rtx insn, basic_block bb)
a13d4ebf
AM
7455{
7456 /* Insert at start of successor block. */
e2d2ed72
AM
7457 rtx prev = PREV_INSN (bb->head);
7458 rtx before = bb->head;
a13d4ebf
AM
7459 while (before != 0)
7460 {
7461 if (GET_CODE (before) != CODE_LABEL
7462 && (GET_CODE (before) != NOTE
7463 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7464 break;
7465 prev = before;
e2d2ed72 7466 if (prev == bb->end)
a13d4ebf
AM
7467 break;
7468 before = NEXT_INSN (before);
7469 }
7470
7471 insn = emit_insn_after (insn, prev);
7472
a13d4ebf
AM
7473 if (gcse_file)
7474 {
7475 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 7476 bb->index);
a13d4ebf
AM
7477 print_inline_rtx (gcse_file, insn, 6);
7478 fprintf (gcse_file, "\n");
7479 }
7480}
7481
7482/* This routine will insert a store on an edge. EXPR is the ldst entry for
cc2902df 7483 the memory reference, and E is the edge to insert it on. Returns nonzero
a13d4ebf
AM
7484 if an edge insertion was performed. */
7485
7486static int
1d088dee 7487insert_store (struct ls_expr * expr, edge e)
a13d4ebf
AM
7488{
7489 rtx reg, insn;
e2d2ed72 7490 basic_block bb;
a13d4ebf
AM
7491 edge tmp;
7492
 7493 /* We did all the deletes before this insert, so if we didn't delete a
7494 store, then we haven't set the reaching reg yet either. */
7495 if (expr->reaching_reg == NULL_RTX)
7496 return 0;
7497
7498 reg = expr->reaching_reg;
47a3dae1 7499 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
589005ff 7500
a13d4ebf
AM
7501 /* If we are inserting this expression on ALL predecessor edges of a BB,
7502 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 7503 edges so we don't try to insert it on the other edges. */
e2d2ed72 7504 bb = e->dest;
a13d4ebf
AM
7505 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7506 {
7507 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7508 if (index == EDGE_INDEX_NO_EDGE)
7509 abort ();
7510 if (! TEST_BIT (pre_insert_map[index], expr->index))
7511 break;
7512 }
7513
7514 /* If tmp is NULL, we found an insertion on every edge, blank the
7515 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 7516 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf
AM
7517 {
7518 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7519 {
7520 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7521 RESET_BIT (pre_insert_map[index], expr->index);
7522 }
7523 insert_insn_start_bb (insn, bb);
7524 return 0;
7525 }
589005ff 7526
a13d4ebf
AM
7527 /* We can't insert on this edge, so we'll insert at the head of the
7528 successors block. See Morgan, sec 10.5. */
7529 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7530 {
7531 insert_insn_start_bb (insn, bb);
7532 return 0;
7533 }
7534
7535 insert_insn_on_edge (insn, e);
589005ff 7536
a13d4ebf
AM
7537 if (gcse_file)
7538 {
7539 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 7540 e->src->index, e->dest->index);
a13d4ebf
AM
7541 print_inline_rtx (gcse_file, insn, 6);
7542 fprintf (gcse_file, "\n");
7543 }
589005ff 7544
a13d4ebf
AM
7545 return 1;
7546}
7547
7548/* This routine will replace a store with a SET to a specified register. */
7549
7550static void
1d088dee 7551replace_store_insn (rtx reg, rtx del, basic_block bb)
a13d4ebf
AM
7552{
7553 rtx insn;
589005ff 7554
9a318d30 7555 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
a13d4ebf 7556 insn = emit_insn_after (insn, del);
589005ff 7557
a13d4ebf
AM
7558 if (gcse_file)
7559 {
589005ff 7560 fprintf (gcse_file,
0b17ab2f 7561 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
a13d4ebf 7562 print_inline_rtx (gcse_file, del, 6);
8e42ace1 7563 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
a13d4ebf 7564 print_inline_rtx (gcse_file, insn, 6);
8e42ace1 7565 fprintf (gcse_file, "\n");
a13d4ebf 7566 }
589005ff 7567
49ce134f 7568 delete_insn (del);
a13d4ebf
AM
7569}
7570
7571
7572/* Delete a store, but copy the value that would have been stored into
7573 the reaching_reg for later storing. */
7574
7575static void
1d088dee 7576delete_store (struct ls_expr * expr, basic_block bb)
a13d4ebf
AM
7577{
7578 rtx reg, i, del;
7579
7580 if (expr->reaching_reg == NULL_RTX)
7581 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
a13d4ebf 7582
a13d4ebf 7583 reg = expr->reaching_reg;
589005ff 7584
a13d4ebf
AM
7585 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7586 {
7587 del = XEXP (i, 0);
e2d2ed72 7588 if (BLOCK_FOR_INSN (del) == bb)
a13d4ebf 7589 {
589005ff 7590 /* We know there is only one since we deleted redundant
a13d4ebf
AM
7591 ones during the available computation. */
7592 replace_store_insn (reg, del, bb);
7593 break;
7594 }
7595 }
7596}
7597
7598/* Free memory used by store motion. */
7599
589005ff 7600static void
1d088dee 7601free_store_memory (void)
a13d4ebf
AM
7602{
7603 free_ldst_mems ();
589005ff 7604
a13d4ebf 7605 if (ae_gen)
5a660bff 7606 sbitmap_vector_free (ae_gen);
a13d4ebf 7607 if (ae_kill)
5a660bff 7608 sbitmap_vector_free (ae_kill);
a13d4ebf 7609 if (transp)
5a660bff 7610 sbitmap_vector_free (transp);
a13d4ebf 7611 if (st_antloc)
5a660bff 7612 sbitmap_vector_free (st_antloc);
a13d4ebf 7613 if (pre_insert_map)
5a660bff 7614 sbitmap_vector_free (pre_insert_map);
a13d4ebf 7615 if (pre_delete_map)
5a660bff 7616 sbitmap_vector_free (pre_delete_map);
aaa4ca30
AJ
7617 if (reg_set_in_block)
7618 sbitmap_vector_free (reg_set_in_block);
589005ff 7619
a13d4ebf
AM
7620 ae_gen = ae_kill = transp = st_antloc = NULL;
7621 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7622}
7623
7624/* Perform store motion. Much like gcse, except we move expressions the
7625 other way by looking at the flowgraph in reverse. */
7626
7627static void
1d088dee 7628store_motion (void)
a13d4ebf 7629{
e0082a72 7630 basic_block bb;
0b17ab2f 7631 int x;
a13d4ebf 7632 struct ls_expr * ptr;
adfcce61 7633 int update_flow = 0;
aaa4ca30 7634
a13d4ebf
AM
7635 if (gcse_file)
7636 {
7637 fprintf (gcse_file, "before store motion\n");
7638 print_rtl (gcse_file, get_insns ());
7639 }
7640
a13d4ebf 7641 init_alias_analysis ();
aaa4ca30 7642
47a3dae1 7643 /* Find all the available and anticipatable stores. */
a13d4ebf
AM
7644 num_stores = compute_store_table ();
7645 if (num_stores == 0)
7646 {
aaa4ca30 7647 sbitmap_vector_free (reg_set_in_block);
a13d4ebf
AM
7648 end_alias_analysis ();
7649 return;
7650 }
7651
47a3dae1 7652 /* Now compute kill & transp vectors. */
a13d4ebf 7653 build_store_vectors ();
47a3dae1 7654 add_noreturn_fake_exit_edges ();
a13d4ebf 7655
589005ff
KH
7656 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7657 st_antloc, ae_kill, &pre_insert_map,
a13d4ebf
AM
7658 &pre_delete_map);
7659
7660 /* Now we want to insert the new stores which are going to be needed. */
7661 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7662 {
e0082a72
ZD
7663 FOR_EACH_BB (bb)
7664 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7665 delete_store (ptr, bb);
a13d4ebf 7666
0b17ab2f
RH
7667 for (x = 0; x < NUM_EDGES (edge_list); x++)
7668 if (TEST_BIT (pre_insert_map[x], ptr->index))
7669 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
a13d4ebf
AM
7670 }
7671
7672 if (update_flow)
7673 commit_edge_insertions ();
aaa4ca30 7674
a13d4ebf
AM
7675 free_store_memory ();
7676 free_edge_list (edge_list);
7677 remove_fake_edges ();
7678 end_alias_analysis ();
7679}
e2500fed 7680
a0134312
RS
7681\f
7682/* Entry point for jump bypassing optimization pass. */
7683
7684int
1d088dee 7685bypass_jumps (FILE *file)
a0134312
RS
7686{
7687 int changed;
7688
7689 /* We do not construct an accurate cfg in functions which call
7690 setjmp, so just punt to be safe. */
7691 if (current_function_calls_setjmp)
7692 return 0;
7693
7694 /* For calling dump_foo fns from gdb. */
7695 debug_stderr = stderr;
7696 gcse_file = file;
7697
7698 /* Identify the basic block information for this function, including
7699 successors and predecessors. */
7700 max_gcse_regno = max_reg_num ();
7701
7702 if (file)
7703 dump_flow_info (file);
7704
7705 /* Return if there's nothing to do. */
7706 if (n_basic_blocks <= 1)
7707 return 0;
7708
7709 /* Trying to perform global optimizations on flow graphs which have
7710 a high connectivity will take a long time and is unlikely to be
7711 particularly useful.
7712
7713 In normal circumstances a cfg should have about twice as many edges
7714 as blocks. But we do not want to punish small functions which have
7715 a couple switch statements. So we require a relatively large number
7716 of basic blocks and the ratio of edges to blocks to be high. */
7717 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7718 {
7719 if (warn_disabled_optimization)
7720 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7721 n_basic_blocks, n_edges / n_basic_blocks);
7722 return 0;
7723 }
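
  /* As a worked example with illustrative numbers: a function with
     1200 basic blocks and 30000 edges has 30000 / 1200 = 25 edges per
     block, so the test above fires (1200 > 1000 and 25 >= 20) and the
     pass is skipped; a typical function with roughly twice as many
     edges as blocks is unaffected.  */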
7724
7725 /* If allocating memory for the cprop bitmap would take up too much
7726 storage it's better just to disable the optimization. */
7727 if ((n_basic_blocks
7728 * SBITMAP_SET_SIZE (max_gcse_regno)
7729 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7730 {
7731 if (warn_disabled_optimization)
7732 warning ("GCSE disabled: %d basic blocks and %d registers",
7733 n_basic_blocks, max_gcse_regno);
7734
7735 return 0;
7736 }
7737
a0134312
RS
7738 gcc_obstack_init (&gcse_obstack);
7739 bytes_used = 0;
7740
7741 /* We need alias. */
7742 init_alias_analysis ();
7743
7744 /* Record where pseudo-registers are set. This data is kept accurate
7745 during each pass. ??? We could also record hard-reg information here
7746 [since it's unchanging], however it is currently done during hash table
7747 computation.
7748
7749 It may be tempting to compute MEM set information here too, but MEM sets
7750 will be subject to code motion one day and thus we need to compute
7751 information about memory sets when we build the hash tables. */
7752
7753 alloc_reg_set_mem (max_gcse_regno);
7754 compute_sets (get_insns ());
7755
7756 max_gcse_regno = max_reg_num ();
7757 alloc_gcse_mem (get_insns ());
7758 changed = one_cprop_pass (1, 1, 1);
7759 free_gcse_mem ();
7760
7761 if (file)
7762 {
7763 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7764 current_function_name, n_basic_blocks);
7765 fprintf (file, "%d bytes\n\n", bytes_used);
7766 }
7767
7768 obstack_free (&gcse_obstack, NULL);
7769 free_reg_set_mem ();
7770
7771 /* We are finished with alias. */
7772 end_alias_analysis ();
7773 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7774
7775 return changed;
7776}
7777
e2500fed 7778#include "gt-gcse.h"