/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
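
/* To make the steps above concrete, a minimal sketch of a PRE
   transformation (block numbers and pseudo-reg names are invented for
   illustration, not taken from any real dump).  Suppose "a + b" is
   computed on one path into B3 but not the other:

       B1: r1 = a + b              B1: r1 = a + b; r9 = r1
          |    \                      |    \
          |     B2:          ==>      |     B2: r9 = a + b
          |    /                      |    /
       B3: r2 = a + b              B3: r2 = r9

   The occurrence in B3 is partially redundant: it is available on the
   path through B1 but not through B2.  Step 4 inserts the computation
   on the B2 path, and the deletion step replaces the B3 computation
   with a copy from the new pseudo r9; the later cprop pass then cleans
   up the copies.  */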
\f
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
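
/* For example (illustrative only), if a block computes "a + b" twice:

       insn 10:  r5 = a + b    <- anticipatable occurrence (first in block)
       insn 20:  r6 = a + b    <- available occurrence (last in block)

   then insn 10 ends up on the expression's antic_occr list and insn 20
   on its avail_occr list, assuming the operands A and B are not set
   between the block boundaries and the respective occurrences.  */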

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression or the copy propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
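
/* A minimal sketch (not part of the pass itself) of how this table is
   walked; compute_transp does essentially the following for a pseudo
   register REGNO instead of scanning every basic block:

       struct reg_set *r;
       for (r = reg_set_table[REGNO]; r != NULL; r = r->next)
         ... the register is set somewhere in block r->bb_index ...  */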

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves, i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance: all stores copy the same value to the reaching reg used
   for the loads.  This means all values used to store into memory must have
   no side effects, so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;
\f
/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

665
b5ce41ff
JL
666 /* We do not construct an accurate cfg in functions which call
667 setjmp, so just punt to be safe. */
7506f491 668 if (current_function_calls_setjmp)
e78d9500 669 return 0;
589005ff 670
b5ce41ff
JL
671 /* Assume that we do not need to run jump optimizations after gcse. */
672 run_jump_opt_after_gcse = 0;
673
b5ce41ff
JL
674 /* Identify the basic block information for this function, including
675 successors and predecessors. */
7506f491 676 max_gcse_regno = max_reg_num ();
7506f491 677
6fb5fa3c
DB
678 df_note_add_problem ();
679 df_analyze ();
680
10d22567 681 if (dump_file)
5b4fdb20 682 dump_flow_info (dump_file, dump_flags);
a42cd965 683
d128effb 684 /* Return if there's nothing to do, or it is too expensive. */
ab9a1ff8
SB
685 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
686 || is_too_expensive (_("GCSE disabled")))
a18820c6 687 return 0;
7b1b4aed 688
7506f491 689 gcc_obstack_init (&gcse_obstack);
a42cd965 690 bytes_used = 0;
7506f491 691
a13d4ebf
AM
692 /* We need alias. */
693 init_alias_analysis ();
c4c81601
RK
694 /* Record where pseudo-registers are set. This data is kept accurate
695 during each pass. ??? We could also record hard-reg information here
696 [since it's unchanging], however it is currently done during hash table
697 computation.
b5ce41ff 698
c4c81601
RK
699 It may be tempting to compute MEM set information here too, but MEM sets
700 will be subject to code motion one day and thus we need to compute
b5ce41ff 701 information about memory sets when we build the hash tables. */
7506f491
DE
702
703 alloc_reg_set_mem (max_gcse_regno);
eb232f4e 704 compute_sets ();
7506f491
DE
705
706 pass = 0;
707 initial_bytes_used = bytes_used;
708 max_pass_bytes = 0;
709 gcse_obstack_bottom = gcse_alloc (1);
710 changed = 1;
740f35a0 711 while (changed && pass < MAX_GCSE_PASSES)
7506f491
DE
712 {
713 changed = 0;
10d22567
ZD
714 if (dump_file)
715 fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);
7506f491
DE
716
717 /* Initialize bytes_used to the space for the pred/succ lists,
718 and the reg_set_table data. */
719 bytes_used = initial_bytes_used;
720
721 /* Each pass may create new registers, so recalculate each time. */
722 max_gcse_regno = max_reg_num ();
723
eb232f4e 724 alloc_gcse_mem ();
7506f491 725
b5ce41ff
JL
726 /* Don't allow constant propagation to modify jumps
727 during this pass. */
27fb79ad 728 timevar_push (TV_CPROP1);
eb232f4e 729 changed = one_cprop_pass (pass + 1, false, false);
27fb79ad 730 timevar_pop (TV_CPROP1);
7506f491
DE
731
732 if (optimize_size)
e83f4801 733 /* Do nothing. */ ;
7506f491 734 else
589005ff 735 {
27fb79ad 736 timevar_push (TV_PRE);
a42cd965 737 changed |= one_pre_gcse_pass (pass + 1);
a13d4ebf
AM
738 /* We may have just created new basic blocks. Release and
739 recompute various things which are sized on the number of
740 basic blocks. */
741 if (changed)
742 {
73991d6a 743 free_modify_mem_tables ();
9fe15a12
KG
744 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
745 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
a13d4ebf 746 }
a42cd965
AM
747 free_reg_set_mem ();
748 alloc_reg_set_mem (max_reg_num ());
eb232f4e 749 compute_sets ();
a42cd965 750 run_jump_opt_after_gcse = 1;
27fb79ad 751 timevar_pop (TV_PRE);
a42cd965 752 }
7506f491
DE
753
754 if (max_pass_bytes < bytes_used)
755 max_pass_bytes = bytes_used;
756
bb457bd9
JL
757 /* Free up memory, then reallocate for code hoisting. We can
758 not re-use the existing allocated memory because the tables
759 will not have info for the insns or registers created by
760 partial redundancy elimination. */
7506f491
DE
761 free_gcse_mem ();
762
5d3cc252 763 /* It does not make sense to run code hoisting unless we are optimizing
bb457bd9
JL
764 for code size -- it rarely makes programs faster, and can make
765 them bigger if we did partial redundancy elimination (when optimizing
e83f4801 766 for space, we don't run the partial redundancy algorithms). */
bb457bd9 767 if (optimize_size)
589005ff 768 {
27fb79ad 769 timevar_push (TV_HOIST);
bb457bd9 770 max_gcse_regno = max_reg_num ();
eb232f4e 771 alloc_gcse_mem ();
bb457bd9
JL
772 changed |= one_code_hoisting_pass ();
773 free_gcse_mem ();
774
775 if (max_pass_bytes < bytes_used)
776 max_pass_bytes = bytes_used;
27fb79ad 777 timevar_pop (TV_HOIST);
589005ff 778 }
bb457bd9 779
10d22567 780 if (dump_file)
7506f491 781 {
10d22567
ZD
782 fprintf (dump_file, "\n");
783 fflush (dump_file);
7506f491 784 }
c4c81601 785
7506f491
DE
786 obstack_free (&gcse_obstack, gcse_obstack_bottom);
787 pass++;
788 }
789
b5ce41ff
JL
790 /* Do one last pass of copy propagation, including cprop into
791 conditional jumps. */
792
793 max_gcse_regno = max_reg_num ();
eb232f4e 794 alloc_gcse_mem ();
b5ce41ff 795 /* This time, go ahead and allow cprop to alter jumps. */
27fb79ad 796 timevar_push (TV_CPROP2);
bae8b6b2 797 one_cprop_pass (pass + 1, true, true);
27fb79ad 798 timevar_pop (TV_CPROP2);
b5ce41ff 799 free_gcse_mem ();
7506f491 800
10d22567 801 if (dump_file)
7506f491 802 {
10d22567 803 fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
faed5cc3 804 current_function_name (), n_basic_blocks);
10d22567 805 fprintf (dump_file, "%d pass%s, %d bytes\n\n",
7506f491
DE
806 pass, pass > 1 ? "es" : "", max_pass_bytes);
807 }
808
6496a589 809 obstack_free (&gcse_obstack, NULL);
7506f491 810 free_reg_set_mem ();
7b1b4aed 811
a13d4ebf
AM
812 /* We are finished with alias. */
813 end_alias_analysis ();
a13d4ebf 814
47a3dae1 815 if (!optimize_size && flag_gcse_sm)
27fb79ad
SB
816 {
817 timevar_push (TV_LSM);
818 store_motion ();
819 timevar_pop (TV_LSM);
820 }
47a3dae1 821
a13d4ebf 822 /* Record where pseudo-registers are set. */
e78d9500 823 return run_jump_opt_after_gcse;
7506f491
DE
824}
825\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insns that are not inside a basic block,
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
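
/* As an illustrative example (values invented), consider a block B that
   contains only:

       insn 1:  r7 = a + b
       insn 2:  a = 0

   For the expression "a + b": ANTLOC[B] is set because insn 1 is the
   first occurrence and A and B are unmodified before it; COMP[B] is
   clear because A is modified after the last occurrence; TRANSP[B] is
   clear because an operand is modified somewhere in the block.  */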
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
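
/* For example (illustrative only), given a block

       insn 1:  a = ...
       insn 2:  r8 = a + b
       insn 3:  b = ...

   oprs_unchanged_p for "a + b" at insn 2 returns 0 with AVAIL_P == 0
   (A is set before insn 2 in the block) and 0 with AVAIL_P != 0 as well
   (B is set after insn 2); with insn 1 removed, the AVAIL_P == 0 query
   would return 1.  */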

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly memory reference, we aren't going to be
     changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
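
/* A sketch of the calling convention described above (hypothetical use,
   not a quotation from elsewhere in this file): to ask whether the load
   X can be killed anywhere in block BB, one would write

       if (load_killed_in_block_p (bb, max_uid + 1, x, 0))
         ... the load is killed somewhere in BB ...  */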

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if X is equivalent to Y.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}

02280659 1480/* Insert expression X in INSN in the hash TABLE.
7506f491
DE
1481 If it is already present, record it as the last occurrence in INSN's
1482 basic block.
1483
1484 MODE is the mode of the value X is being stored into.
1485 It is only used if X is a CONST_INT.
1486
cc2902df
KH
1487 ANTIC_P is nonzero if X is an anticipatable expression.
1488 AVAIL_P is nonzero if X is an available expression. */
7506f491
DE
1489
1490static void
1d088dee
AJ
1491insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1492 int avail_p, struct hash_table *table)
7506f491
DE
1493{
1494 int found, do_not_record_p;
1495 unsigned int hash;
1496 struct expr *cur_expr, *last_expr = NULL;
1497 struct occr *antic_occr, *avail_occr;
7506f491 1498
02280659 1499 hash = hash_expr (x, mode, &do_not_record_p, table->size);
7506f491
DE
1500
1501 /* Do not insert expression in table if it contains volatile operands,
1502 or if hash_expr determines the expression is something we don't want
1503 to or can't handle. */
1504 if (do_not_record_p)
1505 return;
1506
02280659 1507 cur_expr = table->table[hash];
7506f491
DE
1508 found = 0;
1509
c4c81601 1510 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1511 {
1512 /* If the expression isn't found, save a pointer to the end of
1513 the list. */
1514 last_expr = cur_expr;
1515 cur_expr = cur_expr->next_same_hash;
1516 }
1517
1518 if (! found)
1519 {
703ad42b 1520 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 1521 bytes_used += sizeof (struct expr);
02280659 1522 if (table->table[hash] == NULL)
c4c81601 1523 /* This is the first pattern that hashed to this index. */
02280659 1524 table->table[hash] = cur_expr;
7506f491 1525 else
c4c81601
RK
1526 /* Add EXPR to end of this hash chain. */
1527 last_expr->next_same_hash = cur_expr;
1528
589005ff 1529 /* Set the fields of the expr element. */
7506f491 1530 cur_expr->expr = x;
02280659 1531 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
1532 cur_expr->next_same_hash = NULL;
1533 cur_expr->antic_occr = NULL;
1534 cur_expr->avail_occr = NULL;
1535 }
1536
1537 /* Now record the occurrence(s). */
7506f491
DE
1538 if (antic_p)
1539 {
1540 antic_occr = cur_expr->antic_occr;
1541
b6e47ceb
JL
1542 if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1543 antic_occr = NULL;
7506f491
DE
1544
1545 if (antic_occr)
c4c81601
RK
1546 /* Found another instance of the expression in the same basic block.
1547 Prefer the currently recorded one. We want the first one in the
1548 block and the block is scanned from start to end. */
1549 ; /* nothing to do */
7506f491
DE
1550 else
1551 {
1552 /* First occurrence of this expression in this basic block. */
703ad42b 1553 antic_occr = gcse_alloc (sizeof (struct occr));
7506f491 1554 bytes_used += sizeof (struct occr);
7506f491 1555 antic_occr->insn = insn;
b6e47ceb 1556 antic_occr->next = cur_expr->antic_occr;
f9957958 1557 antic_occr->deleted_p = 0;
b6e47ceb 1558 cur_expr->antic_occr = antic_occr;
7506f491
DE
1559 }
1560 }
1561
1562 if (avail_p)
1563 {
1564 avail_occr = cur_expr->avail_occr;
1565
b6e47ceb 1566 if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
7506f491 1567 {
b6e47ceb
JL
1568 /* Found another instance of the expression in the same basic block.
1569 Prefer this occurrence to the currently recorded one. We want
1570 the last one in the block and the block is scanned from start
1571 to end. */
1572 avail_occr->insn = insn;
7506f491 1573 }
7506f491
DE
1574 else
1575 {
1576 /* First occurrence of this expression in this basic block. */
703ad42b 1577 avail_occr = gcse_alloc (sizeof (struct occr));
7506f491 1578 bytes_used += sizeof (struct occr);
7506f491 1579 avail_occr->insn = insn;
b6e47ceb 1580 avail_occr->next = cur_expr->avail_occr;
f9957958 1581 avail_occr->deleted_p = 0;
b6e47ceb 1582 cur_expr->avail_occr = avail_occr;
7506f491
DE
1583 }
1584 }
1585}
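
/* To illustrate the occurrence bookkeeping above (the expression and
   register number are invented for the example): if
   (plus (reg 100) (const_int 4)) appears three times in one block, the
   antic_occr list keeps the first occurrence, since anticipatability is
   a property of the block start, while avail_occr is overwritten so it
   ends up naming the last occurrence, since availability is a property
   of the block end.  */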

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}

/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers to be constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
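
/* Illustrative cases for the rules above (modes and register numbers
   are invented): (compare (const_int 2) (const_int 1)) is constant, and
   so is (compare (reg:SI 100) (reg:SI 100)); but
   (compare (reg:DF 100) (reg:DF 100)) is not, because a NaN compares
   unequal to itself.  */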

/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* See if a REG_NOTE shows this equivalent to a simpler expression.
	 This allows us to do a single GCSE pass and still eliminate
	 redundant constants, addresses or other expressions that are
	 constructed with multiple instructions.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
	  && (table->set_p
	      ? gcse_constant_p (XEXP (note, 0))
	      : want_to_gcse_p (XEXP (note, 0))))
	src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point, only function parameters should have
	     REG_EQUIV notes, and if the argument slot is used somewhere
	     explicitly, it means the address of the parameter has been
	     taken, so we should not extend the lifetime of the pseudo.  */
	  && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  The latter condition does not have to mean that
	     SRC itself is not anticipatable, but we just will not be
	     able to handle code motion of insns with multiple sets.  */
	  int antic_p = oprs_anticipatable_p (src, insn)
			&& !multiple_sets (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
	}

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((REG_P (src)
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p (GET_MODE (dest))
		    && REGNO (src) != regno)
		   || gcse_constant_p (src))
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BB_END (BLOCK_FOR_INSN (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn, table);
    }
  /* In case of a store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point, only function parameters should have
	     REG_EQUIV notes, and if the argument slot is used somewhere
	     explicitly, it means the address of the parameter has been
	     taken, so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, table);
	}
    }
}
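
/* As an illustration of the REG_EQUAL handling above (the target idiom
   and register number are invented for the example): an address built
   with two insns,

       (set (reg 100) (high (symbol_ref "x")))
       (set (reg 100) (lo_sum (reg 100) (symbol_ref "x")))

   where the second insn carries a (REG_EQUAL (symbol_ref "x")) note,
   can be hashed as the note's simple SYMBOL_REF rather than as the
   LO_SUM, so one GCSE pass can treat the whole sequence as a single
   redundant expression.  */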

static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}
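
/* For example (the CUIDs are invented): if a register is set by the
   insns with CUIDs 12 and 17 in the current block, first_set is 12, so
   a use at any earlier CUID still sees the value from a predecessor and
   is anticipatable; last_set is 17, so availability at the block end
   depends only on the insns after CUID 17; and the register's bit in
   reg_set_in_block marks the block as not transparent for expressions
   that use it.  */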


/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
		   void * v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
}
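
/* For illustration: after stores to A and then B are recorded in the
   same block, the list (built LIFO) reads

       canon_modify_mem_list[bb]:  B, B_addr, A, A_addr, ...

   i.e. each MEM is immediately followed by its canonicalized address,
   which is why compute_transp below pulls the entries off in pairs.  */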

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    {
      /* Note that traversals of this loop (other than for free-ing)
	 will break after encountering a CALL_INSN.  So, there's no
	 need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
	alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (blocks_with_calls, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest)
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* re-Cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      FOR_BB_INSNS (current_bb, insn)
	{
	  if (! INSN_P (insn))
	    continue;

	  if (CALL_P (insn))
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  record_last_reg_set_info (insn, regno);

	      mark_call (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
	  && implicit_sets[current_bb->index] != NULL_RTX)
	hash_scan_set (implicit_sets[current_bb->index],
		       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */
      in_libcall_block = 0;
      FOR_BB_INSNS (current_bb, insn)
	if (INSN_P (insn))
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	    hash_scan_insn (insn, table, in_libcall_block);
	    if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	  }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}

/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}
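
/* Working the arithmetic above through an example: a function of 1000
   insns gets 1000 / 4 = 250 buckets, made odd (251) by the |= 1, while
   any function small enough that n_insns / 4 < 11 gets the 11-bucket
   minimum.  */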

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}

/* Compute the hash TABLE for doing copy/const propagation or
   expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
\f
/* Expression tracking support.  */

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list ; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
	free_EXPR_LIST_node (list);
      else
	free_INSN_LIST_node (list);
    }

  *listp = NULL;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables (void)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      free_INSN_LIST_list (modify_mem_list + i);
      free_insn_expr_list_list (canon_modify_mem_list + i);
    }
  bitmap_clear (modify_mem_list_set);
  bitmap_clear (blocks_with_calls);
}

/* Release memory used by modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}

/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (rtx x, rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
				  INSN_CUID (insn), x, 0))
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (MEM_P (dest))
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (REG_P (clob))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[r->bb_index], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[r->bb_index], indx);
	    }
	}

      return;

    case MEM:
      if (! MEM_READONLY_P (x))
	{
	  bitmap_iterator bi;
	  unsigned bb_index;

	  /* First handle all the blocks with calls.  We don't need to
	     do any list walking for them.  */
	  EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
	    {
	      if (set_p)
		SET_BIT (bmap[bb_index], indx);
	      else
		RESET_BIT (bmap[bb_index], indx);
	    }

	  /* Now iterate over the blocks which have memory modifications
	     but which do not have any calls.  */
	  EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
					  blocks_with_calls,
					  0, bb_index, bi)
	    {
	      rtx list_entry = canon_modify_mem_list[bb_index];

	      while (list_entry)
		{
		  rtx dest, dest_addr;

		  /* LIST_ENTRY must be an INSN of some kind that sets memory.
		     Examine each hunk of memory that is modified.  */

		  dest = XEXP (list_entry, 0);
		  list_entry = XEXP (list_entry, 1);
		  dest_addr = XEXP (list_entry, 0);

		  if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					     x, rtx_addr_varies_p))
		    {
		      if (set_p)
			SET_BIT (bmap[bb_index], indx);
		      else
			RESET_BIT (bmap[bb_index], indx);
		      break;
		    }
		  list_entry = XEXP (list_entry, 1);
		}
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
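
/* For illustration (the expression is invented): given
   (plus (reg 100) (const_int 1)), the INDX bit is flipped in every
   block that sets (reg 100); for a MEM, it is flipped in every block
   containing a call, and in every block containing a store that
   canon_true_dependence says may alias the MEM.  */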

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  /* Usually we substitute easy stuff, so we won't copy everything.
     However, we need to take care not to duplicate non-trivial CONST
     expressions.  */
  to = copy_rtx (to);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a REG_EQUAL note, update the expression in it
     with our replacement.  */
  if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
    set_unique_reg_note (insn, REG_EQUAL,
			 simplify_replace_rtx (XEXP (note, 0), from, to));
  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
	 source of the set given our substitution.  We could perhaps try
	 this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;

      /* If we've failed to do replacement, have a single SET, don't already
	 have a note, and have no special SET, add a REG_EQUAL note to not
	 lose information.  */
      if (!success && note == 0 && set != 0
	  && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
	  && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
	note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  /* REG_EQUAL may get simplified into register.
     We don't allow that.  Remove that note.  This ought not to happen,
     because the code above ought to have synthesized a reg-reg move,
     but be on the safe side.  */
  if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
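
/* A sketch of the effect (register numbers invented): replacing
   (reg 101) by (const_int 4) in

       (set (reg 102) (plus (reg 101) (const_int 3)))

   first yields (plus (const_int 4) (const_int 3)), which the
   simplify_rtx call above then folds to (const_int 7).  */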

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      gcc_assert (GET_CODE (set->expr) == SET);

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (! REG_P (src))
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
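
/* For illustration (register numbers invented), with both

       (set (reg 101) (reg 100))
       (set (reg 102) (reg 101))

   available on entry to INSN's block, a lookup for register 102 walks
   the chain through 101 to 100 and returns the last set whose source is
   still unmodified, so a use of (reg 102) can be replaced directly by
   (reg 100).  */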

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute the given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
	? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure that the value computed inside the jump insn is equivalent
	 to the one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  global_const_prop_count++;
  if (dump_file != NULL)
    {
      fprintf (dump_file,
	       "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
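
/* A sketch of the common case (pseudo and label numbers invented):
   propagating (const_int 0) for (reg 100) into

       (set (pc) (if_then_else (eq (reg 100) (const_int 0))
				(label_ref 23) (pc)))

   lets simplify_replace_rtx fold the condition, the SET_SRC collapses
   to (label_ref 23), the jump becomes unconditional, and the now dead
   fallthrough edge is removed by purge_dead_edges.  */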

static bool
constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (NONJUMP_INSN_P (insn)
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code; we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}

/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (rtx insn, int alter_jumps)
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      gcc_assert (GET_CODE (pat) == SET);

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      global_const_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
		  print_rtl (dump_file, src);
		  fprintf (dump_file, "\n");
		}
	      if (INSN_DELETED_P (insn))
		return 1;
	    }
	}
      else if (REG_P (src)
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      global_copy_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (dump_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}

/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
	 stack pushes for function arguments, but handle all of the
	 codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
	 the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
	return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}
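
/* For illustration (the mode and register are invented): on a 32-bit
   target, (subreg:SI (reg:DI 100) 4) used as a destination writes only
   one word of the DImode pseudo and implicitly keeps the other, so it
   is really an input-output context; the SUBREG case above therefore
   declines to record such uses.  */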
1d088dee 3057
8ba46434
R
3058/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3059 their REG_EQUAL notes need updating. */
e197b6fc 3060
ae860ff7 3061static bool
eb232f4e 3062do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
ae860ff7
JH
3063{
3064 rtx newreg = NULL, newcnst = NULL;
3065
e197b6fc
RH
3066 /* Rule out USE instructions and ASM statements as we don't want to
3067 change the hard registers mentioned. */
7b1b4aed 3068 if (REG_P (x)
ae860ff7 3069 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
e197b6fc
RH
3070 || (GET_CODE (PATTERN (insn)) != USE
3071 && asm_noperands (PATTERN (insn)) < 0)))
ae860ff7
JH
3072 {
3073 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3074 struct elt_loc_list *l;
3075
3076 if (!val)
3077 return false;
3078 for (l = val->locs; l; l = l->next)
3079 {
3080 rtx this_rtx = l->loc;
46690369
JH
3081 rtx note;
3082
5976e643
RS
3083 /* Don't CSE non-constant values out of libcall blocks. */
3084 if (l->in_libcall && ! CONSTANT_P (this_rtx))
9635cfad
JH
3085 continue;
3086
6b2d1c9e 3087 if (gcse_constant_p (this_rtx))
ae860ff7 3088 newcnst = this_rtx;
46690369
JH
3089 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3090 /* Don't copy propagate if it has attached REG_EQUIV note.
3091 At this point this only function parameters should have
3092 REG_EQUIV notes and if the argument slot is used somewhere
3093 explicitly, it means address of parameter has been taken,
3094 so we should not extend the lifetime of the pseudo. */
3095 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
7b1b4aed 3096 || ! MEM_P (XEXP (note, 0))))
ae860ff7
JH
3097 newreg = this_rtx;
3098 }
3099 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3100 {
8ba46434 3101 /* If we find a case where we can't fix the retval REG_EQUAL notes
fbe5a4a6 3102 match the new register, we either have to abandon this replacement
8ba46434 3103 or fix delete_trivially_dead_insns to preserve the setting insn,
6fb5fa3c 3104 or make it delete the REG_EQUAL note, and fix up all passes that
8ba46434 3105 require the REG_EQUAL note there. */
282899df
NS
3106 bool adjusted;
3107
3108 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3109 gcc_assert (adjusted);
3110
10d22567 3111 if (dump_file != NULL)
ae860ff7 3112 {
10d22567 3113 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
ae860ff7 3114 REGNO (x));
10d22567 3115 fprintf (dump_file, "insn %d with constant ",
ae860ff7 3116 INSN_UID (insn));
10d22567
ZD
3117 print_rtl (dump_file, newcnst);
3118 fprintf (dump_file, "\n");
ae860ff7 3119 }
27fb79ad 3120 local_const_prop_count++;
ae860ff7
JH
3121 return true;
3122 }
3123 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3124 {
8ba46434 3125 adjust_libcall_notes (x, newreg, insn, libcall_sp);
10d22567 3126 if (dump_file != NULL)
ae860ff7 3127 {
10d22567 3128 fprintf (dump_file,
ae860ff7
JH
3129 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3130 REGNO (x), INSN_UID (insn));
10d22567 3131 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
ae860ff7 3132 }
27fb79ad 3133 local_copy_prop_count++;
ae860ff7
JH
3134 return true;
3135 }
3136 }
3137 return false;
3138}
3139
8ba46434
R
3140/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3141 their REG_EQUAL notes need updating to reflect that OLDREG has been
f4e3e618
RH
3142 replaced with NEWVAL in INSN. Return true if all substitutions could
3143 be made. */
8ba46434 3144static bool
1d088dee 3145adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
8ba46434 3146{
f4e3e618 3147 rtx end;
8ba46434
R
3148
3149 while ((end = *libcall_sp++))
3150 {
f4e3e618 3151 rtx note = find_reg_equal_equiv_note (end);
8ba46434
R
3152
3153 if (! note)
3154 continue;
3155
3156 if (REG_P (newval))
3157 {
3158 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3159 {
3160 do
3161 {
3162 note = find_reg_equal_equiv_note (end);
3163 if (! note)
3164 continue;
3165 if (reg_mentioned_p (newval, XEXP (note, 0)))
3166 return false;
3167 }
3168 while ((end = *libcall_sp++));
3169 return true;
3170 }
3171 }
5976e643 3172 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
6fb5fa3c 3173 df_notes_rescan (end);
8ba46434
R
3174 insn = end;
3175 }
3176 return true;
3177}
3178
3179#define MAX_NESTED_LIBCALLS 9
3180
eb232f4e
SB
3181/* Do local const/copy propagation (i.e. within each basic block).
3182 If ALTER_JUMPS is true, allow propagating into jump insns, which
3183 could modify the CFG. */
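/* A source-level sketch of what one local pass can do (an editor's
   illustration, not from the original file):

     r1 = 7;              r1 = 7;
     r2 = r1;      ==>    r2 = 7;      -- local const prop
     r3 = r2 + 1;         r3 = 7 + 1;  -- exposed for later folding

   The within-block value equivalences come from cselib, and are
   discarded at every basic block boundary below.  */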
3184
ae860ff7 3185static void
eb232f4e 3186local_cprop_pass (bool alter_jumps)
ae860ff7 3187{
eb232f4e 3188 basic_block bb;
ae860ff7
JH
3189 rtx insn;
3190 struct reg_use *reg_used;
8ba46434 3191 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
1649d92f 3192 bool changed = false;
ae860ff7 3193
463301c3 3194 cselib_init (false);
8ba46434
R
3195 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3196 *libcall_sp = 0;
eb232f4e 3197 FOR_EACH_BB (bb)
ae860ff7 3198 {
eb232f4e 3199 FOR_BB_INSNS (bb, insn)
ae860ff7 3200 {
eb232f4e 3201 if (INSN_P (insn))
ae860ff7 3202 {
eb232f4e 3203 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
ae860ff7 3204
eb232f4e
SB
3205 if (note)
3206 {
3207 gcc_assert (libcall_sp != libcall_stack);
3208 *--libcall_sp = XEXP (note, 0);
3209 }
3210 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3211 if (note)
3212 libcall_sp++;
3213 note = find_reg_equal_equiv_note (insn);
3214 do
3215 {
3216 reg_use_count = 0;
3217 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3218 NULL);
3219 if (note)
3220 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3221
3222 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3223 reg_used++, reg_use_count--)
6fb5fa3c
DB
3224 {
3225 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3226 libcall_sp))
3227 {
3228 changed = true;
3229 break;
3230 }
3231 }
eb232f4e 3232 if (INSN_DELETED_P (insn))
1649d92f 3233 break;
eb232f4e
SB
3234 }
3235 while (reg_use_count);
ae860ff7 3236 }
eb232f4e 3237 cselib_process_insn (insn);
ae860ff7 3238 }
eb232f4e
SB
3239
3240 /* Forget everything at the end of a basic block. Make sure we are
3241 not inside a libcall; libcalls should never cross basic blocks. */
3242 cselib_clear_table ();
3243 gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
ae860ff7 3244 }
eb232f4e 3245
ae860ff7 3246 cselib_finish ();
eb232f4e 3247
1649d92f
JH
3248 /* Global analysis may get into infinite loops for unreachable blocks. */
3249 if (changed && alter_jumps)
5f0bea72
JH
3250 {
3251 delete_unreachable_blocks ();
3252 free_reg_set_mem ();
3253 alloc_reg_set_mem (max_reg_num ());
eb232f4e 3254 compute_sets ();
5f0bea72 3255 }
ae860ff7
JH
3256}
3257
c4c81601 3258/* Forward propagate copies. This includes copies and constants. Return
cc2902df 3259 nonzero if a change was made. */
7506f491
DE
3260
3261static int
1d088dee 3262cprop (int alter_jumps)
7506f491 3263{
e0082a72
ZD
3264 int changed;
3265 basic_block bb;
7506f491
DE
3266 rtx insn;
3267
3268 /* Note we start at block 1. */
e0082a72
ZD
3269 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3270 {
10d22567
ZD
3271 if (dump_file != NULL)
3272 fprintf (dump_file, "\n");
e0082a72
ZD
3273 return 0;
3274 }
7506f491
DE
3275
3276 changed = 0;
e0082a72 3277 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3278 {
3279 /* Reset tables used to keep track of what's still valid [since the
3280 start of the block]. */
3281 reset_opr_set_tables ();
3282
eb232f4e 3283 FOR_BB_INSNS (bb, insn)
172890a2
RK
3284 if (INSN_P (insn))
3285 {
ae860ff7 3286 changed |= cprop_insn (insn, alter_jumps);
7506f491 3287
172890a2
RK
3288 /* Keep track of everything modified by this insn. */
3289 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3290 call mark_oprs_set if we turned the insn into a NOTE. */
7b1b4aed 3291 if (! NOTE_P (insn))
172890a2 3292 mark_oprs_set (insn);
8e42ace1 3293 }
7506f491
DE
3294 }
3295
10d22567
ZD
3296 if (dump_file != NULL)
3297 fprintf (dump_file, "\n");
7506f491
DE
3298
3299 return changed;
3300}
3301
fbef91d8
RS
3302/* Similar to get_condition, only the resulting condition must be
3303 valid at JUMP, instead of at EARLIEST.
3304
3305 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3306 settle for the condition variable in the jump instruction being integral.
3307 We prefer to be able to record the value of a user variable, rather than
3308 the value of a temporary used in a condition. This could be solved by
aabcd309 3309 recording the value of *every* register scanned by canonicalize_condition,
fbef91d8
RS
3310 but this would require some code reorganization. */
3311
2fa4a849 3312rtx
1d088dee 3313fis_get_condition (rtx jump)
fbef91d8 3314{
45d09c02 3315 return get_condition (jump, NULL, false, true);
fbef91d8
RS
3316}
3317
b0656d8b
JW
3318/* Check the comparison COND to see if we can safely form an implicit set from
3319 it. COND is either an EQ or NE comparison. */
3320
3321static bool
3322implicit_set_cond_p (rtx cond)
3323{
3324 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3325 rtx cst = XEXP (cond, 1);
3326
3327 /* We can't perform this optimization if either operand might be or might
3328 contain a signed zero. */
3329 if (HONOR_SIGNED_ZEROS (mode))
3330 {
3331 /* It is sufficient to check if CST is or contains a zero. We must
3332 handle float, complex, and vector. If any subpart is a zero, then
3333 the optimization can't be performed. */
3334 /* ??? The complex and vector checks are not implemented yet. We just
3335 always return zero for them. */
3336 if (GET_CODE (cst) == CONST_DOUBLE)
3337 {
3338 REAL_VALUE_TYPE d;
3339 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3340 if (REAL_VALUES_EQUAL (d, dconst0))
3341 return 0;
3342 }
3343 else
3344 return 0;
3345 }
3346
3347 return gcse_constant_p (cst);
3348}
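/* Why signed zeros block the optimization (an editor's sketch, not
   part of the original source): "x == 0.0" is true for both +0.0 and
   -0.0, so treating the then-branch as though "x = 0.0" had executed
   would lose the sign of a negative zero:

     if (x == 0.0)
       y = 1.0 / x;    -- +inf for x == +0.0, but -inf for x == -0.0

   Hence the HONOR_SIGNED_ZEROS check above.  */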
3349
fbef91d8
RS
3350/* Find the implicit sets of a function. An "implicit set" is a constraint
3351 on the value of a variable, implied by a conditional jump. For example,
3352 following "if (x == 2)", the then branch may be optimized as though the
3353 conditional performed an "explicit set", in this example, "x = 2". This
3354 function records the set patterns that are implicit at the start of each
3355 basic block. */
3356
3357static void
1d088dee 3358find_implicit_sets (void)
fbef91d8
RS
3359{
3360 basic_block bb, dest;
3361 unsigned int count;
3362 rtx cond, new;
3363
3364 count = 0;
3365 FOR_EACH_BB (bb)
a98ebe2e 3366 /* Check for more than one successor. */
628f6a4e 3367 if (EDGE_COUNT (bb->succs) > 1)
fbef91d8 3368 {
a813c111 3369 cond = fis_get_condition (BB_END (bb));
fbef91d8
RS
3370
3371 if (cond
3372 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7b1b4aed 3373 && REG_P (XEXP (cond, 0))
fbef91d8 3374 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
b0656d8b 3375 && implicit_set_cond_p (cond))
fbef91d8
RS
3376 {
3377 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3378 : FALLTHRU_EDGE (bb)->dest;
3379
c5cbcccf 3380 if (dest && single_pred_p (dest)
fbef91d8
RS
3381 && dest != EXIT_BLOCK_PTR)
3382 {
3383 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3384 XEXP (cond, 1));
3385 implicit_sets[dest->index] = new;
10d22567 3386 if (dump_file)
fbef91d8 3387 {
10d22567 3388 fprintf(dump_file, "Implicit set of reg %d in ",
fbef91d8 3389 REGNO (XEXP (cond, 0)));
10d22567 3390 fprintf(dump_file, "basic block %d\n", dest->index);
fbef91d8
RS
3391 }
3392 count++;
3393 }
3394 }
3395 }
3396
10d22567
ZD
3397 if (dump_file)
3398 fprintf (dump_file, "Found %d implicit sets\n", count);
fbef91d8
RS
3399}
3400
7506f491 3401/* Perform one copy/constant propagation pass.
a0134312
RS
3402 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3403 propagation into conditional jumps. If BYPASS_JUMPS is true,
3404 perform conditional jump bypassing optimizations. */
7506f491
DE
3405
3406static int
eb232f4e 3407one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
7506f491
DE
3408{
3409 int changed = 0;
3410
27fb79ad
SB
3411 global_const_prop_count = local_const_prop_count = 0;
3412 global_copy_prop_count = local_copy_prop_count = 0;
7506f491 3413
a52b023a
PB
3414 if (cprop_jumps)
3415 local_cprop_pass (cprop_jumps);
ae860ff7 3416
fbef91d8 3417 /* Determine implicit sets. */
5ed6ace5 3418 implicit_sets = XCNEWVEC (rtx, last_basic_block);
fbef91d8
RS
3419 find_implicit_sets ();
3420
02280659
ZD
3421 alloc_hash_table (max_cuid, &set_hash_table, 1);
3422 compute_hash_table (&set_hash_table);
fbef91d8
RS
3423
3424 /* Free implicit_sets before peak usage. */
3425 free (implicit_sets);
3426 implicit_sets = NULL;
3427
10d22567
ZD
3428 if (dump_file)
3429 dump_hash_table (dump_file, "SET", &set_hash_table);
02280659 3430 if (set_hash_table.n_elems > 0)
7506f491 3431 {
02280659 3432 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 3433 compute_cprop_data ();
a0134312
RS
3434 changed = cprop (cprop_jumps);
3435 if (bypass_jumps)
0e3f0221 3436 changed |= bypass_conditional_jumps ();
7506f491
DE
3437 free_cprop_mem ();
3438 }
c4c81601 3439
02280659 3440 free_hash_table (&set_hash_table);
7506f491 3441
10d22567 3442 if (dump_file)
7506f491 3443 {
10d22567 3444 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
faed5cc3 3445 current_function_name (), pass, bytes_used);
10d22567 3446 fprintf (dump_file, "%d local const props, %d local copy props, ",
27fb79ad 3447 local_const_prop_count, local_copy_prop_count);
10d22567 3448 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
27fb79ad 3449 global_const_prop_count, global_copy_prop_count);
7506f491 3450 }
1649d92f
JH
3451 /* Global analysis may get into infinite loops for unreachable blocks. */
3452 if (changed && cprop_jumps)
3453 delete_unreachable_blocks ();
7506f491
DE
3454
3455 return changed;
3456}
3457\f
0e3f0221
RS
3458/* Bypass conditional jumps. */
3459
7821bfc7
RS
3460/* The value of last_basic_block at the beginning of the jump_bypass
3461 pass. The use of redirect_edge_and_branch_force may introduce new
3462 basic blocks, but the data flow analysis is only valid for basic
3463 block indices less than bypass_last_basic_block. */
3464
3465static int bypass_last_basic_block;
3466
0e3f0221
RS
3467/* Find a set of REGNO to a constant that is available at the end of basic
3468 block BB. Returns NULL if no such set is found. Based heavily upon
3469 find_avail_set. */
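/* An editor's sketch (not from the original source): if the sets

     r5 = 10;
     r6 = r5;

   are both available at the end of the block with index BB, then
   find_bypass_set (REGNO (r6), BB) follows the copy from r6 to r5
   and returns the constant set "r5 = 10".  */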
3470
3471static struct expr *
1d088dee 3472find_bypass_set (int regno, int bb)
0e3f0221
RS
3473{
3474 struct expr *result = 0;
3475
3476 for (;;)
3477 {
3478 rtx src;
ceda50e9 3479 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
3480
3481 while (set)
3482 {
3483 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3484 break;
3485 set = next_set (regno, set);
3486 }
3487
3488 if (set == 0)
3489 break;
3490
282899df 3491 gcc_assert (GET_CODE (set->expr) == SET);
0e3f0221
RS
3492
3493 src = SET_SRC (set->expr);
6b2d1c9e 3494 if (gcse_constant_p (src))
0e3f0221
RS
3495 result = set;
3496
7b1b4aed 3497 if (! REG_P (src))
0e3f0221
RS
3498 break;
3499
3500 regno = REGNO (src);
3501 }
3502 return result;
3503}
3504
3505
e129b3f9
RS
3506/* Subroutine of bypass_block that checks whether a pseudo is killed by
3507 any of the instructions inserted on an edge. Jump bypassing places
3508 condition code setters on CFG edges using insert_insn_on_edge. This
3509 function is required to check that our data flow analysis is still
3510 valid prior to commit_edge_insertions. */
3511
3512static bool
1d088dee 3513reg_killed_on_edge (rtx reg, edge e)
e129b3f9
RS
3514{
3515 rtx insn;
3516
6de9cd9a 3517 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
e129b3f9
RS
3518 if (INSN_P (insn) && reg_set_p (reg, insn))
3519 return true;
3520
3521 return false;
3522}
3523
0e3f0221
RS
3524/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3525 basic block BB which has more than one predecessor. If not NULL, SETCC
3526 is the first instruction of BB, which is immediately followed by JUMP_INSN
3527 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
3528 Returns nonzero if a change was made.
3529
e0bb17a8 3530 During the jump bypassing pass, we may place copies of SETCC instructions
e129b3f9
RS
3531 on CFG edges. The following routine must be careful to pay attention to
3532 these inserted insns when performing its transformations. */
0e3f0221
RS
3533
3534static int
1d088dee 3535bypass_block (basic_block bb, rtx setcc, rtx jump)
0e3f0221
RS
3536{
3537 rtx insn, note;
628f6a4e 3538 edge e, edest;
818b6b7f 3539 int i, change;
72b8d451 3540 int may_be_loop_header;
628f6a4e
BE
3541 unsigned removed_p;
3542 edge_iterator ei;
0e3f0221
RS
3543
3544 insn = (setcc != NULL) ? setcc : jump;
3545
3546 /* Determine set of register uses in INSN. */
3547 reg_use_count = 0;
3548 note_uses (&PATTERN (insn), find_used_regs, NULL);
3549 note = find_reg_equal_equiv_note (insn);
3550 if (note)
3551 find_used_regs (&XEXP (note, 0), NULL);
3552
72b8d451 3553 may_be_loop_header = false;
628f6a4e 3554 FOR_EACH_EDGE (e, ei, bb->preds)
72b8d451
ZD
3555 if (e->flags & EDGE_DFS_BACK)
3556 {
3557 may_be_loop_header = true;
3558 break;
3559 }
3560
0e3f0221 3561 change = 0;
628f6a4e 3562 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
0e3f0221 3563 {
628f6a4e
BE
3564 removed_p = 0;
3565
7821bfc7 3566 if (e->flags & EDGE_COMPLEX)
628f6a4e
BE
3567 {
3568 ei_next (&ei);
3569 continue;
3570 }
7821bfc7
RS
3571
3572 /* We can't redirect edges from new basic blocks. */
3573 if (e->src->index >= bypass_last_basic_block)
628f6a4e
BE
3574 {
3575 ei_next (&ei);
3576 continue;
3577 }
7821bfc7 3578
72b8d451 3579 /* The irreducible loops created by redirecting edges entering the
e0bb17a8
KH
3580 loop from outside would decrease the effectiveness of some of the
3581 following optimizations, so prevent this. */
72b8d451
ZD
3582 if (may_be_loop_header
3583 && !(e->flags & EDGE_DFS_BACK))
628f6a4e
BE
3584 {
3585 ei_next (&ei);
3586 continue;
3587 }
72b8d451 3588
0e3f0221
RS
3589 for (i = 0; i < reg_use_count; i++)
3590 {
3591 struct reg_use *reg_used = &reg_use_table[i];
589005ff 3592 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 3593 basic_block dest, old_dest;
589005ff
KH
3594 struct expr *set;
3595 rtx src, new;
0e3f0221 3596
589005ff
KH
3597 if (regno >= max_gcse_regno)
3598 continue;
0e3f0221 3599
589005ff 3600 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
3601
3602 if (! set)
3603 continue;
3604
e129b3f9 3605 /* Check the data flow is valid after edge insertions. */
6de9cd9a 3606 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
e129b3f9
RS
3607 continue;
3608
589005ff 3609 src = SET_SRC (pc_set (jump));
0e3f0221
RS
3610
3611 if (setcc != NULL)
3612 src = simplify_replace_rtx (src,
589005ff
KH
3613 SET_DEST (PATTERN (setcc)),
3614 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
3615
3616 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 3617 SET_SRC (set->expr));
0e3f0221 3618
1d088dee 3619 /* Jump bypassing may have already placed instructions on
e129b3f9
RS
3620 edges of the CFG. We can't bypass an outgoing edge that
3621 has instructions associated with it, as these insns won't
3622 get executed if the incoming edge is redirected. */
3623
589005ff 3624 if (new == pc_rtx)
e129b3f9
RS
3625 {
3626 edest = FALLTHRU_EDGE (bb);
6de9cd9a 3627 dest = edest->insns.r ? NULL : edest->dest;
e129b3f9 3628 }
0e3f0221 3629 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
3630 {
3631 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3632 /* Don't bypass edges containing instructions. */
c7d1b449
KH
3633 edest = find_edge (bb, dest);
3634 if (edest && edest->insns.r)
3635 dest = NULL;
e129b3f9 3636 }
0e3f0221
RS
3637 else
3638 dest = NULL;
3639
a544524a
JH
3640 /* Avoid unification of the edge with other edges from the original
3641 branch. We would end up emitting the instruction on "both"
3642 edges. */
7b1b4aed 3643
c7d1b449
KH
3644 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3645 && find_edge (e->src, dest))
3646 dest = NULL;
a544524a 3647
818b6b7f 3648 old_dest = e->dest;
7821bfc7
RS
3649 if (dest != NULL
3650 && dest != old_dest
3651 && dest != EXIT_BLOCK_PTR)
3652 {
3653 redirect_edge_and_branch_force (e, dest);
3654
818b6b7f 3655 /* Copy the register setter to the redirected edge.
0e3f0221
RS
3656 Don't copy CC0 setters, as CC0 is dead after jump. */
3657 if (setcc)
3658 {
3659 rtx pat = PATTERN (setcc);
818b6b7f 3660 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
3661 insert_insn_on_edge (copy_insn (pat), e);
3662 }
3663
10d22567 3664 if (dump_file != NULL)
0e3f0221 3665 {
10d22567 3666 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
27fb79ad 3667 "in jump_insn %d equals constant ",
818b6b7f 3668 regno, INSN_UID (jump));
10d22567
ZD
3669 print_rtl (dump_file, SET_SRC (set->expr));
3670 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 3671 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
3672 }
3673 change = 1;
628f6a4e 3674 removed_p = 1;
0e3f0221
RS
3675 break;
3676 }
3677 }
628f6a4e
BE
3678 if (!removed_p)
3679 ei_next (&ei);
0e3f0221
RS
3680 }
3681 return change;
3682}
3683
3684/* Find basic blocks with more than one predecessor that only contain a
3685 single conditional jump. If the result of the comparison is known at
3686 compile-time from any incoming edge, redirect that edge to the
9a71ece1
RH
3687 appropriate target. Returns nonzero if a change was made.
3688
3689 This function is now mis-named, because we also handle indirect jumps. */
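/* An editor's sketch of the transformation (not from the original
   source):

     bb1: x = 1;    goto bb3;
     bb2: x = f (); goto bb3;
     bb3: if (x == 1) goto bb4; else goto bb5;

   The outcome of the comparison is known along the bb1->bb3 edge, so
   that edge is redirected straight to bb4; the bb2->bb3 edge is left
   alone because nothing is known about the value f () returns.  */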
0e3f0221
RS
3690
3691static int
1d088dee 3692bypass_conditional_jumps (void)
0e3f0221
RS
3693{
3694 basic_block bb;
3695 int changed;
3696 rtx setcc;
3697 rtx insn;
3698 rtx dest;
3699
3700 /* Note we start at block 1. */
3701 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3702 return 0;
3703
7821bfc7 3704 bypass_last_basic_block = last_basic_block;
72b8d451 3705 mark_dfs_back_edges ();
7821bfc7 3706
0e3f0221
RS
3707 changed = 0;
3708 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 3709 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
3710 {
3711 /* Check for more than one predecessor. */
c5cbcccf 3712 if (!single_pred_p (bb))
0e3f0221
RS
3713 {
3714 setcc = NULL_RTX;
eb232f4e 3715 FOR_BB_INSNS (bb, insn)
4b4bf941 3716 if (NONJUMP_INSN_P (insn))
0e3f0221 3717 {
9543a9d2 3718 if (setcc)
0e3f0221 3719 break;
ba4f7968 3720 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
3721 break;
3722
ba4f7968 3723 dest = SET_DEST (PATTERN (insn));
818b6b7f 3724 if (REG_P (dest) || CC0_P (dest))
0e3f0221 3725 setcc = insn;
0e3f0221
RS
3726 else
3727 break;
3728 }
7b1b4aed 3729 else if (JUMP_P (insn))
0e3f0221 3730 {
9a71ece1
RH
3731 if ((any_condjump_p (insn) || computed_jump_p (insn))
3732 && onlyjump_p (insn))
0e3f0221
RS
3733 changed |= bypass_block (bb, setcc, insn);
3734 break;
3735 }
3736 else if (INSN_P (insn))
3737 break;
3738 }
3739 }
3740
818b6b7f 3741 /* If we bypassed any register setting insns, we inserted a
fbe5a4a6 3742 copy on the redirected edge. These need to be committed. */
0e3f0221 3743 if (changed)
62e5bf5d 3744 commit_edge_insertions ();
0e3f0221
RS
3745
3746 return changed;
3747}
3748\f
a65f3558 3749/* Compute PRE+LCM working variables. */
7506f491
DE
3750
3751/* Local properties of expressions. */
3752/* Nonzero for expressions that are transparent in the block. */
a65f3558 3753static sbitmap *transp;
7506f491 3754
5c35539b
RH
3755/* Nonzero for expressions that are transparent at the end of the block.
3756 This is only zero for expressions killed by an abnormal critical edge
3757 created by a call. */
a65f3558 3758static sbitmap *transpout;
5c35539b 3759
a65f3558
JL
3760/* Nonzero for expressions that are computed (available) in the block. */
3761static sbitmap *comp;
7506f491 3762
a65f3558
JL
3763/* Nonzero for expressions that are locally anticipatable in the block. */
3764static sbitmap *antloc;
7506f491 3765
a65f3558
JL
3766/* Nonzero for expressions where this block is an optimal computation
3767 point. */
3768static sbitmap *pre_optimal;
5c35539b 3769
a65f3558
JL
3770/* Nonzero for expressions which are redundant in a particular block. */
3771static sbitmap *pre_redundant;
7506f491 3772
a42cd965
AM
3773/* Nonzero for expressions which should be inserted on a specific edge. */
3774static sbitmap *pre_insert_map;
3775
3776/* Nonzero for expressions which should be deleted in a specific block. */
3777static sbitmap *pre_delete_map;
3778
3779/* Contains the edge_list returned by pre_edge_lcm. */
3780static struct edge_list *edge_list;
3781
a65f3558
JL
3782/* Redundant insns. */
3783static sbitmap pre_redundant_insns;
7506f491 3784
a65f3558 3785/* Allocate vars used for PRE analysis. */
7506f491
DE
3786
3787static void
1d088dee 3788alloc_pre_mem (int n_blocks, int n_exprs)
7506f491 3789{
a65f3558
JL
3790 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3791 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3792 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 3793
a42cd965
AM
3794 pre_optimal = NULL;
3795 pre_redundant = NULL;
3796 pre_insert_map = NULL;
3797 pre_delete_map = NULL;
a42cd965 3798 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 3799
a42cd965 3800 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
3801}
3802
a65f3558 3803/* Free vars used for PRE analysis. */
7506f491
DE
3804
3805static void
1d088dee 3806free_pre_mem (void)
7506f491 3807{
5a660bff
DB
3808 sbitmap_vector_free (transp);
3809 sbitmap_vector_free (comp);
bd3675fc
JL
3810
3811 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 3812
a42cd965 3813 if (pre_optimal)
5a660bff 3814 sbitmap_vector_free (pre_optimal);
a42cd965 3815 if (pre_redundant)
5a660bff 3816 sbitmap_vector_free (pre_redundant);
a42cd965 3817 if (pre_insert_map)
5a660bff 3818 sbitmap_vector_free (pre_insert_map);
a42cd965 3819 if (pre_delete_map)
5a660bff 3820 sbitmap_vector_free (pre_delete_map);
a42cd965 3821
bd3675fc 3822 transp = comp = NULL;
a42cd965 3823 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
7506f491
DE
3824}
3825
3826/* Top level routine to do the dataflow analysis needed by PRE. */
3827
3828static void
1d088dee 3829compute_pre_data (void)
7506f491 3830{
b614171e 3831 sbitmap trapping_expr;
e0082a72 3832 basic_block bb;
b614171e 3833 unsigned int ui;
c66e8ae9 3834
02280659 3835 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 3836 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 3837
b614171e 3838 /* Collect expressions which might trap. */
02280659 3839 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 3840 sbitmap_zero (trapping_expr);
02280659 3841 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
3842 {
3843 struct expr *e;
02280659 3844 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
3845 if (may_trap_p (e->expr))
3846 SET_BIT (trapping_expr, e->bitmap_index);
3847 }
3848
c66e8ae9
JL
3849 /* Compute ae_kill for each basic block using:
3850
3851 ~(TRANSP | COMP)
e83f4801 3852 */
c66e8ae9 3853
e0082a72 3854 FOR_EACH_BB (bb)
c66e8ae9 3855 {
b614171e 3856 edge e;
628f6a4e 3857 edge_iterator ei;
b614171e
MM
3858
3859 /* If the current block is the destination of an abnormal edge, we
3860 kill all trapping expressions because we won't be able to properly
3861 place the instruction on the edge. So make them neither
3862 anticipatable nor transparent. This is fairly conservative. */
628f6a4e 3863 FOR_EACH_EDGE (e, ei, bb->preds)
b614171e
MM
3864 if (e->flags & EDGE_ABNORMAL)
3865 {
e0082a72
ZD
3866 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3867 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
3868 break;
3869 }
3870
e0082a72
ZD
3871 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3872 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
3873 }
3874
10d22567 3875 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 3876 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 3877 sbitmap_vector_free (antloc);
bd3675fc 3878 antloc = NULL;
5a660bff 3879 sbitmap_vector_free (ae_kill);
589005ff 3880 ae_kill = NULL;
76ac938b 3881 sbitmap_free (trapping_expr);
7506f491
DE
3882}
3883\f
3884/* PRE utilities */
3885
cc2902df 3886/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 3887 block BB.
7506f491
DE
3888
3889 VISITED is a pointer to a working buffer for tracking which BB's have
3890 been visited. It is NULL for the top-level call.
3891
3892 We treat reaching expressions that go through blocks containing the same
3893 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3894 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3895 2 as not reaching. The intent is to improve the probability of finding
3896 only one reaching expression and to reduce register lifetimes by picking
3897 the closest such expression. */
3898
3899static int
1d088dee 3900pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
7506f491 3901{
36349f8b 3902 edge pred;
628f6a4e
BE
3903 edge_iterator ei;
3904
3905 FOR_EACH_EDGE (pred, ei, bb->preds)
7506f491 3906 {
e2d2ed72 3907 basic_block pred_bb = pred->src;
7506f491 3908
36349f8b 3909 if (pred->src == ENTRY_BLOCK_PTR
7506f491 3910 /* Has this predecessor already been visited? */
0b17ab2f 3911 || visited[pred_bb->index])
c4c81601
RK
3912 ;/* Nothing to do. */
3913
7506f491 3914 /* Does this predecessor generate this expression? */
0b17ab2f 3915 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
3916 {
3917 /* Is this the occurrence we're looking for?
3918 Note that there's only one generating occurrence per block
3919 so we just need to check the block number. */
a65f3558 3920 if (occr_bb == pred_bb)
7506f491 3921 return 1;
c4c81601 3922
0b17ab2f 3923 visited[pred_bb->index] = 1;
7506f491
DE
3924 }
3925 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3926 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3927 visited[pred_bb->index] = 1;
c4c81601 3928
7506f491
DE
3929 /* Neither gen nor kill. */
3930 else
ac7c5af5 3931 {
0b17ab2f 3932 visited[pred_bb->index] = 1;
89e606c9 3933 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 3934 return 1;
ac7c5af5 3935 }
7506f491
DE
3936 }
3937
3938 /* All paths have been checked. */
3939 return 0;
3940}
283a2545
RL
3941
3942/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 3943 memory allocated for that function is freed. */
283a2545
RL
3944
3945static int
1d088dee 3946pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
283a2545
RL
3947{
3948 int rval;
5ed6ace5 3949 char *visited = XCNEWVEC (char, last_basic_block);
283a2545 3950
8e42ace1 3951 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
3952
3953 free (visited);
c4c81601 3954 return rval;
283a2545 3955}
7506f491 3956\f
a42cd965
AM
3957
3958/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 3959 or on an edge. Set the block number of any insns generated to
a42cd965
AM
3960 the value of BB. */
3961
3962static rtx
1d088dee 3963process_insert_insn (struct expr *expr)
a42cd965
AM
3964{
3965 rtx reg = expr->reaching_reg;
fb0c0a12
RK
3966 rtx exp = copy_rtx (expr->expr);
3967 rtx pat;
a42cd965
AM
3968
3969 start_sequence ();
fb0c0a12
RK
3970
3971 /* If the expression is something that's an operand, like a constant,
3972 just copy it to a register. */
3973 if (general_operand (exp, GET_MODE (reg)))
3974 emit_move_insn (reg, exp);
3975
3976 /* Otherwise, make a new insn to compute this expression and make sure the
3977 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3978 expression to make sure we don't have any sharing issues. */
282899df
NS
3979 else
3980 {
3981 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3982
2f021b67
AP
3983 if (insn_invalid_p (insn))
3984 gcc_unreachable ();
282899df
NS
3985 }
3986
589005ff 3987
2f937369 3988 pat = get_insns ();
a42cd965
AM
3989 end_sequence ();
3990
3991 return pat;
3992}
589005ff 3993
a65f3558
JL
3994/* Add EXPR to the end of basic block BB.
3995
3996 This is used by both the PRE and code hoisting.
3997
3998 For PRE, we want to verify that the expr is either transparent
3999 or locally anticipatable in the target block. This check makes
4000 no sense for code hoisting. */
7506f491
DE
4001
4002static void
6fb5fa3c 4003insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
7506f491 4004{
a813c111 4005 rtx insn = BB_END (bb);
7506f491
DE
4006 rtx new_insn;
4007 rtx reg = expr->reaching_reg;
4008 int regno = REGNO (reg);
2f937369 4009 rtx pat, pat_end;
7506f491 4010
a42cd965 4011 pat = process_insert_insn (expr);
282899df 4012 gcc_assert (pat && INSN_P (pat));
2f937369
DM
4013
4014 pat_end = pat;
4015 while (NEXT_INSN (pat_end) != NULL_RTX)
4016 pat_end = NEXT_INSN (pat_end);
7506f491
DE
4017
4018 /* If the last insn is a jump, insert EXPR in front [taking care to
4d6922ee 4019 handle cc0, etc. properly]. Similarly we need to take care of
068473ec 4020 trapping instructions in the presence of non-call exceptions. */
7506f491 4021
7b1b4aed 4022 if (JUMP_P (insn)
4b4bf941 4023 || (NONJUMP_INSN_P (insn)
c5cbcccf
ZD
4024 && (!single_succ_p (bb)
4025 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
7506f491 4026 {
50b2596f 4027#ifdef HAVE_cc0
7506f491 4028 rtx note;
50b2596f 4029#endif
068473ec
JH
4030 /* It should always be the case that we can put these instructions
4031 anywhere in the basic block with performing PRE optimizations.
4032 Check this. */
282899df
NS
4033 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4034 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4035 || TEST_BIT (transp[bb->index], expr->bitmap_index));
7506f491
DE
4036
4037 /* If this is a jump table, then we can't insert stuff here. Since
4038 we know the previous real insn must be the tablejump, we insert
4039 the new instruction just before the tablejump. */
4040 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4041 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4042 insn = prev_real_insn (insn);
4043
4044#ifdef HAVE_cc0
4045 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4046 if cc0 isn't set. */
4047 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4048 if (note)
4049 insn = XEXP (note, 0);
4050 else
4051 {
4052 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4053 if (maybe_cc0_setter
2c3c49de 4054 && INSN_P (maybe_cc0_setter)
7506f491
DE
4055 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4056 insn = maybe_cc0_setter;
4057 }
4058#endif
4059 /* FIXME: What if something in cc0/jump uses value set in new insn? */
6fb5fa3c 4060 new_insn = emit_insn_before_noloc (pat, insn, bb);
3947e2f9 4061 }
c4c81601 4062
3947e2f9
RH
4063 /* Likewise if the last insn is a call, as will happen in the presence
4064 of exception handling. */
7b1b4aed 4065 else if (CALL_P (insn)
c5cbcccf
ZD
4066 && (!single_succ_p (bb)
4067 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3947e2f9 4068 {
3947e2f9
RH
4069 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4070 we search backward and place the instructions before the first
4071 parameter is loaded. Do this for everyone for consistency and a
fbe5a4a6 4072 presumption that we'll get better code elsewhere as well.
3947e2f9 4073
c4c81601 4074 It should always be the case that we can put these instructions
a65f3558
JL
4075 anywhere in the basic block with performing PRE optimizations.
4076 Check this. */
c4c81601 4077
282899df
NS
4078 gcc_assert (!pre
4079 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4080 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3947e2f9
RH
4081
4082 /* Since different machines initialize their parameter registers
4083 in different orders, assume nothing. Collect the set of all
4084 parameter registers. */
a813c111 4085 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3947e2f9 4086
b1d26727
JL
4087 /* If we found all the parameter loads, then we want to insert
4088 before the first parameter load.
4089
4090 If we did not find all the parameter loads, then we might have
4091 stopped on the head of the block, which could be a CODE_LABEL.
4092 If we inserted before the CODE_LABEL, then we would be putting
4093 the insn in the wrong basic block. In that case, put the insn
b5229628 4094 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
7b1b4aed 4095 while (LABEL_P (insn)
589ca5cb 4096 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 4097 insn = NEXT_INSN (insn);
c4c81601 4098
6fb5fa3c 4099 new_insn = emit_insn_before_noloc (pat, insn, bb);
7506f491
DE
4100 }
4101 else
6fb5fa3c 4102 new_insn = emit_insn_after_noloc (pat, insn, bb);
7506f491 4103
2f937369 4104 while (1)
a65f3558 4105 {
2f937369 4106 if (INSN_P (pat))
a65f3558 4107 {
2f937369
DM
4108 add_label_notes (PATTERN (pat), new_insn);
4109 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 4110 }
2f937369
DM
4111 if (pat == pat_end)
4112 break;
4113 pat = NEXT_INSN (pat);
a65f3558 4114 }
3947e2f9 4115
7506f491
DE
4116 gcse_create_count++;
4117
10d22567 4118 if (dump_file)
7506f491 4119 {
10d22567 4120 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 4121 bb->index, INSN_UID (new_insn));
10d22567 4122 fprintf (dump_file, "copying expression %d to reg %d\n",
c4c81601 4123 expr->bitmap_index, regno);
7506f491
DE
4124 }
4125}
4126
a42cd965
AM
4127/* Insert partially redundant expressions on edges in the CFG to make
4128 the expressions fully redundant. */
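/* An editor's sketch (not from the original source):

     bb1: x = a + b;  ->-+
                         +-> bb3: y = a + b;   -- partially redundant
     bb2: ...         ->-+

   Inserting "t = a + b" on the bb2->bb3 edge makes the computation
   in bb3 fully redundant, so it can be replaced by a copy from the
   register holding a + b.  */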
7506f491 4129
a42cd965 4130static int
1d088dee 4131pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
7506f491 4132{
c4c81601 4133 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
4134 sbitmap *inserted;
4135
a42cd965
AM
4136 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4137 if it reaches any of the deleted expressions. */
7506f491 4138
a42cd965
AM
4139 set_size = pre_insert_map[0]->size;
4140 num_edges = NUM_EDGES (edge_list);
02280659 4141 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 4142 sbitmap_vector_zero (inserted, num_edges);
7506f491 4143
a42cd965 4144 for (e = 0; e < num_edges; e++)
7506f491
DE
4145 {
4146 int indx;
e2d2ed72 4147 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 4148
a65f3558 4149 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 4150 {
a42cd965 4151 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 4152
02280659 4153 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
4154 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4155 {
4156 struct expr *expr = index_map[j];
4157 struct occr *occr;
a65f3558 4158
ff7cc307 4159 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
4160 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4161 {
4162 if (! occr->deleted_p)
4163 continue;
4164
3f117656 4165 /* Insert this expression on this edge if it would
ff7cc307 4166 reach the deleted occurrence in BB. */
c4c81601
RK
4167 if (!TEST_BIT (inserted[e], j))
4168 {
4169 rtx insn;
4170 edge eg = INDEX_EDGE (edge_list, e);
4171
4172 /* We can't insert anything on an abnormal and
4173 critical edge, so we insert the insn at the end of
4174 the previous block. There are several alternatives
4175 detailed in Morgans book P277 (sec 10.5) for
4176 handling this situation. This one is easiest for
4177 now. */
4178
b16aa8a5 4179 if (eg->flags & EDGE_ABNORMAL)
6fb5fa3c 4180 insert_insn_end_basic_block (index_map[j], bb, 0);
c4c81601
RK
4181 else
4182 {
4183 insn = process_insert_insn (index_map[j]);
4184 insert_insn_on_edge (insn, eg);
4185 }
4186
10d22567 4187 if (dump_file)
c4c81601 4188 {
10d22567 4189 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
4190 bb->index,
4191 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
10d22567 4192 fprintf (dump_file, "copy expression %d\n",
c4c81601
RK
4193 expr->bitmap_index);
4194 }
4195
a13d4ebf 4196 update_ld_motion_stores (expr);
c4c81601
RK
4197 SET_BIT (inserted[e], j);
4198 did_insert = 1;
4199 gcse_create_count++;
4200 }
4201 }
4202 }
7506f491
DE
4203 }
4204 }
5faf03ae 4205
5a660bff 4206 sbitmap_vector_free (inserted);
a42cd965 4207 return did_insert;
7506f491
DE
4208}
4209
073089a7 4210/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
b885908b
MH
4211 Given "old_reg <- expr" (INSN), instead of adding after it
4212 reaching_reg <- old_reg
4213 it's better to do the following:
4214 reaching_reg <- expr
4215 old_reg <- reaching_reg
4216 because this way copy propagation can discover additional PRE
f5f2e3cd
MH
4217 opportunities. But if this fails, we try the old way.
4218 When "expr" is a store, i.e.
4219 given "MEM <- old_reg", instead of adding after it
4220 reaching_reg <- old_reg
4221 it's better to add it before as follows:
4222 reaching_reg <- old_reg
4223 MEM <- reaching_reg. */
7506f491
DE
4224
4225static void
1d088dee 4226pre_insert_copy_insn (struct expr *expr, rtx insn)
7506f491
DE
4227{
4228 rtx reg = expr->reaching_reg;
4229 int regno = REGNO (reg);
4230 int indx = expr->bitmap_index;
073089a7 4231 rtx pat = PATTERN (insn);
64068ca2 4232 rtx set, first_set, new_insn;
b885908b 4233 rtx old_reg;
073089a7 4234 int i;
7506f491 4235
073089a7 4236 /* This block matches the logic in hash_scan_insn. */
282899df 4237 switch (GET_CODE (pat))
073089a7 4238 {
282899df
NS
4239 case SET:
4240 set = pat;
4241 break;
4242
4243 case PARALLEL:
073089a7
RS
4244 /* Search through the parallel looking for the set whose
4245 source was the expression that we're interested in. */
64068ca2 4246 first_set = NULL_RTX;
073089a7
RS
4247 set = NULL_RTX;
4248 for (i = 0; i < XVECLEN (pat, 0); i++)
4249 {
4250 rtx x = XVECEXP (pat, 0, i);
64068ca2 4251 if (GET_CODE (x) == SET)
073089a7 4252 {
64068ca2
RS
4253 /* If the source was a REG_EQUAL or REG_EQUIV note, we
4254 may not find an equivalent expression, but in this
4255 case the PARALLEL will have a single set. */
4256 if (first_set == NULL_RTX)
4257 first_set = x;
4258 if (expr_equiv_p (SET_SRC (x), expr->expr))
4259 {
4260 set = x;
4261 break;
4262 }
073089a7
RS
4263 }
4264 }
64068ca2
RS
4265
4266 gcc_assert (first_set);
4267 if (set == NULL_RTX)
4268 set = first_set;
282899df
NS
4269 break;
4270
4271 default:
4272 gcc_unreachable ();
073089a7 4273 }
c4c81601 4274
7b1b4aed 4275 if (REG_P (SET_DEST (set)))
073089a7 4276 {
f5f2e3cd
MH
4277 old_reg = SET_DEST (set);
4278 /* Check if we can modify the set destination in the original insn. */
4279 if (validate_change (insn, &SET_DEST (set), reg, 0))
4280 {
4281 new_insn = gen_move_insn (old_reg, reg);
4282 new_insn = emit_insn_after (new_insn, insn);
4283
4284 /* Keep register set table up to date. */
f5f2e3cd
MH
4285 record_one_set (regno, insn);
4286 }
4287 else
4288 {
4289 new_insn = gen_move_insn (reg, old_reg);
4290 new_insn = emit_insn_after (new_insn, insn);
073089a7 4291
f5f2e3cd
MH
4292 /* Keep register set table up to date. */
4293 record_one_set (regno, new_insn);
4294 }
073089a7 4295 }
f5f2e3cd 4296 else /* This is possible only in the case of a store to memory. */
073089a7 4297 {
f5f2e3cd 4298 old_reg = SET_SRC (set);
073089a7 4299 new_insn = gen_move_insn (reg, old_reg);
f5f2e3cd
MH
4300
4301 /* Check if we can modify the set source in the original insn. */
4302 if (validate_change (insn, &SET_SRC (set), reg, 0))
4303 new_insn = emit_insn_before (new_insn, insn);
4304 else
4305 new_insn = emit_insn_after (new_insn, insn);
c4c81601 4306
073089a7
RS
4307 /* Keep register set table up to date. */
4308 record_one_set (regno, new_insn);
4309 }
7506f491
DE
4310
4311 gcse_create_count++;
4312
10d22567
ZD
4313 if (dump_file)
4314 fprintf (dump_file,
a42cd965
AM
4315 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4316 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4317 INSN_UID (insn), regno);
7506f491
DE
4318}
4319
4320/* Copy available expressions that reach the redundant expression
4321 to `reaching_reg'. */
4322
4323static void
1d088dee 4324pre_insert_copies (void)
7506f491 4325{
f5f2e3cd 4326 unsigned int i, added_copy;
c4c81601
RK
4327 struct expr *expr;
4328 struct occr *occr;
4329 struct occr *avail;
a65f3558 4330
7506f491
DE
4331 /* For each available expression in the table, copy the result to
4332 `reaching_reg' if the expression reaches a deleted one.
4333
4334 ??? The current algorithm is rather brute force.
4335 Need to do some profiling. */
4336
02280659
ZD
4337 for (i = 0; i < expr_hash_table.size; i++)
4338 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
4339 {
4340 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4341 we don't want to insert a copy here because the expression may not
4342 really be redundant. So only insert an insn if the expression was
4343 deleted. This test also avoids further processing if the
4344 expression wasn't deleted anywhere. */
4345 if (expr->reaching_reg == NULL)
4346 continue;
7b1b4aed 4347
f5f2e3cd 4348 /* Set when we add a copy for that expression. */
7b1b4aed 4349 added_copy = 0;
c4c81601
RK
4350
4351 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4352 {
4353 if (! occr->deleted_p)
4354 continue;
7506f491 4355
c4c81601
RK
4356 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4357 {
4358 rtx insn = avail->insn;
7506f491 4359
c4c81601
RK
4360 /* No need to handle this one if handled already. */
4361 if (avail->copied_p)
4362 continue;
7506f491 4363
c4c81601
RK
4364 /* Don't handle this one if it's a redundant one. */
4365 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4366 continue;
7506f491 4367
c4c81601 4368 /* Or if the expression doesn't reach the deleted one. */
589005ff 4369 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
4370 expr,
4371 BLOCK_FOR_INSN (occr->insn)))
c4c81601 4372 continue;
7506f491 4373
f5f2e3cd
MH
4374 added_copy = 1;
4375
c4c81601
RK
4376 /* Copy the result of avail to reaching_reg. */
4377 pre_insert_copy_insn (expr, insn);
4378 avail->copied_p = 1;
4379 }
4380 }
f5f2e3cd 4381
7b1b4aed 4382 if (added_copy)
f5f2e3cd 4383 update_ld_motion_stores (expr);
c4c81601 4384 }
7506f491
DE
4385}
4386
10d1bb36
JH
4387/* Emit move from SRC to DEST noting the equivalence with expression computed
4388 in INSN. */
4389static rtx
1d088dee 4390gcse_emit_move_after (rtx src, rtx dest, rtx insn)
10d1bb36
JH
4391{
4392 rtx new;
6bdb8dd6 4393 rtx set = single_set (insn), set2;
10d1bb36
JH
4394 rtx note;
4395 rtx eqv;
4396
4397 /* This should never fail since we're creating a reg->reg copy
4398 we've verified to be valid. */
4399
6bdb8dd6 4400 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 4401
10d1bb36 4402 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
4403 set2 = single_set (new);
4404 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4405 return new;
10d1bb36
JH
4406 if ((note = find_reg_equal_equiv_note (insn)))
4407 eqv = XEXP (note, 0);
4408 else
4409 eqv = SET_SRC (set);
4410
a500466b 4411 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
4412
4413 return new;
4414}
4415
7506f491 4416/* Delete redundant computations.
7506f491
DE
4417 Deletion is done by changing the insn to copy the `reaching_reg' of
4418 the expression into the result of the SET. It is left to later passes
4419 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4420
cc2902df 4421 Returns nonzero if a change is made. */
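/* An editor's sketch (not part of the original source): a redundant

     r7 = a + b;

   is rewritten as

     r7 = reaching_reg;

   where reaching_reg holds the value of a + b computed, or inserted
   by PRE, on every path into the block.  */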
7506f491
DE
4422
4423static int
1d088dee 4424pre_delete (void)
7506f491 4425{
2e653e39 4426 unsigned int i;
63bc1d05 4427 int changed;
c4c81601
RK
4428 struct expr *expr;
4429 struct occr *occr;
a65f3558 4430
7506f491 4431 changed = 0;
02280659 4432 for (i = 0; i < expr_hash_table.size; i++)
073089a7
RS
4433 for (expr = expr_hash_table.table[i];
4434 expr != NULL;
4435 expr = expr->next_same_hash)
c4c81601
RK
4436 {
4437 int indx = expr->bitmap_index;
7506f491 4438
c4c81601
RK
4439 /* We only need to search antic_occr since we require
4440 ANTLOC != 0. */
7506f491 4441
c4c81601
RK
4442 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4443 {
4444 rtx insn = occr->insn;
4445 rtx set;
e2d2ed72 4446 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 4447
073089a7
RS
4448 /* We only delete insns that have a single_set. */
4449 if (TEST_BIT (pre_delete_map[bb->index], indx)
6fb5fa3c
DB
4450 && (set = single_set (insn)) != 0
4451 && dbg_cnt (pre_insn))
c4c81601 4452 {
c4c81601
RK
4453 /* Create a pseudo-reg to store the result of reaching
4454 expressions into. Get the mode for the new pseudo from
4455 the mode of the original destination pseudo. */
4456 if (expr->reaching_reg == NULL)
4457 expr->reaching_reg
4458 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4459
9b76aa3b 4460 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
10d1bb36
JH
4461 delete_insn (insn);
4462 occr->deleted_p = 1;
4463 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4464 changed = 1;
4465 gcse_subst_count++;
7506f491 4466
10d22567 4467 if (dump_file)
c4c81601 4468 {
10d22567 4469 fprintf (dump_file,
c4c81601
RK
4470 "PRE: redundant insn %d (expression %d) in ",
4471 INSN_UID (insn), indx);
10d22567 4472 fprintf (dump_file, "bb %d, reaching reg is %d\n",
0b17ab2f 4473 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
4474 }
4475 }
4476 }
4477 }
7506f491
DE
4478
4479 return changed;
4480}
4481
4482/* Perform GCSE optimizations using PRE.
4483 This is called by one_pre_gcse_pass after all the dataflow analysis
4484 has been done.
4485
c4c81601
RK
4486 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4487 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4488 Compiler Design and Implementation.
7506f491 4489
c4c81601
RK
4490 ??? A new pseudo reg is created to hold the reaching expression. The nice
4491 thing about the classical approach is that it would try to use an existing
4492 reg. If the register can't be adequately optimized [i.e. we introduce
4493 reload problems], one could add a pass here to propagate the new register
4494 through the block.
7506f491 4495
c4c81601
RK
4496 ??? We don't handle single sets in PARALLELs because we're [currently] not
4497 able to copy the rest of the parallel when we insert copies to create full
4498 redundancies from partial redundancies. However, there's no reason why we
4499 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
4500 redundancies. */
4501
4502static int
1d088dee 4503pre_gcse (void)
7506f491 4504{
2e653e39
RK
4505 unsigned int i;
4506 int did_insert, changed;
7506f491 4507 struct expr **index_map;
c4c81601 4508 struct expr *expr;
7506f491
DE
4509
4510 /* Compute a mapping from expression number (`bitmap_index') to
4511 hash table entry. */
4512
5ed6ace5 4513 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659
ZD
4514 for (i = 0; i < expr_hash_table.size; i++)
4515 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 4516 index_map[expr->bitmap_index] = expr;
7506f491
DE
4517
4518 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
4519 pre_redundant_insns = sbitmap_alloc (max_cuid);
4520 sbitmap_zero (pre_redundant_insns);
7506f491
DE
4521
4522 /* Delete the redundant insns first so that
4523 - we know what register to use for the new insns and for the other
4524 ones with reaching expressions
4525 - we know which insns are redundant when we go to create copies */
c4c81601 4526
7506f491 4527 changed = pre_delete ();
a42cd965 4528 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 4529
7506f491 4530 /* In other places with reaching expressions, copy the expression to the
a42cd965 4531 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 4532 pre_insert_copies ();
a42cd965
AM
4533 if (did_insert)
4534 {
4535 commit_edge_insertions ();
4536 changed = 1;
4537 }
7506f491 4538
283a2545 4539 free (index_map);
76ac938b 4540 sbitmap_free (pre_redundant_insns);
7506f491
DE
4541 return changed;
4542}
4543
4544/* Top level routine to perform one PRE GCSE pass.
4545
cc2902df 4546 Return nonzero if a change was made. */
7506f491
DE
4547
4548static int
1d088dee 4549one_pre_gcse_pass (int pass)
7506f491
DE
4550{
4551 int changed = 0;
4552
4553 gcse_subst_count = 0;
4554 gcse_create_count = 0;
4555
02280659 4556 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 4557 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
4558 if (flag_gcse_lm)
4559 compute_ld_motion_mems ();
4560
02280659 4561 compute_hash_table (&expr_hash_table);
a13d4ebf 4562 trim_ld_motion_mems ();
10d22567
ZD
4563 if (dump_file)
4564 dump_hash_table (dump_file, "Expression", &expr_hash_table);
c4c81601 4565
02280659 4566 if (expr_hash_table.n_elems > 0)
7506f491 4567 {
02280659 4568 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
4569 compute_pre_data ();
4570 changed |= pre_gcse ();
a42cd965 4571 free_edge_list (edge_list);
7506f491
DE
4572 free_pre_mem ();
4573 }
c4c81601 4574
a13d4ebf 4575 free_ldst_mems ();
6809cbf9 4576 remove_fake_exit_edges ();
02280659 4577 free_hash_table (&expr_hash_table);
7506f491 4578
10d22567 4579 if (dump_file)
7506f491 4580 {
10d22567 4581 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
faed5cc3 4582 current_function_name (), pass, bytes_used);
10d22567 4583 fprintf (dump_file, "%d substs, %d insns created\n",
c4c81601 4584 gcse_subst_count, gcse_create_count);
7506f491
DE
4585 }
4586
4587 return changed;
4588}
aeb2f500
JW
4589\f
4590/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
4591 If notes are added to an insn which references a CODE_LABEL, the
4592 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4593 because the following loop optimization pass requires them. */
aeb2f500 4594
aeb2f500
JW
4595/* ??? If there was a jump optimization pass after gcse and before loop,
4596 then we would not need to do this here, because jump would add the
4597 necessary REG_LABEL notes. */
4598
4599static void
1d088dee 4600add_label_notes (rtx x, rtx insn)
aeb2f500
JW
4601{
4602 enum rtx_code code = GET_CODE (x);
4603 int i, j;
6f7d635c 4604 const char *fmt;
aeb2f500
JW
4605
4606 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4607 {
6b3603c2 4608 /* This code used to ignore labels that referred to dispatch tables to
e0bb17a8 4609 avoid flow generating (slightly) worse code.
6b3603c2 4610
ac7c5af5
JL
4611 We no longer ignore such label references (see LABEL_REF handling in
4612 mark_jump_label for additional information). */
c4c81601 4613
6b8c9327 4614 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 4615 REG_NOTES (insn));
5b1ef594 4616 if (LABEL_P (XEXP (x, 0)))
589005ff 4617 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
4618 return;
4619 }
4620
c4c81601 4621 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
4622 {
4623 if (fmt[i] == 'e')
4624 add_label_notes (XEXP (x, i), insn);
4625 else if (fmt[i] == 'E')
4626 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4627 add_label_notes (XVECEXP (x, i, j), insn);
4628 }
4629}
a65f3558
JL
4630
4631/* Compute transparent outgoing information for each block.
4632
4633 An expression is transparent to an edge unless it is killed by
4634 the edge itself. This can only happen with abnormal control flow,
4635 when the edge is traversed through a call. This happens with
4636 non-local labels and exceptions.
4637
4638 This would not be necessary if we split the edge. While this is
4639 normally impossible for abnormal critical edges, with some effort
4640 it should be possible with exception handling, since we still have
4641 control over which handler should be invoked. But due to increased
4642 EH table sizes, this may not be worthwhile. */
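/* An editor's sketch (not part of the original source): if a block
   ends in

     r = *p;
     call foo ();   -- abnormal edge to an exception handler

   the load *p may be clobbered by the call before control reaches
   the handler, so the loop below resets TRANSPOUT for MEM
   expressions in every block that ends in a call.  */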
4643
4644static void
1d088dee 4645compute_transpout (void)
a65f3558 4646{
e0082a72 4647 basic_block bb;
2e653e39 4648 unsigned int i;
c4c81601 4649 struct expr *expr;
a65f3558 4650
d55bc081 4651 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 4652
e0082a72 4653 FOR_EACH_BB (bb)
a65f3558 4654 {
a65f3558
JL
4655 /* Note that flow inserted a nop at the end of basic blocks that
4656 end in call instructions for reasons other than abnormal
4657 control flow. */
7b1b4aed 4658 if (! CALL_P (BB_END (bb)))
a65f3558
JL
4659 continue;
4660
02280659
ZD
4661 for (i = 0; i < expr_hash_table.size; i++)
4662 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
7b1b4aed 4663 if (MEM_P (expr->expr))
c4c81601
RK
4664 {
4665 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4666 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4667 continue;
589005ff 4668
c4c81601
RK
4669 /* ??? Optimally, we would use interprocedural alias
4670 analysis to determine if this mem is actually killed
4671 by this call. */
e0082a72 4672 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 4673 }
a65f3558
JL
4674 }
4675}
dfdb644f 4676
bb457bd9
JL
4677/* Code Hoisting variables and subroutines. */
4678
4679/* Very busy expressions. */
4680static sbitmap *hoist_vbein;
4681static sbitmap *hoist_vbeout;
4682
4683/* Hoistable expressions. */
4684static sbitmap *hoist_exprs;
4685
bb457bd9 4686/* ??? We could compute post dominators and run this algorithm in
68e82b83 4687 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
4688 more effective than the tail merging code in jump.c.
4689
4690 It's unclear if tail merging could be run in parallel with
4691 code hoisting. It would be nice. */
4692
4693/* Allocate vars used for code hoisting analysis. */
4694
4695static void
1d088dee 4696alloc_code_hoist_mem (int n_blocks, int n_exprs)
bb457bd9
JL
4697{
4698 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4699 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4700 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4701
4702 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4703 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4704 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4705 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
4706}
4707
4708/* Free vars used for code hoisting analysis. */
4709
4710static void
1d088dee 4711free_code_hoist_mem (void)
bb457bd9 4712{
5a660bff
DB
4713 sbitmap_vector_free (antloc);
4714 sbitmap_vector_free (transp);
4715 sbitmap_vector_free (comp);
bb457bd9 4716
5a660bff
DB
4717 sbitmap_vector_free (hoist_vbein);
4718 sbitmap_vector_free (hoist_vbeout);
4719 sbitmap_vector_free (hoist_exprs);
4720 sbitmap_vector_free (transpout);
bb457bd9 4721
d47cc544 4722 free_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
4723}
4724
4725/* Compute the very busy expressions at entry/exit from each block.
4726
4727 An expression is very busy if all paths from a given point
4728 compute the expression. */
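/* For instance (a sketch):

     if (c)
       x = a + b;
     else
       y = a + b;

   a + b is very busy just before the `if', since every path from that
   point computes it.

   In bitmap terms, the fixed point computed below is

     hoist_vbein (bb)  = antloc (bb) | (hoist_vbeout (bb) & transp (bb))
     hoist_vbeout (bb) = intersection of hoist_vbein over the successors
                         of bb (left empty just before the exit block).  */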
4729
4730static void
1d088dee 4731compute_code_hoist_vbeinout (void)
bb457bd9 4732{
e0082a72
ZD
4733 int changed, passes;
4734 basic_block bb;
bb457bd9 4735
d55bc081
ZD
4736 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4737 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
4738
4739 passes = 0;
4740 changed = 1;
c4c81601 4741
bb457bd9
JL
4742 while (changed)
4743 {
4744 changed = 0;
c4c81601 4745
bb457bd9
JL
4746 /* We scan the blocks in the reverse order to speed up
4747 the convergence. */
e0082a72 4748 FOR_EACH_BB_REVERSE (bb)
bb457bd9 4749 {
e0082a72
ZD
4750 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4751 hoist_vbeout[bb->index], transp[bb->index]);
4752 if (bb->next_bb != EXIT_BLOCK_PTR)
4753 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 4754 }
c4c81601 4755
bb457bd9
JL
4756 passes++;
4757 }
4758
10d22567
ZD
4759 if (dump_file)
4760 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
bb457bd9
JL
4761}
4762
4763/* Top level routine to do the dataflow analysis needed by code hoisting. */
4764
4765static void
1d088dee 4766compute_code_hoist_data (void)
bb457bd9 4767{
02280659 4768 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
4769 compute_transpout ();
4770 compute_code_hoist_vbeinout ();
d47cc544 4771 calculate_dominance_info (CDI_DOMINATORS);
10d22567
ZD
4772 if (dump_file)
4773 fprintf (dump_file, "\n");
bb457bd9
JL
4774}
4775
4776/* Determine if the expression identified by EXPR_INDEX would
4777 reach BB unimpaired if it was placed at the end of EXPR_BB.
4778
4779 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4780 to me that the expression must either be computed or transparent in
4781 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4782 would allow the expression to be hoisted out of loops, even if
4783 the expression wasn't a loop invariant.
4784
4785 Contrast this to reachability for PRE where an expression is
4786 considered reachable if *any* path reaches instead of *all*
4787 paths. */
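/* For example (a sketch): placing a + b at the end of EXPR_BB and using
   its value in BB is only safe if no intervening block can change the
   operands:

     EXPR_BB:  ... a + b ...
     MID:      a = foo ();     -- neither computes a + b nor transparent
     BB:       ... a + b ...

   Here the hoisted value would be stale in BB, so zero is returned.  */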
4788
4789static int
1d088dee 4790hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
bb457bd9
JL
4791{
4792 edge pred;
628f6a4e 4793 edge_iterator ei;
283a2545 4794 int visited_allocated_locally = 0;
589005ff 4795
bb457bd9
JL
4796
4797 if (visited == NULL)
4798 {
8e42ace1 4799 visited_allocated_locally = 1;
5ed6ace5 4800 visited = XCNEWVEC (char, last_basic_block);
bb457bd9
JL
4801 }
4802
628f6a4e 4803 FOR_EACH_EDGE (pred, ei, bb->preds)
bb457bd9 4804 {
e2d2ed72 4805 basic_block pred_bb = pred->src;
bb457bd9
JL
4806
4807 if (pred->src == ENTRY_BLOCK_PTR)
4808 break;
f305679f
JH
4809 else if (pred_bb == expr_bb)
4810 continue;
0b17ab2f 4811 else if (visited[pred_bb->index])
bb457bd9 4812 continue;
c4c81601 4813
bb457bd9 4814 /* Does this predecessor generate this expression? */
0b17ab2f 4815 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 4816 break;
0b17ab2f 4817 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 4818 break;
c4c81601 4819
bb457bd9
JL
4820 /* Not killed. */
4821 else
4822 {
0b17ab2f 4823 visited[pred_bb->index] = 1;
bb457bd9
JL
4824 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4825 pred_bb, visited))
4826 break;
4827 }
4828 }
589005ff 4829 if (visited_allocated_locally)
283a2545 4830 free (visited);
c4c81601 4831
bb457bd9
JL
4832 return (pred == NULL);
4833}
4834\f
4835/* Actually perform code hoisting. */
c4c81601 4836
bb457bd9 4837static void
1d088dee 4838hoist_code (void)
bb457bd9 4839{
e0082a72 4840 basic_block bb, dominated;
66f97d31 4841 VEC (basic_block, heap) *domby;
c635a1ec 4842 unsigned int i,j;
bb457bd9 4843 struct expr **index_map;
c4c81601 4844 struct expr *expr;
bb457bd9 4845
d55bc081 4846 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
4847
4848 /* Compute a mapping from expression number (`bitmap_index') to
4849 hash table entry. */
4850
5ed6ace5 4851 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659
ZD
4852 for (i = 0; i < expr_hash_table.size; i++)
4853 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 4854 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
4855
4856 /* Walk over each basic block looking for potentially hoistable
4857 expressions; nothing gets hoisted from the entry block. */
e0082a72 4858 FOR_EACH_BB (bb)
bb457bd9
JL
4859 {
4860 int found = 0;
4861 int insn_inserted_p;
4862
66f97d31 4863 domby = get_dominated_by (CDI_DOMINATORS, bb);
bb457bd9
JL
4864 /* Examine each expression that is very busy at the exit of this
4865 block. These are the potentially hoistable expressions. */
e0082a72 4866 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
4867 {
4868 int hoistable = 0;
c4c81601 4869
c635a1ec
DB
4870 if (TEST_BIT (hoist_vbeout[bb->index], i)
4871 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
4872 {
4873 /* We've found a potentially hoistable expression, now
4874 we look at every block BB dominates to see if it
4875 computes the expression. */
66f97d31 4876 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
bb457bd9
JL
4877 {
4878 /* Ignore self dominance. */
c635a1ec 4879 if (bb == dominated)
bb457bd9 4880 continue;
bb457bd9
JL
4881 /* We've found a dominated block, now see if it computes
4882 the busy expression and whether or not moving that
4883 expression to the "beginning" of that block is safe. */
e0082a72 4884 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
4885 continue;
4886
4887 /* Note if the expression would reach the dominated block
589005ff 4888 unimpaired if it was placed at the end of BB.
bb457bd9
JL
4889
4890 Keep track of how many times this expression is hoistable
4891 from a dominated block into BB. */
e0082a72 4892 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
4893 hoistable++;
4894 }
4895
ff7cc307 4896 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
4897 expression, then note it in the bitmap of expressions to
4898 hoist. It makes no sense to hoist things which are computed
4899 in only one BB, and doing so tends to pessimize register
4900 allocation. One could increase this value to try harder
4901 to avoid any possible code expansion due to register
4902 allocation issues; however experiments have shown that
4903 the vast majority of hoistable expressions are only movable
e0bb17a8 4904 from two successors, so raising this threshold is likely
bb457bd9
JL
4905 to nullify any benefit we get from code hoisting. */
4906 if (hoistable > 1)
4907 {
e0082a72 4908 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
4909 found = 1;
4910 }
4911 }
4912 }
bb457bd9
JL
4913 /* If we found nothing to hoist, then quit now. */
4914 if (! found)
c635a1ec 4915 {
66f97d31
ZD
4916 VEC_free (basic_block, heap, domby);
4917 continue;
c635a1ec 4918 }
bb457bd9
JL
4919
4920 /* Loop over all the hoistable expressions. */
e0082a72 4921 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
4922 {
4923 /* We want to insert the expression into BB only once, so
4924 note when we've inserted it. */
4925 insn_inserted_p = 0;
4926
4927 /* These tests should be the same as the tests above. */
cb83c2ec 4928 if (TEST_BIT (hoist_exprs[bb->index], i))
bb457bd9
JL
4929 {
4930 /* We've found a potentially hoistable expression, now
4931 we look at every block BB dominates to see if it
4932 computes the expression. */
66f97d31 4933 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
bb457bd9
JL
4934 {
4935 /* Ignore self dominance. */
c635a1ec 4936 if (bb == dominated)
bb457bd9
JL
4937 continue;
4938
4939 /* We've found a dominated block, now see if it computes
4940 the busy expression and whether or not moving that
4941 expression to the "beginning" of that block is safe. */
e0082a72 4942 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
4943 continue;
4944
4945 /* The expression is computed in the dominated block and
4946 it would be safe to compute it at the start of the
4947 dominated block. Now we have to determine if the
ff7cc307 4948 expression would reach the dominated block if it was
bb457bd9 4949 placed at the end of BB. */
e0082a72 4950 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
4951 {
4952 struct expr *expr = index_map[i];
4953 struct occr *occr = expr->antic_occr;
4954 rtx insn;
4955 rtx set;
4956
ff7cc307 4957 /* Find the right occurrence of this expression. */
e0082a72 4958 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
4959 occr = occr->next;
4960
282899df 4961 gcc_assert (occr);
bb457bd9 4962 insn = occr->insn;
bb457bd9 4963 set = single_set (insn);
282899df 4964 gcc_assert (set);
bb457bd9
JL
4965
4966 /* Create a pseudo-reg to store the result of reaching
4967 expressions into. Get the mode for the new pseudo
4968 from the mode of the original destination pseudo. */
4969 if (expr->reaching_reg == NULL)
4970 expr->reaching_reg
4971 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4972
10d1bb36
JH
4973 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4974 delete_insn (insn);
4975 occr->deleted_p = 1;
4976 if (!insn_inserted_p)
bb457bd9 4977 {
6fb5fa3c 4978 insert_insn_end_basic_block (index_map[i], bb, 0);
10d1bb36 4979 insn_inserted_p = 1;
bb457bd9
JL
4980 }
4981 }
4982 }
4983 }
4984 }
66f97d31 4985 VEC_free (basic_block, heap, domby);
bb457bd9 4986 }
c4c81601 4987
8e42ace1 4988 free (index_map);
bb457bd9
JL
4989}
4990
4991/* Top level routine to perform one code hoisting (aka unification) pass
4992
cc2902df 4993 Return nonzero if a change was made. */
bb457bd9
JL
4994
4995static int
1d088dee 4996one_code_hoisting_pass (void)
bb457bd9
JL
4997{
4998 int changed = 0;
4999
02280659
ZD
5000 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5001 compute_hash_table (&expr_hash_table);
10d22567
ZD
5002 if (dump_file)
5003 dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 5004
02280659 5005 if (expr_hash_table.n_elems > 0)
bb457bd9 5006 {
02280659 5007 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
5008 compute_code_hoist_data ();
5009 hoist_code ();
5010 free_code_hoist_mem ();
5011 }
c4c81601 5012
02280659 5013 free_hash_table (&expr_hash_table);
bb457bd9
JL
5014
5015 return changed;
5016}
a13d4ebf
AM
5017\f
5018/* Here we provide the things required to do store motion towards
5019 the exit. In order for this to be effective, gcse also needed to
5020 be taught how to move a load when it is killed only by a store to itself.
5021
5022 int i;
5023 float a[10];
5024
5025 void foo(float scale)
5026 {
5027 for (i=0; i<10; i++)
5028 a[i] *= scale;
5029 }
5030
5031 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
5032 the load out since it's live around the loop, and stored at the bottom
5033 of the loop.
a13d4ebf 5034
589005ff 5035 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
5036 an enhancement to gcse which when using edge based lcm, recognizes
5037 this situation and allows gcse to move the load out of the loop.
5038
5039 Once gcse has hoisted the load, store motion can then push this
5040 load towards the exit, and we end up with no loads or stores of 'i'
5041 in the loop. */
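/* Schematically (a sketch of the intended result), the loop above
   becomes

     for (reg = 0; reg < 10; reg++)   -- `i' lives in a register
       a[reg] *= scale;
     i = reg;                         -- single store at the exit

   once load motion and store motion have both run.  */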
5042
9727e468
RG
5043static hashval_t
5044pre_ldst_expr_hash (const void *p)
5045{
5046 int do_not_record_p = 0;
5047 const struct ls_expr *x = p;
5048 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
5049}
5050
5051static int
5052pre_ldst_expr_eq (const void *p1, const void *p2)
5053{
5054 const struct ls_expr *ptr1 = p1, *ptr2 = p2;
5055 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
5056}
5057
ff7cc307 5058/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
5059 doesn't find one, we create one and initialize it. */
5060
5061static struct ls_expr *
1d088dee 5062ldst_entry (rtx x)
a13d4ebf 5063{
b58b21d5 5064 int do_not_record_p = 0;
a13d4ebf 5065 struct ls_expr * ptr;
b58b21d5 5066 unsigned int hash;
9727e468
RG
5067 void **slot;
5068 struct ls_expr e;
a13d4ebf 5069
0516f6fe
SB
5070 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5071 NULL, /*have_reg_qty=*/false);
a13d4ebf 5072
9727e468
RG
5073 e.pattern = x;
5074 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
5075 if (*slot)
5076 return (struct ls_expr *)*slot;
b58b21d5 5077
5ed6ace5 5078 ptr = XNEW (struct ls_expr);
b58b21d5
RS
5079
5080 ptr->next = pre_ldst_mems;
5081 ptr->expr = NULL;
5082 ptr->pattern = x;
5083 ptr->pattern_regs = NULL_RTX;
5084 ptr->loads = NULL_RTX;
5085 ptr->stores = NULL_RTX;
5086 ptr->reaching_reg = NULL_RTX;
5087 ptr->invalid = 0;
5088 ptr->index = 0;
5089 ptr->hash_index = hash;
5090 pre_ldst_mems = ptr;
9727e468 5091 *slot = ptr;
589005ff 5092
a13d4ebf
AM
5093 return ptr;
5094}
5095
5096/* Free up an individual ldst entry. */
5097
589005ff 5098static void
1d088dee 5099free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 5100{
aaa4ca30
AJ
5101 free_INSN_LIST_list (& ptr->loads);
5102 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
5103
5104 free (ptr);
5105}
5106
5107/* Free up all memory associated with the ldst list. */
5108
5109static void
1d088dee 5110free_ldst_mems (void)
a13d4ebf 5111{
35b5442a
RG
5112 if (pre_ldst_table)
5113 htab_delete (pre_ldst_table);
9727e468
RG
5114 pre_ldst_table = NULL;
5115
589005ff 5116 while (pre_ldst_mems)
a13d4ebf
AM
5117 {
5118 struct ls_expr * tmp = pre_ldst_mems;
5119
5120 pre_ldst_mems = pre_ldst_mems->next;
5121
5122 free_ldst_entry (tmp);
5123 }
5124
5125 pre_ldst_mems = NULL;
5126}
5127
5128/* Dump debugging info about the ldst list. */
5129
5130static void
1d088dee 5131print_ldst_list (FILE * file)
a13d4ebf
AM
5132{
5133 struct ls_expr * ptr;
5134
5135 fprintf (file, "LDST list: \n");
5136
62e5bf5d 5137 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
a13d4ebf
AM
5138 {
5139 fprintf (file, " Pattern (%3d): ", ptr->index);
5140
5141 print_rtl (file, ptr->pattern);
5142
5143 fprintf (file, "\n Loads : ");
5144
5145 if (ptr->loads)
5146 print_rtl (file, ptr->loads);
5147 else
5148 fprintf (file, "(nil)");
5149
5150 fprintf (file, "\n Stores : ");
5151
5152 if (ptr->stores)
5153 print_rtl (file, ptr->stores);
5154 else
5155 fprintf (file, "(nil)");
5156
5157 fprintf (file, "\n\n");
5158 }
5159
5160 fprintf (file, "\n");
5161}
5162
5163/* Return the entry for X if X is in the list of ldst only expressions, else NULL. */
5164
5165static struct ls_expr *
1d088dee 5166find_rtx_in_ldst (rtx x)
a13d4ebf 5167{
9727e468
RG
5168 struct ls_expr e;
5169 void **slot;
6375779a
RG
5170 if (!pre_ldst_table)
5171 return NULL;
9727e468
RG
5172 e.pattern = x;
5173 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
5174 if (!slot || ((struct ls_expr *)*slot)->invalid)
5175 return NULL;
5176 return *slot;
a13d4ebf
AM
5177}
5178
5179/* Assign each element of the list of mems a monotonically increasing value. */
5180
5181static int
1d088dee 5182enumerate_ldsts (void)
a13d4ebf
AM
5183{
5184 struct ls_expr * ptr;
5185 int n = 0;
5186
5187 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5188 ptr->index = n++;
5189
5190 return n;
5191}
5192
5193/* Return first item in the list. */
5194
5195static inline struct ls_expr *
1d088dee 5196first_ls_expr (void)
a13d4ebf
AM
5197{
5198 return pre_ldst_mems;
5199}
5200
0e8a66de 5201/* Return the next item in the list after the specified one. */
a13d4ebf
AM
5202
5203static inline struct ls_expr *
1d088dee 5204next_ls_expr (struct ls_expr * ptr)
a13d4ebf
AM
5205{
5206 return ptr->next;
5207}
5208\f
5209/* Load Motion for loads which only kill themselves. */
5210
5211/* Return true if X is a simple MEM operation, with no registers or
5212 side effects. These are the types of loads we consider for the
5213 ld_motion list, otherwise we let the usual aliasing take care of it. */
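/* For example (a sketch): a load such as (mem:SI (symbol_ref "i"))
   passes these tests, while a volatile or BLKmode MEM, a MEM that may
   trap when -fnon-call-exceptions is in effect, or a MEM whose address
   mentions the stack pointer does not.  */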
5214
589005ff 5215static int
1d088dee 5216simple_mem (rtx x)
a13d4ebf 5217{
7b1b4aed 5218 if (! MEM_P (x))
a13d4ebf 5219 return 0;
589005ff 5220
a13d4ebf
AM
5221 if (MEM_VOLATILE_P (x))
5222 return 0;
589005ff 5223
a13d4ebf
AM
5224 if (GET_MODE (x) == BLKmode)
5225 return 0;
aaa4ca30 5226
47a3dae1
ZD
5227 /* If we are handling exceptions, we must be careful with memory references
5228 that may trap. If we are not, the behavior is undefined, so we may just
5229 continue. */
5230 if (flag_non_call_exceptions && may_trap_p (x))
98d3d336
RS
5231 return 0;
5232
47a3dae1
ZD
5233 if (side_effects_p (x))
5234 return 0;
589005ff 5235
47a3dae1
ZD
5236 /* Do not consider function arguments passed on stack. */
5237 if (reg_mentioned_p (stack_pointer_rtx, x))
5238 return 0;
5239
5240 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5241 return 0;
5242
5243 return 1;
a13d4ebf
AM
5244}
5245
589005ff
KH
5246/* Make sure there isn't a buried reference in this pattern anywhere.
5247 If there is, invalidate the entry for it since we're not capable
5248 of fixing it up just yet. We have to be sure we know about ALL
a13d4ebf
AM
5249 loads since the aliasing code will allow all entries in the
5250 ld_motion list to not-alias themselves. If we miss a load, we will get
589005ff 5251 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
5252 fix it up. */
5253
5254static void
1d088dee 5255invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
5256{
5257 const char * fmt;
8e42ace1 5258 int i, j;
a13d4ebf
AM
5259 struct ls_expr * ptr;
5260
5261 /* Invalidate it in the list. */
7b1b4aed 5262 if (MEM_P (x) && simple_mem (x))
a13d4ebf
AM
5263 {
5264 ptr = ldst_entry (x);
5265 ptr->invalid = 1;
5266 }
5267
5268 /* Recursively process the insn. */
5269 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 5270
a13d4ebf
AM
5271 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5272 {
5273 if (fmt[i] == 'e')
5274 invalidate_any_buried_refs (XEXP (x, i));
5275 else if (fmt[i] == 'E')
5276 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5277 invalidate_any_buried_refs (XVECEXP (x, i, j));
5278 }
5279}
5280
4d3eb89a
HPN
5281/* Find all the 'simple' MEMs which are used in LOADs and STORES. "Simple"
5282 here means MEM loads and stores to symbols, with no side effects
5283 and no registers in the expression. For a MEM destination, we also
5284 check that the insn is still valid if we replace the destination with a
5285 REG, as is done in update_ld_motion_stores. Any uses/defs
5286 which don't match these criteria are invalidated and trimmed out
5287 later. */
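/* For the global `i' above, schematically (a sketch):

     (set (reg:SI 100) (mem:SI (symbol_ref "i")))   -- noted as a load
     (set (mem:SI (symbol_ref "i")) (reg:SI 101))   -- noted as a store

   while a reference to `i' buried inside a larger pattern invalidates
   the entry via invalidate_any_buried_refs.  */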
a13d4ebf 5288
589005ff 5289static void
1d088dee 5290compute_ld_motion_mems (void)
a13d4ebf
AM
5291{
5292 struct ls_expr * ptr;
e0082a72 5293 basic_block bb;
a13d4ebf 5294 rtx insn;
589005ff 5295
a13d4ebf 5296 pre_ldst_mems = NULL;
9727e468
RG
5297 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5298 pre_ldst_expr_eq, NULL);
a13d4ebf 5299
e0082a72 5300 FOR_EACH_BB (bb)
a13d4ebf 5301 {
eb232f4e 5302 FOR_BB_INSNS (bb, insn)
a13d4ebf 5303 {
735e8085 5304 if (INSN_P (insn))
a13d4ebf
AM
5305 {
5306 if (GET_CODE (PATTERN (insn)) == SET)
5307 {
5308 rtx src = SET_SRC (PATTERN (insn));
5309 rtx dest = SET_DEST (PATTERN (insn));
5310
5311 /* Check for a simple LOAD... */
7b1b4aed 5312 if (MEM_P (src) && simple_mem (src))
a13d4ebf
AM
5313 {
5314 ptr = ldst_entry (src);
7b1b4aed 5315 if (REG_P (dest))
a13d4ebf
AM
5316 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5317 else
5318 ptr->invalid = 1;
5319 }
5320 else
5321 {
5322 /* Make sure there isn't a buried load somewhere. */
5323 invalidate_any_buried_refs (src);
5324 }
589005ff 5325
a13d4ebf
AM
5326 /* Check for stores. Don't worry about aliased ones, they
5327 will block any movement we might do later. We only care
5328 about this exact pattern since those are the only
5329 circumstance that we will ignore the aliasing info. */
7b1b4aed 5330 if (MEM_P (dest) && simple_mem (dest))
a13d4ebf
AM
5331 {
5332 ptr = ldst_entry (dest);
589005ff 5333
7b1b4aed 5334 if (! MEM_P (src)
4d3eb89a
HPN
5335 && GET_CODE (src) != ASM_OPERANDS
5336 /* Check for REG manually since want_to_gcse_p
5337 returns 0 for all REGs. */
1707bafa 5338 && can_assign_to_reg_p (src))
a13d4ebf
AM
5339 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5340 else
5341 ptr->invalid = 1;
5342 }
5343 }
5344 else
5345 invalidate_any_buried_refs (PATTERN (insn));
5346 }
5347 }
5348 }
5349}
5350
589005ff 5351/* Remove any references that have been either invalidated or are not in the
a13d4ebf
AM
5352 expression list for pre gcse. */
5353
5354static void
1d088dee 5355trim_ld_motion_mems (void)
a13d4ebf 5356{
b58b21d5
RS
5357 struct ls_expr * * last = & pre_ldst_mems;
5358 struct ls_expr * ptr = pre_ldst_mems;
a13d4ebf
AM
5359
5360 while (ptr != NULL)
5361 {
b58b21d5 5362 struct expr * expr;
589005ff 5363
a13d4ebf 5364 /* Delete if entry has been made invalid. */
b58b21d5 5365 if (! ptr->invalid)
a13d4ebf 5366 {
a13d4ebf 5367 /* Delete if we cannot find this mem in the expression list. */
b58b21d5 5368 unsigned int hash = ptr->hash_index % expr_hash_table.size;
589005ff 5369
b58b21d5
RS
5370 for (expr = expr_hash_table.table[hash];
5371 expr != NULL;
5372 expr = expr->next_same_hash)
5373 if (expr_equiv_p (expr->expr, ptr->pattern))
5374 break;
a13d4ebf
AM
5375 }
5376 else
b58b21d5
RS
5377 expr = (struct expr *) 0;
5378
5379 if (expr)
a13d4ebf
AM
5380 {
5381 /* Set the expression field if we are keeping it. */
a13d4ebf 5382 ptr->expr = expr;
b58b21d5 5383 last = & ptr->next;
a13d4ebf
AM
5384 ptr = ptr->next;
5385 }
b58b21d5
RS
5386 else
5387 {
5388 *last = ptr->next;
9727e468 5389 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
b58b21d5
RS
5390 free_ldst_entry (ptr);
5391 ptr = * last;
5392 }
a13d4ebf
AM
5393 }
5394
5395 /* Show the world what we've found. */
10d22567
ZD
5396 if (dump_file && pre_ldst_mems != NULL)
5397 print_ldst_list (dump_file);
a13d4ebf
AM
5398}
5399
5400/* This routine will take an expression which we are replacing with
5401 a reaching register, and update any stores that are needed if
5402 that expression is in the ld_motion list. Stores are updated by
a98ebe2e 5403 copying their SRC to the reaching register, and then storing
a13d4ebf
AM
5404 the reaching register into the store location. This keeps the
5405 correct value in the reaching register for the loads. */
5406
5407static void
1d088dee 5408update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
5409{
5410 struct ls_expr * mem_ptr;
5411
5412 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5413 {
589005ff
KH
5414 /* We can try to find just the REACHED stores, but it shouldn't
5415 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
5416 dead and should be eliminated later. */
5417
4d3eb89a
HPN
5418 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5419 where reg is the reaching reg used in the load. We checked in
5420 compute_ld_motion_mems that we can replace (set mem expr) with
5421 (set reg expr) in that insn. */
a13d4ebf 5422 rtx list = mem_ptr->stores;
589005ff 5423
a13d4ebf
AM
5424 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5425 {
5426 rtx insn = XEXP (list, 0);
5427 rtx pat = PATTERN (insn);
5428 rtx src = SET_SRC (pat);
5429 rtx reg = expr->reaching_reg;
c57718d3 5430 rtx copy, new;
a13d4ebf
AM
5431
5432 /* If we've already copied it, continue. */
5433 if (expr->reaching_reg == src)
5434 continue;
589005ff 5435
10d22567 5436 if (dump_file)
a13d4ebf 5437 {
10d22567
ZD
5438 fprintf (dump_file, "PRE: store updated with reaching reg ");
5439 print_rtl (dump_file, expr->reaching_reg);
5440 fprintf (dump_file, ":\n ");
5441 print_inline_rtx (dump_file, insn, 8);
5442 fprintf (dump_file, "\n");
a13d4ebf 5443 }
589005ff 5444
47a3dae1 5445 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
c57718d3
RK
5446 new = emit_insn_before (copy, insn);
5447 record_one_set (REGNO (reg), new);
a13d4ebf 5448 SET_SRC (pat) = reg;
6fb5fa3c 5449 df_insn_rescan (insn);
a13d4ebf
AM
5450
5451 /* un-recognize this pattern since it's probably different now. */
5452 INSN_CODE (insn) = -1;
5453 gcse_create_count++;
5454 }
5455 }
5456}
5457\f
5458/* Store motion code. */
5459
47a3dae1
ZD
5460#define ANTIC_STORE_LIST(x) ((x)->loads)
5461#define AVAIL_STORE_LIST(x) ((x)->stores)
5462#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5463
589005ff 5464/* This is used to communicate the target bitvector we want to use in the
aaa4ca30 5465 reg_set_info routine when called via the note_stores mechanism. */
47a3dae1
ZD
5466static int * regvec;
5467
5468/* And current insn, for the same routine. */
5469static rtx compute_store_table_current_insn;
aaa4ca30 5470
a13d4ebf
AM
5471/* Used in computing the reverse edge graph bit vectors. */
5472static sbitmap * st_antloc;
5473
5474/* Global holding the number of store expressions we are dealing with. */
5475static int num_stores;
5476
01c43039
RE
5477/* Checks to set if we need to mark a register set. Called from
5478 note_stores. */
a13d4ebf 5479
aaa4ca30 5480static void
1d088dee 5481reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
01c43039 5482 void *data)
a13d4ebf 5483{
01c43039
RE
5484 sbitmap bb_reg = data;
5485
aaa4ca30
AJ
5486 if (GET_CODE (dest) == SUBREG)
5487 dest = SUBREG_REG (dest);
adfcce61 5488
7b1b4aed 5489 if (REG_P (dest))
01c43039
RE
5490 {
5491 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5492 if (bb_reg)
5493 SET_BIT (bb_reg, REGNO (dest));
5494 }
5495}
5496
5497/* Clear any mark that says that this insn sets dest. Called from
5498 note_stores. */
5499
5500static void
5501reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5502 void *data)
5503{
5504 int *dead_vec = data;
5505
5506 if (GET_CODE (dest) == SUBREG)
5507 dest = SUBREG_REG (dest);
5508
7b1b4aed 5509 if (REG_P (dest) &&
01c43039
RE
5510 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5511 dead_vec[REGNO (dest)] = 0;
a13d4ebf
AM
5512}
5513
47a3dae1
ZD
5514/* Return zero if some of the registers in list X are killed
5515 because they are set, as recorded in REGS_SET. */
1d088dee 5516
47a3dae1 5517static bool
1d088dee 5518store_ops_ok (rtx x, int *regs_set)
47a3dae1
ZD
5519{
5520 rtx reg;
5521
5522 for (; x; x = XEXP (x, 1))
5523 {
5524 reg = XEXP (x, 0);
5525 if (regs_set[REGNO(reg)])
1d088dee 5526 return false;
47a3dae1 5527 }
a13d4ebf 5528
47a3dae1
ZD
5529 return true;
5530}
5531
5532/* Returns a list of registers mentioned in X. */
5533static rtx
1d088dee 5534extract_mentioned_regs (rtx x)
47a3dae1
ZD
5535{
5536 return extract_mentioned_regs_helper (x, NULL_RTX);
5537}
5538
5539/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5540 registers. */
5541static rtx
1d088dee 5542extract_mentioned_regs_helper (rtx x, rtx accum)
a13d4ebf
AM
5543{
5544 int i;
5545 enum rtx_code code;
5546 const char * fmt;
5547
5548 /* Repeat is used to turn tail-recursion into iteration. */
5549 repeat:
5550
5551 if (x == 0)
47a3dae1 5552 return accum;
a13d4ebf
AM
5553
5554 code = GET_CODE (x);
5555 switch (code)
5556 {
5557 case REG:
47a3dae1 5558 return alloc_EXPR_LIST (0, x, accum);
a13d4ebf
AM
5559
5560 case MEM:
5561 x = XEXP (x, 0);
5562 goto repeat;
5563
5564 case PRE_DEC:
5565 case PRE_INC:
6fb5fa3c 5566 case PRE_MODIFY:
a13d4ebf
AM
5567 case POST_DEC:
5568 case POST_INC:
6fb5fa3c 5569 case POST_MODIFY:
47a3dae1 5570 /* We do not run this function with arguments having side effects. */
282899df 5571 gcc_unreachable ();
a13d4ebf
AM
5572
5573 case PC:
5574 case CC0: /*FIXME*/
5575 case CONST:
5576 case CONST_INT:
5577 case CONST_DOUBLE:
69ef87e2 5578 case CONST_VECTOR:
a13d4ebf
AM
5579 case SYMBOL_REF:
5580 case LABEL_REF:
5581 case ADDR_VEC:
5582 case ADDR_DIFF_VEC:
47a3dae1 5583 return accum;
a13d4ebf
AM
5584
5585 default:
5586 break;
5587 }
5588
5589 i = GET_RTX_LENGTH (code) - 1;
5590 fmt = GET_RTX_FORMAT (code);
589005ff 5591
a13d4ebf
AM
5592 for (; i >= 0; i--)
5593 {
5594 if (fmt[i] == 'e')
5595 {
5596 rtx tem = XEXP (x, i);
5597
5598 /* If we are about to do the last recursive call
47a3dae1 5599 needed at this level, change it into iteration. */
a13d4ebf
AM
5600 if (i == 0)
5601 {
5602 x = tem;
5603 goto repeat;
5604 }
589005ff 5605
47a3dae1 5606 accum = extract_mentioned_regs_helper (tem, accum);
a13d4ebf
AM
5607 }
5608 else if (fmt[i] == 'E')
5609 {
5610 int j;
589005ff 5611
a13d4ebf 5612 for (j = 0; j < XVECLEN (x, i); j++)
47a3dae1 5613 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
a13d4ebf
AM
5614 }
5615 }
5616
47a3dae1 5617 return accum;
a13d4ebf
AM
5618}
5619
47a3dae1
ZD
5620/* Determine whether INSN is a MEM store pattern that we will consider moving.
5621 REGS_SET_BEFORE is a bitmap of registers set before (and including) the
5622 current insn; REGS_SET_AFTER is a bitmap of registers set after (and
5623 including) the insn in this basic block. We must be passing through BB from
5624 head to end, as we are using this fact to speed things up.
1d088dee 5625
47a3dae1
ZD
5626 The results are stored this way:
5627
5628 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5629 -- if the processed expression is not anticipatable, NULL_RTX is added
5630 there instead, so that we can use it as indicator that no further
5631 expression of this type may be anticipatable
5632 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5633 consequently, all of them but this head are dead and may be deleted.
5634 -- if the expression is not available, the insn due to which it fails to be
5635 available is stored in reaching_reg.
5636
5637 Things are complicated a bit by the fact that there may already be stores
5638 to the same MEM from other blocks; also, the caller must take care of the
e0bb17a8 5639 necessary cleanup of the temporary markers after the end of the basic block.
47a3dae1 5640 */
a13d4ebf
AM
5641
5642static void
1d088dee 5643find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
a13d4ebf
AM
5644{
5645 struct ls_expr * ptr;
47a3dae1
ZD
5646 rtx dest, set, tmp;
5647 int check_anticipatable, check_available;
5648 basic_block bb = BLOCK_FOR_INSN (insn);
a13d4ebf 5649
47a3dae1
ZD
5650 set = single_set (insn);
5651 if (!set)
a13d4ebf
AM
5652 return;
5653
47a3dae1 5654 dest = SET_DEST (set);
589005ff 5655
7b1b4aed 5656 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
a13d4ebf 5657 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
5658 return;
5659
47a3dae1
ZD
5660 if (side_effects_p (dest))
5661 return;
aaa4ca30 5662
47a3dae1
ZD
5663 /* If we are handling exceptions, we must be careful with memory references
5664 that may trap. If we are not, the behavior is undefined, so we may just
5665 continue. */
94f24ddc 5666 if (flag_non_call_exceptions && may_trap_p (dest))
47a3dae1 5667 return;
1d088dee 5668
c2e2375e
UW
5669 /* Even if the destination cannot trap, the source may. In this case we'd
5670 need to handle updating the REG_EH_REGION note. */
5671 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5672 return;
5673
26fb114d
SB
5674 /* Make sure that the SET_SRC of this store insns can be assigned to
5675 a register, or we will fail later on in replace_store_insn, which
5676 assumes that we can do this. But sometimes the target machine has
5677 oddities like MEM read-modify-write instruction. See for example
5678 PR24257. */
5679 if (!can_assign_to_reg_p (SET_SRC (set)))
5680 return;
5681
a13d4ebf 5682 ptr = ldst_entry (dest);
47a3dae1
ZD
5683 if (!ptr->pattern_regs)
5684 ptr->pattern_regs = extract_mentioned_regs (dest);
5685
5686 /* Do not check for anticipatability if we either found one anticipatable
5687 store already, or tested for one and found out that it was killed. */
5688 check_anticipatable = 0;
5689 if (!ANTIC_STORE_LIST (ptr))
5690 check_anticipatable = 1;
5691 else
5692 {
5693 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5694 if (tmp != NULL_RTX
5695 && BLOCK_FOR_INSN (tmp) != bb)
5696 check_anticipatable = 1;
5697 }
5698 if (check_anticipatable)
5699 {
5700 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5701 tmp = NULL_RTX;
5702 else
5703 tmp = insn;
5704 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5705 ANTIC_STORE_LIST (ptr));
5706 }
a13d4ebf 5707
e0bb17a8 5708 /* It is not necessary to check whether the store is available if we did
47a3dae1
ZD
5709 it successfully before; if we failed before, do not bother to check
5710 until we reach the insn that caused us to fail. */
5711 check_available = 0;
5712 if (!AVAIL_STORE_LIST (ptr))
5713 check_available = 1;
5714 else
5715 {
5716 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5717 if (BLOCK_FOR_INSN (tmp) != bb)
5718 check_available = 1;
5719 }
5720 if (check_available)
5721 {
5722 /* Check that we have already reached the insn at which the check
5723 failed last time. */
5724 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5725 {
a813c111 5726 for (tmp = BB_END (bb);
47a3dae1
ZD
5727 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5728 tmp = PREV_INSN (tmp))
5729 continue;
5730 if (tmp == insn)
5731 check_available = 0;
5732 }
5733 else
5734 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5735 bb, regs_set_after,
5736 &LAST_AVAIL_CHECK_FAILURE (ptr));
5737 }
5738 if (!check_available)
5739 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5740}
1d088dee 5741
47a3dae1 5742/* Find available and anticipatable stores. */
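/* A sketch of the scan done below: for each block, last_set_in[regno]
   first records the UID of the last insn in the block that sets REGNO;
   on a second walk over the block, already_set[] marks registers set
   at or before the insn being processed, while last_set_in[] is
   trimmed back so it describes registers set at or after it.
   find_moveable_store then uses the two arrays to decide
   anticipatability and availability without rescanning the block.  */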
a13d4ebf
AM
5743
5744static int
1d088dee 5745compute_store_table (void)
a13d4ebf 5746{
e0082a72
ZD
5747 int ret;
5748 basic_block bb;
aaa4ca30 5749 unsigned regno;
47a3dae1
ZD
5750 rtx insn, pat, tmp;
5751 int *last_set_in, *already_set;
5752 struct ls_expr * ptr, **prev_next_ptr_ptr;
aaa4ca30 5753
a13d4ebf
AM
5754 max_gcse_regno = max_reg_num ();
5755
703ad42b 5756 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
aaa4ca30 5757 max_gcse_regno);
d55bc081 5758 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 5759 pre_ldst_mems = 0;
9727e468
RG
5760 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5761 pre_ldst_expr_eq, NULL);
5ed6ace5
MD
5762 last_set_in = XCNEWVEC (int, max_gcse_regno);
5763 already_set = XNEWVEC (int, max_gcse_regno);
aaa4ca30 5764
a13d4ebf 5765 /* Find all the stores we care about. */
e0082a72 5766 FOR_EACH_BB (bb)
a13d4ebf 5767 {
47a3dae1 5768 /* First compute the registers set in this block. */
47a3dae1
ZD
5769 regvec = last_set_in;
5770
eb232f4e 5771 FOR_BB_INSNS (bb, insn)
47a3dae1
ZD
5772 {
5773 if (! INSN_P (insn))
5774 continue;
5775
7b1b4aed 5776 if (CALL_P (insn))
47a3dae1 5777 {
47a3dae1 5778 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5779 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
01c43039
RE
5780 {
5781 last_set_in[regno] = INSN_UID (insn);
5782 SET_BIT (reg_set_in_block[bb->index], regno);
5783 }
47a3dae1
ZD
5784 }
5785
5786 pat = PATTERN (insn);
5787 compute_store_table_current_insn = insn;
01c43039 5788 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
47a3dae1
ZD
5789 }
5790
47a3dae1
ZD
5791 /* Now find the stores. */
5792 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5793 regvec = already_set;
eb232f4e 5794 FOR_BB_INSNS (bb, insn)
a13d4ebf 5795 {
19652adf 5796 if (! INSN_P (insn))
a13d4ebf
AM
5797 continue;
5798
7b1b4aed 5799 if (CALL_P (insn))
aaa4ca30
AJ
5800 {
5801 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5802 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
47a3dae1 5803 already_set[regno] = 1;
aaa4ca30 5804 }
589005ff 5805
a13d4ebf 5806 pat = PATTERN (insn);
aaa4ca30 5807 note_stores (pat, reg_set_info, NULL);
589005ff 5808
a13d4ebf 5809 /* Now that we've marked regs, look for stores. */
47a3dae1
ZD
5810 find_moveable_store (insn, already_set, last_set_in);
5811
5812 /* Unmark regs that are no longer set. */
01c43039
RE
5813 compute_store_table_current_insn = insn;
5814 note_stores (pat, reg_clear_last_set, last_set_in);
7b1b4aed 5815 if (CALL_P (insn))
01c43039 5816 {
01c43039 5817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5818 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
01c43039
RE
5819 && last_set_in[regno] == INSN_UID (insn))
5820 last_set_in[regno] = 0;
5821 }
47a3dae1
ZD
5822 }
5823
01c43039
RE
5824#ifdef ENABLE_CHECKING
5825 /* last_set_in should now be all-zero. */
5826 for (regno = 0; regno < max_gcse_regno; regno++)
282899df 5827 gcc_assert (!last_set_in[regno]);
01c43039
RE
5828#endif
5829
47a3dae1
ZD
5830 /* Clear temporary marks. */
5831 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5832 {
5833 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5834 if (ANTIC_STORE_LIST (ptr)
5835 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5836 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5837 }
5838 }
5839
5840 /* Remove the stores that are not available anywhere, as there will
5841 be no opportunity to optimize them. */
5842 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5843 ptr != NULL;
5844 ptr = *prev_next_ptr_ptr)
5845 {
5846 if (!AVAIL_STORE_LIST (ptr))
5847 {
5848 *prev_next_ptr_ptr = ptr->next;
9727e468 5849 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
47a3dae1 5850 free_ldst_entry (ptr);
a13d4ebf 5851 }
47a3dae1
ZD
5852 else
5853 prev_next_ptr_ptr = &ptr->next;
a13d4ebf
AM
5854 }
5855
5856 ret = enumerate_ldsts ();
589005ff 5857
10d22567 5858 if (dump_file)
a13d4ebf 5859 {
10d22567
ZD
5860 fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
5861 print_ldst_list (dump_file);
a13d4ebf 5862 }
589005ff 5863
47a3dae1
ZD
5864 free (last_set_in);
5865 free (already_set);
a13d4ebf
AM
5866 return ret;
5867}
5868
3b14e3af
ZD
5869/* Check to see if the load X is aliased with STORE_PATTERN.
5870 AFTER is true if we are checking the case when STORE_PATTERN occurs
5871 after the X. */
a13d4ebf 5872
47a3dae1 5873static bool
3b14e3af 5874load_kills_store (rtx x, rtx store_pattern, int after)
a13d4ebf 5875{
3b14e3af
ZD
5876 if (after)
5877 return anti_dependence (x, store_pattern);
5878 else
5879 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5880 rtx_addr_varies_p);
a13d4ebf
AM
5881}
5882
589005ff 5883/* Go through the entire insn X, looking for any loads which might alias
3b14e3af
ZD
5884 STORE_PATTERN. Return true if found.
5885 AFTER is true if we are checking the case when STORE_PATTERN occurs
5886 after the insn X. */
a13d4ebf 5887
47a3dae1 5888static bool
3b14e3af 5889find_loads (rtx x, rtx store_pattern, int after)
a13d4ebf
AM
5890{
5891 const char * fmt;
8e42ace1 5892 int i, j;
47a3dae1 5893 int ret = false;
a13d4ebf 5894
24a28584 5895 if (!x)
47a3dae1 5896 return false;
24a28584 5897
589005ff 5898 if (GET_CODE (x) == SET)
a13d4ebf
AM
5899 x = SET_SRC (x);
5900
7b1b4aed 5901 if (MEM_P (x))
a13d4ebf 5902 {
3b14e3af 5903 if (load_kills_store (x, store_pattern, after))
47a3dae1 5904 return true;
a13d4ebf
AM
5905 }
5906
5907 /* Recursively process the insn. */
5908 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 5909
a13d4ebf
AM
5910 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5911 {
5912 if (fmt[i] == 'e')
3b14e3af 5913 ret |= find_loads (XEXP (x, i), store_pattern, after);
a13d4ebf
AM
5914 else if (fmt[i] == 'E')
5915 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3b14e3af 5916 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
a13d4ebf
AM
5917 }
5918 return ret;
5919}
5920
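/* Check whether the pattern PAT (a SET, or one element of a PARALLEL)
   kills the store X.  AFTER has the same meaning as in
   store_killed_in_insn below.  */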
1071bcbd
AO
5921static inline bool
5922store_killed_in_pat (rtx x, rtx pat, int after)
5923{
5924 if (GET_CODE (pat) == SET)
5925 {
5926 rtx dest = SET_DEST (pat);
5927
5928 if (GET_CODE (dest) == ZERO_EXTRACT)
5929 dest = XEXP (dest, 0);
5930
5931 /* Check for memory stores to aliased objects. */
5932 if (MEM_P (dest)
5933 && !expr_equiv_p (dest, x))
5934 {
5935 if (after)
5936 {
5937 if (output_dependence (dest, x))
5938 return true;
5939 }
5940 else
5941 {
5942 if (output_dependence (x, dest))
5943 return true;
5944 }
5945 }
5946 }
5947
5948 if (find_loads (pat, x, after))
5949 return true;
5950
5951 return false;
5952}
5953
589005ff 5954/* Check if INSN kills the store pattern X (is aliased with it).
3b14e3af 5955 AFTER is true if we are checking the case when store X occurs
3f117656 5956 after the insn. Return true if it does. */
a13d4ebf 5957
47a3dae1 5958static bool
3b14e3af 5959store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
a13d4ebf 5960{
1071bcbd 5961 rtx reg, base, note, pat;
94f24ddc 5962
735e8085 5963 if (!INSN_P (insn))
47a3dae1 5964 return false;
589005ff 5965
7b1b4aed 5966 if (CALL_P (insn))
a13d4ebf 5967 {
1218665b
JJ
5968 /* A normal or pure call might read from pattern,
5969 but a const call will not. */
47a3dae1
ZD
5970 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5971 return true;
5972
94f24ddc
ZD
5973 /* But even a const call reads its parameters. Check whether the
5974 base of some of registers used in mem is stack pointer. */
5975 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5976 {
bc083e18 5977 base = find_base_term (XEXP (reg, 0));
94f24ddc
ZD
5978 if (!base
5979 || (GET_CODE (base) == ADDRESS
5980 && GET_MODE (base) == Pmode
5981 && XEXP (base, 0) == stack_pointer_rtx))
5982 return true;
5983 }
47a3dae1
ZD
5984
5985 return false;
a13d4ebf 5986 }
589005ff 5987
1071bcbd
AO
5988 pat = PATTERN (insn);
5989 if (GET_CODE (pat) == SET)
a13d4ebf 5990 {
1071bcbd 5991 if (store_killed_in_pat (x, pat, after))
d088acea 5992 return true;
a13d4ebf 5993 }
1071bcbd
AO
5994 else if (GET_CODE (pat) == PARALLEL)
5995 {
5996 int i;
5997
5998 for (i = 0; i < XVECLEN (pat, 0); i++)
5999 if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
6000 return true;
6001 }
d088acea
ZD
6002 else if (find_loads (PATTERN (insn), x, after))
6003 return true;
6004
6005 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
6006 location aliased with X, then this insn kills X. */
6007 note = find_reg_equal_equiv_note (insn);
6008 if (! note)
6009 return false;
6010 note = XEXP (note, 0);
6011
6012 /* However, if the note represents a must alias rather than a may
6013 alias relationship, then it does not kill X. */
6014 if (expr_equiv_p (note, x))
6015 return false;
6016
6017 /* See if there are any aliased loads in the note. */
6018 return find_loads (note, x, after);
a13d4ebf
AM
6019}
6020
47a3dae1
ZD
6021/* Returns true if the expression X is loaded or clobbered on or after INSN
6022 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
6023 or after the insn. X_REGS is the list of registers mentioned in X. If the
6024 store is killed, return in FAIL_INSN the last insn in which that happens. */
a13d4ebf 6025
47a3dae1 6026static bool
1d088dee
AJ
6027store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
6028 int *regs_set_after, rtx *fail_insn)
a13d4ebf 6029{
a813c111 6030 rtx last = BB_END (bb), act;
aaa4ca30 6031
47a3dae1 6032 if (!store_ops_ok (x_regs, regs_set_after))
1d088dee 6033 {
47a3dae1
ZD
6034 /* We do not know where it will happen. */
6035 if (fail_insn)
6036 *fail_insn = NULL_RTX;
6037 return true;
6038 }
a13d4ebf 6039
47a3dae1
ZD
6040 /* Scan from the end, so that fail_insn is determined correctly. */
6041 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
3b14e3af 6042 if (store_killed_in_insn (x, x_regs, act, false))
47a3dae1
ZD
6043 {
6044 if (fail_insn)
6045 *fail_insn = act;
6046 return true;
6047 }
589005ff 6048
47a3dae1 6049 return false;
a13d4ebf 6050}
1d088dee 6051
47a3dae1
ZD
6052/* Returns true if the expression X is loaded or clobbered on or before INSN
6053 within basic block BB. X_REGS is the list of registers mentioned in X.
6054 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
6055static bool
1d088dee
AJ
6056store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
6057 int *regs_set_before)
a13d4ebf 6058{
a813c111 6059 rtx first = BB_HEAD (bb);
a13d4ebf 6060
47a3dae1
ZD
6061 if (!store_ops_ok (x_regs, regs_set_before))
6062 return true;
a13d4ebf 6063
47a3dae1 6064 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
3b14e3af 6065 if (store_killed_in_insn (x, x_regs, insn, true))
47a3dae1 6066 return true;
589005ff 6067
47a3dae1 6068 return false;
a13d4ebf 6069}
1d088dee 6070
47a3dae1
ZD
6071/* Fill in the available, anticipatable, transparent and kill vectors,
6072 based on the lists of available and anticipatable stores. */
a13d4ebf 6073static void
1d088dee 6074build_store_vectors (void)
a13d4ebf 6075{
47a3dae1
ZD
6076 basic_block bb;
6077 int *regs_set_in_block;
a13d4ebf
AM
6078 rtx insn, st;
6079 struct ls_expr * ptr;
47a3dae1 6080 unsigned regno;
a13d4ebf
AM
6081
6082 /* Build the gen_vector. This is any store in the table which is not killed
6083 by aliasing later in its block. */
703ad42b 6084 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6085 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 6086
703ad42b 6087 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6088 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 6089
a13d4ebf 6090 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 6091 {
47a3dae1 6092 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
a13d4ebf
AM
6093 {
6094 insn = XEXP (st, 0);
e2d2ed72 6095 bb = BLOCK_FOR_INSN (insn);
589005ff 6096
47a3dae1
ZD
6097 /* If we've already seen an available expression in this block,
6098 we can delete this one (it occurs earlier in the block). We'll
6099 copy the SRC expression to an unused register in case there
6100 are any side effects. */
6101 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf 6102 {
47a3dae1 6103 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
10d22567
ZD
6104 if (dump_file)
6105 fprintf (dump_file, "Removing redundant store:\n");
d088acea 6106 replace_store_insn (r, XEXP (st, 0), bb, ptr);
47a3dae1 6107 continue;
a13d4ebf 6108 }
47a3dae1 6109 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf 6110 }
589005ff 6111
47a3dae1
ZD
6112 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6113 {
6114 insn = XEXP (st, 0);
6115 bb = BLOCK_FOR_INSN (insn);
6116 SET_BIT (st_antloc[bb->index], ptr->index);
6117 }
a13d4ebf 6118 }
589005ff 6119
703ad42b 6120 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6121 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 6122
703ad42b 6123 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6124 sbitmap_vector_zero (transp, last_basic_block);
5ed6ace5 6125 regs_set_in_block = XNEWVEC (int, max_gcse_regno);
a13d4ebf 6126
47a3dae1
ZD
6127 FOR_EACH_BB (bb)
6128 {
6129 for (regno = 0; regno < max_gcse_regno; regno++)
6130 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6131
6132 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6133 {
a813c111 6134 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
47a3dae1
ZD
6135 bb, regs_set_in_block, NULL))
6136 {
e0bb17a8 6137 /* It should not be necessary to consider the expression
47a3dae1
ZD
6138 killed if it is both anticipatable and available. */
6139 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6140 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6141 SET_BIT (ae_kill[bb->index], ptr->index);
1d088dee
AJ
6142 }
6143 else
6144 SET_BIT (transp[bb->index], ptr->index);
6145 }
47a3dae1
ZD
6146 }
6147
6148 free (regs_set_in_block);
aaa4ca30 6149
10d22567 6150 if (dump_file)
aaa4ca30 6151 {
10d22567
ZD
6152 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
6153 dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
6154 dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
6155 dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
6156 }
6157}
6158
fbe5a4a6 6159/* Insert an instruction at the beginning of a basic block, and update
a813c111 6160 the BB_HEAD if needed. */
a13d4ebf 6161
589005ff 6162static void
6fb5fa3c 6163insert_insn_start_basic_block (rtx insn, basic_block bb)
a13d4ebf
AM
6164{
6165 /* Insert at start of successor block. */
a813c111
SB
6166 rtx prev = PREV_INSN (BB_HEAD (bb));
6167 rtx before = BB_HEAD (bb);
a13d4ebf
AM
6168 while (before != 0)
6169 {
7b1b4aed 6170 if (! LABEL_P (before)
a38e7aa5 6171 && !NOTE_INSN_BASIC_BLOCK_P (before))
a13d4ebf
AM
6172 break;
6173 prev = before;
a813c111 6174 if (prev == BB_END (bb))
a13d4ebf
AM
6175 break;
6176 before = NEXT_INSN (before);
6177 }
6178
6fb5fa3c 6179 insn = emit_insn_after_noloc (insn, prev, bb);
a13d4ebf 6180
10d22567 6181 if (dump_file)
a13d4ebf 6182 {
10d22567 6183 fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 6184 bb->index);
10d22567
ZD
6185 print_inline_rtx (dump_file, insn, 6);
6186 fprintf (dump_file, "\n");
a13d4ebf
AM
6187 }
6188}
6189
6190/* This routine will insert a store on an edge. EXPR is the ldst entry for
cc2902df 6191 the memory reference, and E is the edge to insert it on. Returns nonzero
a13d4ebf
AM
6192 if an edge insertion was performed. */
6193
6194static int
1d088dee 6195insert_store (struct ls_expr * expr, edge e)
a13d4ebf
AM
6196{
6197 rtx reg, insn;
e2d2ed72 6198 basic_block bb;
a13d4ebf 6199 edge tmp;
628f6a4e 6200 edge_iterator ei;
a13d4ebf
AM
6201
6202 /* We did all the deletes before this insert, so if we didn't delete a
6203 store, then we haven't set the reaching reg yet either. */
6204 if (expr->reaching_reg == NULL_RTX)
6205 return 0;
6206
a0c8285b
JH
6207 if (e->flags & EDGE_FAKE)
6208 return 0;
6209
a13d4ebf 6210 reg = expr->reaching_reg;
47a3dae1 6211 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
589005ff 6212
a13d4ebf
AM
6213 /* If we are inserting this expression on ALL predecessor edges of a BB,
6214 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 6215 edges so we don't try to insert it on the other edges. */
e2d2ed72 6216 bb = e->dest;
628f6a4e 6217 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
3f2eae23 6218 if (!(tmp->flags & EDGE_FAKE))
a0c8285b
JH
6219 {
6220 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
282899df
NS
6221
6222 gcc_assert (index != EDGE_INDEX_NO_EDGE);
a0c8285b
JH
6223 if (! TEST_BIT (pre_insert_map[index], expr->index))
6224 break;
6225 }
a13d4ebf
AM
6226
6227 /* If tmp is NULL, we found an insertion on every edge, blank the
6228 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 6229 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf 6230 {
628f6a4e 6231 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
a13d4ebf
AM
6232 {
6233 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6234 RESET_BIT (pre_insert_map[index], expr->index);
6235 }
6fb5fa3c 6236 insert_insn_start_basic_block (insn, bb);
a13d4ebf
AM
6237 return 0;
6238 }
589005ff 6239
b16aa8a5
RK
6240 /* We can't put stores in the front of blocks pointed to by abnormal
6241 edges since that may put a store where one didn't use to be. */
6242 gcc_assert (!(e->flags & EDGE_ABNORMAL));
a13d4ebf
AM
6243
6244 insert_insn_on_edge (insn, e);
589005ff 6245
10d22567 6246 if (dump_file)
a13d4ebf 6247 {
10d22567 6248 fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 6249 e->src->index, e->dest->index);
10d22567
ZD
6250 print_inline_rtx (dump_file, insn, 6);
6251 fprintf (dump_file, "\n");
a13d4ebf 6252 }
589005ff 6253
a13d4ebf
AM
6254 return 1;
6255}
6256
d088acea
ZD
6257/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6258 memory location in SMEXPR set in basic block BB.
6259
6260 This could be rather expensive. */
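/* A sketch of the walk done below: we do an iterative DFS over the
   successors of BB; within each visited block we scan insns up to the
   first anticipatable store of SMEXPR (which re-establishes the
   equivalence) and drop every REG_EQUAL/REG_EQUIV note that
   references the memory location.  */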
6261
6262static void
6263remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6264{
628f6a4e
BE
6265 edge_iterator *stack, ei;
6266 int sp;
6267 edge act;
d088acea 6268 sbitmap visited = sbitmap_alloc (last_basic_block);
d088acea
ZD
6269 rtx last, insn, note;
6270 rtx mem = smexpr->pattern;
6271
5ed6ace5 6272 stack = XNEWVEC (edge_iterator, n_basic_blocks);
628f6a4e
BE
6273 sp = 0;
6274 ei = ei_start (bb->succs);
6275
d088acea 6276 sbitmap_zero (visited);
d088acea 6277
f76ccf60 6278 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
d088acea
ZD
6279 while (1)
6280 {
6281 if (!act)
6282 {
628f6a4e 6283 if (!sp)
d088acea
ZD
6284 {
6285 free (stack);
6286 sbitmap_free (visited);
6287 return;
6288 }
628f6a4e 6289 act = ei_edge (stack[--sp]);
d088acea
ZD
6290 }
6291 bb = act->dest;
7b1b4aed 6292
d088acea 6293 if (bb == EXIT_BLOCK_PTR
d1c6a401 6294 || TEST_BIT (visited, bb->index))
d088acea 6295 {
628f6a4e
BE
6296 if (!ei_end_p (ei))
6297 ei_next (&ei);
6298 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
d088acea
ZD
6299 continue;
6300 }
6301 SET_BIT (visited, bb->index);
6302
6303 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6304 {
6305 for (last = ANTIC_STORE_LIST (smexpr);
6306 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6307 last = XEXP (last, 1))
6308 continue;
6309 last = XEXP (last, 0);
6310 }
6311 else
a813c111 6312 last = NEXT_INSN (BB_END (bb));
7b1b4aed 6313
a813c111 6314 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
d088acea
ZD
6315 if (INSN_P (insn))
6316 {
6317 note = find_reg_equal_equiv_note (insn);
6318 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6319 continue;
6320
10d22567
ZD
6321 if (dump_file)
6322 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
d088acea
ZD
6323 INSN_UID (insn));
6324 remove_note (insn, note);
6325 }
628f6a4e
BE
6326
6327 if (!ei_end_p (ei))
6328 ei_next (&ei);
6329 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6330
6331 if (EDGE_COUNT (bb->succs) > 0)
d088acea
ZD
6332 {
6333 if (act)
628f6a4e
BE
6334 stack[sp++] = ei;
6335 ei = ei_start (bb->succs);
f76ccf60 6336 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
d088acea
ZD
6337 }
6338 }
6339}
6340
a13d4ebf
AM
6341/* This routine will replace a store with a SET to a specified register. */
6342
6343static void
d088acea 6344replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
a13d4ebf 6345{
d7fe1183 6346 rtx insn, mem, note, set, ptr, pair;
589005ff 6347
d088acea 6348 mem = smexpr->pattern;
9a318d30 6349 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
a13d4ebf 6350 insn = emit_insn_after (insn, del);
589005ff 6351
10d22567 6352 if (dump_file)
a13d4ebf 6353 {
10d22567 6354 fprintf (dump_file,
0b17ab2f 6355 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
10d22567
ZD
6356 print_inline_rtx (dump_file, del, 6);
6357 fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n ");
6358 print_inline_rtx (dump_file, insn, 6);
6359 fprintf (dump_file, "\n");
a13d4ebf 6360 }
589005ff 6361
d088acea
ZD
6362 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6363 if (XEXP (ptr, 0) == del)
6364 {
6365 XEXP (ptr, 0) = insn;
6366 break;
6367 }
d7fe1183
ZD
6368
6369 /* Move the notes from the deleted insn to its replacement, and patch
6370 up the LIBCALL notes. */
6371 REG_NOTES (insn) = REG_NOTES (del);
6372
6373 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6374 if (note)
6375 {
6376 pair = XEXP (note, 0);
6377 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6378 XEXP (note, 0) = insn;
6379 }
6380 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6381 if (note)
6382 {
6383 pair = XEXP (note, 0);
6384 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6385 XEXP (note, 0) = insn;
6386 }
6387
49ce134f 6388 delete_insn (del);
d088acea
ZD
6389
6390 /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6391 they are no longer accurate if they are reached by this
6392 definition, so drop them. */
a813c111 6393 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
d088acea
ZD
6394 if (INSN_P (insn))
6395 {
6396 set = single_set (insn);
6397 if (!set)
6398 continue;
6399 if (expr_equiv_p (SET_DEST (set), mem))
6400 return;
6401 note = find_reg_equal_equiv_note (insn);
6402 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6403 continue;
6404
10d22567
ZD
6405 if (dump_file)
6406 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
d088acea
ZD
6407 INSN_UID (insn));
6408 remove_note (insn, note);
6409 }
6410 remove_reachable_equiv_notes (bb, smexpr);
a13d4ebf
AM
6411}
6412
6413
6414/* Delete a store, but copy the value that would have been stored into
6415 the reaching_reg for later storing. */
6416
6417static void
1d088dee 6418delete_store (struct ls_expr * expr, basic_block bb)
a13d4ebf
AM
6419{
6420 rtx reg, i, del;
6421
6422 if (expr->reaching_reg == NULL_RTX)
6423 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
a13d4ebf 6424
a13d4ebf 6425 reg = expr->reaching_reg;
589005ff 6426
a13d4ebf
AM
6427 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6428 {
6429 del = XEXP (i, 0);
e2d2ed72 6430 if (BLOCK_FOR_INSN (del) == bb)
a13d4ebf 6431 {
589005ff 6432 /* We know there is only one since we deleted redundant
a13d4ebf 6433 ones during the available computation. */
d088acea 6434 replace_store_insn (reg, del, bb, expr);
a13d4ebf AM 6435 break;
6436 }
6437 }
6438}
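/* The loop above is the usual EXPR_LIST walk: XEXP (node, 0) is the
   payload (here an insn) and XEXP (node, 1) links to the next node.  A
   minimal generic sketch of the idiom, with a hypothetical callback:

       rtx node;
       for (node = list_head; node; node = XEXP (node, 1))
         process_insn (XEXP (node, 0));
*/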
6439
6440/* Free memory used by store motion. */
6441
589005ff 6442static void
1d088dee 6443free_store_memory (void)
a13d4ebf AM 6444 {
6445 free_ldst_mems ();
589005ff 6446
a13d4ebf 6447 if (ae_gen)
5a660bff 6448 sbitmap_vector_free (ae_gen);
a13d4ebf 6449 if (ae_kill)
5a660bff 6450 sbitmap_vector_free (ae_kill);
a13d4ebf 6451 if (transp)
5a660bff 6452 sbitmap_vector_free (transp);
a13d4ebf 6453 if (st_antloc)
5a660bff 6454 sbitmap_vector_free (st_antloc);
a13d4ebf 6455 if (pre_insert_map)
5a660bff 6456 sbitmap_vector_free (pre_insert_map);
a13d4ebf 6457 if (pre_delete_map)
5a660bff 6458 sbitmap_vector_free (pre_delete_map);
aaa4ca30 AJ 6459 if (reg_set_in_block)
6460 sbitmap_vector_free (reg_set_in_block);
589005ff 6461
a13d4ebf AM 6462 ae_gen = ae_kill = transp = st_antloc = NULL;
6463 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6464}
6465
6466 /* Perform store motion. Much like gcse, except that stores are moved in
6467 the opposite direction, by solving an LCM problem on the reverse flowgraph. */
6468
6469static void
1d088dee 6470store_motion (void)
a13d4ebf 6471{
e0082a72 6472 basic_block bb;
0b17ab2f 6473 int x;
a13d4ebf 6474 struct ls_expr * ptr;
adfcce61 6475 int update_flow = 0;
aaa4ca30 6476
10d22567 6477 if (dump_file)
a13d4ebf 6478 {
10d22567 ZD 6479 fprintf (dump_file, "before store motion\n");
6480 print_rtl (dump_file, get_insns ());
a13d4ebf AM 6481 }
6482
a13d4ebf 6483 init_alias_analysis ();
aaa4ca30 6484
47a3dae1 6485 /* Find all the available and anticipatable stores. */
a13d4ebf AM 6486 num_stores = compute_store_table ();
6487 if (num_stores == 0)
6488 {
9727e468 RG 6489 htab_delete (pre_ldst_table);
6490 pre_ldst_table = NULL;
aaa4ca30 6491 sbitmap_vector_free (reg_set_in_block);
a13d4ebf AM 6492 end_alias_analysis ();
6493 return;
6494 }
6495
47a3dae1 6496 /* Now compute kill & transp vectors. */
a13d4ebf 6497 build_store_vectors ();
47a3dae1 6498 add_noreturn_fake_exit_edges ();
2a868ea4 6499 connect_infinite_loops_to_exit ();
a13d4ebf 6500
10d22567 6501 edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
589005ff 6502 st_antloc, ae_kill, &pre_insert_map,
a13d4ebf AM 6503 &pre_delete_map);
6504
6505 /* Now we want to insert the new stores which are going to be needed. */
6506 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6507 {
b16aa8a5 RK 6508 /* If any of the edges we have above are abnormal, we can't move this
6509 store. */
6510 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6511 if (TEST_BIT (pre_insert_map[x], ptr->index)
6512 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6513 break;
6514
6515 if (x >= 0)
6516 {
10d22567 ZD 6517 if (dump_file != NULL)
6518 fprintf (dump_file,
b16aa8a5 RK 6519 "Can't replace store %d: abnormal edge from %d to %d\n",
6520 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6521 INDEX_EDGE (edge_list, x)->dest->index);
6522 continue;
6523 }
6524
6525 /* Delete the stores which LCM found redundant, then insert the replacement stores on the edges LCM selected. */
6526
e0082a72 ZD 6527 FOR_EACH_BB (bb)
6528 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6529 delete_store (ptr, bb);
a13d4ebf 6530
0b17ab2f RH 6531 for (x = 0; x < NUM_EDGES (edge_list); x++)
6532 if (TEST_BIT (pre_insert_map[x], ptr->index))
6533 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
a13d4ebf AM 6534 }
6535
6536 if (update_flow)
6537 commit_edge_insertions ();
aaa4ca30 6538
a13d4ebf AM 6539 free_store_memory ();
6540 free_edge_list (edge_list);
6809cbf9 6541 remove_fake_exit_edges ();
a13d4ebf AM 6542 end_alias_analysis ();
6543}
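/* A worked example of the overall effect, with hypothetical RTL and
   assuming nothing else in the loop reads or clobbers the mem or
   modifies reg 5:

       loop:
         (set (mem:SI (reg:SI 100)) (reg:SI 5))
         ...
         (jump_insn ... loop)

   The store is anticipatable on every iteration and available on the
   loop's exit, so the reverse LCM problem deletes it from the loop body
   (the stored value is kept in reaching_reg) and a single replacement
   store is inserted on the exit edge, executing once instead of once per
   iteration.  */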
e2500fed 6544
a0134312 RS 6545\f
6546/* Entry point for jump bypassing optimization pass. */
6547
65727068 6548static int
10d22567 6549bypass_jumps (void)
a0134312 RS 6550 {
6551 int changed;
6552
6553 /* We do not construct an accurate cfg in functions which call
6554 setjmp, so just punt to be safe. */
6555 if (current_function_calls_setjmp)
6556 return 0;
6557
a0134312 RS 6558 /* Identify the basic block information for this function, including
6559 successors and predecessors. */
6560 max_gcse_regno = max_reg_num ();
6561
10d22567 6562 if (dump_file)
5b4fdb20 6563 dump_flow_info (dump_file, dump_flags);
a0134312 6564
6614fd40 6565 /* Return if there's nothing to do, or it is too expensive. */
ab9a1ff8 SB 6566 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
6567 || is_too_expensive (_("jump bypassing disabled")))
a0134312 RS 6568 return 0;
6569
a0134312 RS 6570 gcc_obstack_init (&gcse_obstack);
6571 bytes_used = 0;
6572
6573 /* We need alias. */
6574 init_alias_analysis ();
6575
6576 /* Record where pseudo-registers are set. This data is kept accurate
6577 during each pass. ??? We could also record hard-reg information here
6578 [since it's unchanging], however it is currently done during hash table
6579 computation.
6580
6581 It may be tempting to compute MEM set information here too, but MEM sets
6582 will be subject to code motion one day and thus we need to compute
6583 information about memory sets when we build the hash tables. */
6584
6585 alloc_reg_set_mem (max_gcse_regno);
eb232f4e 6586 compute_sets ();
a0134312 RS 6587
6588 max_gcse_regno = max_reg_num ();
eb232f4e SB 6589 alloc_gcse_mem ();
6590 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
a0134312 RS 6591 free_gcse_mem ();
6592
10d22567 6593 if (dump_file)
a0134312 6594 {
10d22567 6595 fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
faed5cc3 6596 current_function_name (), n_basic_blocks);
10d22567 6597 fprintf (dump_file, "%d bytes\n\n", bytes_used);
a0134312 RS 6598 }
6599
6600 obstack_free (&gcse_obstack, NULL);
6601 free_reg_set_mem ();
6602
6603 /* We are finished with alias. */
6604 end_alias_analysis ();
a0134312 RS 6605
6606 return changed;
6607}
6608
d128effb NS 6609 /* Return true if the graph is too expensive to optimize. PASS is the
6610 optimization about to be performed. */
6611
6612static bool
6613is_too_expensive (const char *pass)
6614{
6615 /* Trying to perform global optimizations on flow graphs which have
6616 a high connectivity will take a long time and is unlikely to be
6617 particularly useful.
7b1b4aed 6618
d128effb NS 6619 In normal circumstances a cfg should have about twice as many
6620 edges as blocks. But we do not want to punish small functions
6621 which have a couple of switch statements. Rather than simply
6622 thresholding the number of blocks, use something with a more
6623 graceful degradation. */
6624 if (n_edges > 20000 + n_basic_blocks * 4)
6625 {
44c21c7f DD 6626 warning (OPT_Wdisabled_optimization,
6627 "%s: %d basic blocks and %d edges/basic block",
6628 pass, n_basic_blocks, n_edges / n_basic_blocks);
7b1b4aed 6629
d128effb NS 6630 return true;
6631 }
6632
6633 /* If allocating memory for the cprop bitmap would take up too much
6634 storage it's better just to disable the optimization. */
6635 if ((n_basic_blocks
6636 * SBITMAP_SET_SIZE (max_reg_num ())
6637 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6638 {
44c21c7f DD 6639 warning (OPT_Wdisabled_optimization,
6640 "%s: %d basic blocks and %d registers",
6641 pass, n_basic_blocks, max_reg_num ());
d128effb NS 6642
6643 return true;
6644 }
6645
6646 return false;
6647}
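/* A worked example with hypothetical numbers: for 5000 basic blocks the
   edge limit is 20000 + 5000 * 4 = 40000, i.e. an average of 8 edges per
   block before the pass is skipped.  For the memory check, assuming
   64-bit sbitmap elements, 5000 blocks and 50000 pseudos need roughly
   5000 * ceil (50000 / 64) * 8 bytes, about 31 MB, which is compared
   against MAX_GCSE_MEMORY.  */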
ef330312 PB 6648\f
6649static bool
6650gate_handle_jump_bypass (void)
6651{
6652 return optimize > 0 && flag_gcse;
6653}
6654
6655/* Perform jump bypassing and control flow optimizations. */
c2924966 6656static unsigned int
ef330312 PB 6657 rest_of_handle_jump_bypass (void)
6658{
6fb5fa3c 6659 delete_unreachable_blocks ();
10d22567 6660 if (bypass_jumps ())
ef330312 6661 {
ef330312 6662 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6fb5fa3c DB 6663 rebuild_jump_labels (get_insns ());
6664 cleanup_cfg (0);
ef330312 6665 }
c2924966 6666 return 0;
ef330312 PB 6667 }
6668
6669struct tree_opt_pass pass_jump_bypass =
6670{
6671 "bypass", /* name */
6672 gate_handle_jump_bypass, /* gate */
6673 rest_of_handle_jump_bypass, /* execute */
6674 NULL, /* sub */
6675 NULL, /* next */
6676 0, /* static_pass_number */
6677 TV_BYPASS, /* tv_id */
6678 0, /* properties_required */
6679 0, /* properties_provided */
6680 0, /* properties_destroyed */
6681 0, /* todo_flags_start */
6682 TODO_dump_func |
6683 TODO_ggc_collect | TODO_verify_flow, /* todo_flags_finish */
6684 'G' /* letter */
6685};
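/* This descriptor is consumed by the pass manager; the chaining itself
   lives in passes.c (presumably via NEXT_PASS (pass_jump_bypass) in
   init_optimization_passes), and rest_of_handle_jump_bypass runs as the
   execute hook whenever gate_handle_jump_bypass returns true.  */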
6686
6687
6688static bool
6689gate_handle_gcse (void)
6690{
6691 return optimize > 0 && flag_gcse;
6692}
6693
6694
c2924966 6695static unsigned int
ef330312 PB 6696 rest_of_handle_gcse (void)
6697{
6698 int save_csb, save_cfj;
6699 int tem2 = 0, tem;
10d22567 6700 tem = gcse_main (get_insns ());
ef330312 6701 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6fb5fa3c 6702 rebuild_jump_labels (get_insns ());
ef330312 PB 6703 save_csb = flag_cse_skip_blocks;
6704 save_cfj = flag_cse_follow_jumps;
6705 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
6706
6707 /* If -fexpensive-optimizations, re-run CSE to clean up things done
6708 by gcse. */
6709 if (flag_expensive_optimizations)
6710 {
6711 timevar_push (TV_CSE);
10d22567 6712 tem2 = cse_main (get_insns (), max_reg_num ());
ef330312 PB 6713 purge_all_dead_edges ();
6714 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6715 timevar_pop (TV_CSE);
6716 cse_not_expected = !flag_rerun_cse_after_loop;
6717 }
6718
6719 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
6720 things up. */
6721 if (tem || tem2)
6722 {
6723 timevar_push (TV_JUMP);
6724 rebuild_jump_labels (get_insns ());
6fb5fa3c 6725 cleanup_cfg (0);
ef330312 PB 6726 timevar_pop (TV_JUMP);
6727 }
6728
6729 flag_cse_skip_blocks = save_csb;
6730 flag_cse_follow_jumps = save_cfj;
c2924966 6731 return 0;
ef330312 PB 6732 }
6733
6734struct tree_opt_pass pass_gcse =
6735{
6736 "gcse1", /* name */
6737 gate_handle_gcse, /* gate */
6738 rest_of_handle_gcse, /* execute */
6739 NULL, /* sub */
6740 NULL, /* next */
6741 0, /* static_pass_number */
6742 TV_GCSE, /* tv_id */
6743 0, /* properties_required */
6744 0, /* properties_provided */
6745 0, /* properties_destroyed */
6746 0, /* todo_flags_start */
6fb5fa3c 6747 TODO_df_finish |
ef330312 PB 6748 TODO_dump_func |
6749 TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */
6750 'G' /* letter */
6751};
6752
d128effb 6753
e2500fed 6754#include "gt-gcse.h"