/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much, since it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
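
/* A worked illustration of partial redundancy (hypothetical source, not
   taken from any actual test case).  In

	if (c)
	  x = a + b;
	else
	  x = 0;
	y = a + b;

   the computation of a + b feeding y is partially redundant: it is
   redundant when control comes through the first arm, but not through the
   else arm.  PRE inserts t = a + b on the else path (step 4 above), copies
   the result of the existing computation into the same new pseudo t
   (step 5), and replaces the redundant computation with y = t, the
   deletion having been done first (step 3) so we know which pseudo
   to use.  */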
\f
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression table or the copy propagation
     (set) table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

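/* For illustration (the UIDs here are made up): if a block contains real
   insns with UIDs 4, 9 and 17, separated by notes, then uid_cuid[4] = 0,
   uid_cuid[9] = 1 and uid_cuid[17] = 2, while each note maps to the cuid
   of the next real insn.  Comparing cuids therefore gives the relative
   order of two insns cheaply; see alloc_gcse_mem below.  */
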
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves, i.e. loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must
   have no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};

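/* An illustration of the load/store motion this enables (hypothetical
   source, not from the compiler).  In

	while (--n)
	  *p += n;

   the MEM *p is loaded and stored on every iteration but is not killed by
   anything else in the loop, so the load can be hoisted before the loop
   and the store sunk after it, using a single reaching register r:

	r = *p;
	while (--n)
	  r += n;
	*p = r;
   */
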
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;
\f
/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  df_note_add_problem ();
  df_analyze ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (dump_file)
        fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem ();

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, false, false);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
        /* Do nothing.  */ ;
      else
        {
          timevar_push (TV_PRE);
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets ();
          run_jump_opt_after_gcse = 1;
          timevar_pop (TV_PRE);
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
        {
          timevar_push (TV_HOIST);
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem ();
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
          timevar_pop (TV_HOIST);
        }

      if (dump_file)
        {
          fprintf (dump_file, "\n");
          fflush (dump_file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, true, true);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insns that are not inside a basic block,
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

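/* As an example (a made-up block, not from a test case), consider a basic
   block containing

	x = a + b;
	a = 1;
	y = a + b;

   Here a + b is locally anticipatable (its first occurrence precedes any
   modification of its operands), and locally available (its last occurrence
   is not followed by a modification of a or b), but it is not transparent,
   because a is set within the block.  */
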
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

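/* For illustration (not from the sources): a computation such as a + b,
   i.e. (plus:SI (reg:SI 100) (reg:SI 101)) with made-up register numbers,
   passes want_to_gcse_p provided the target can recognize
   (set (reg) (plus ...)) without adding hard-register clobbers.  A bare
   (reg) or constant is rejected above, presumably because copy and
   constant propagation already handle those cases.  */
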
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly MEM, then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

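/* For example (register numbers made up), (set (reg 100) (reg 101)) and
   (set (reg 100) (const_int 7)) fall in the same bucket, since both set
   register 100.  lookup_set and next_set can then walk every recorded set
   of a given register when propagating copies and constants.  */
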
/* Return nonzero if X is equivalent to Y.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	antic_occr = NULL;

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  antic_occr->insn = insn;
	  antic_occr->next = cur_expr->antic_occr;
	  antic_occr->deleted_p = 0;
	  cur_expr->antic_occr = antic_occr;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  avail_occr->insn = insn;
	  avail_occr->next = cur_expr->avail_occr;
	  avail_occr->deleted_p = 0;
	  cur_expr->avail_occr = avail_occr;
	}
    }
}
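
/* To illustrate the occurrence bookkeeping above: if a block computes
   the same expression twice, in (hypothetical) insns I1 and then I2,
   the antic_occr list keeps I1, the first occurrence in the block,
   while the avail_occr entry is overwritten to I2, the last one,
   matching how anticipatability and availability are defined.  */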

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}

/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers constant, as long as
     they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
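
/* For example, (compare:CC (reg:SI 100) (reg:SI 100)) is accepted as
   "constant" by the second test above even though it fails CONSTANT_P,
   since comparing a non-float register against itself always computes
   the same value.  (The register number is only illustrative.)  */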

/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* See if a REG_NOTE shows this equivalent to a simpler expression.
	 This allows us to do a single GCSE pass and still eliminate
	 redundant constants, addresses or other expressions that are
	 constructed with multiple instructions.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
	  && (table->set_p
	      ? gcse_constant_p (XEXP (note, 0))
	      : want_to_gcse_p (XEXP (note, 0))))
	src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has an attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes, and if the argument slot is used somewhere
	     explicitly, it means the address of the parameter has been
	     taken, so we should not extend the lifetime of the pseudo.  */
	  && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  The latter condition does not have to mean that
	     SRC itself is not anticipatable, but we just will not be
	     able to handle code motion of insns with multiple sets.  */
	  int antic_p = oprs_anticipatable_p (src, insn)
			&& !multiple_sets (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
	}

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((REG_P (src)
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p (GET_MODE (dest))
		    && REGNO (src) != regno)
		   || gcse_constant_p (src))
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BB_END (BLOCK_FOR_INSN (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn, table);
    }
  /* In case of a store we want to consider the memory value as available
     in the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instructions after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has an attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes, and if the argument slot is used somewhere
	     explicitly, it means the address of the parameter has been
	     taken, so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, table);
	}
    }
}
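
/* As a made-up example of an insn the expression table records: given

	(set (reg:SI 110) (plus:SI (reg:SI 100) (const_int 4)))

   the SET_SRC, (plus:SI (reg:SI 100) (const_int 4)), is entered into
   the expression table, provided reg 110 is a pseudo, SImode reg/reg
   copies are possible, and the insn passes the REG_EH_REGION, nop and
   REG_EQUIV checks above.  */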

static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If TABLE->SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
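
/* The dump format above comes out roughly as (all numbers invented
   for illustration):

	Expression hash table (251 buckets, 2 entries)
	Index 0 (hash value 17)
	  (plus:SI (reg:SI 100) (const_int 4))

   which is useful when debugging -fgcse problems.  */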

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}
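
/* For example, if a block contains sets of (hypothetical) reg 100 at
   CUIDs 5 and 9, then after scanning it reg_avail_info[100] ends up
   with first_set == 5, last_set == 9 and last_bb == current_bb, and
   bit 100 is set in the block's reg_set_in_block bitmap.  */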

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
		   void * v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
}
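
/* So after recording one store, the head of the block's list looks like

	(expr_list DEST (expr_list DEST_ADDR ...older entries...))

   i.e. the MEM itself first and its canonicalized address second,
   which is the order in which compute_transp peels the pairs back off
   when it checks recorded stores against a candidate expression.  */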

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    {
      /* Note that traversals of this loop (other than for freeing)
	 will break after encountering a CALL_INSN.  So, there's no
	 need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
	alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (blocks_with_calls, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest)
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      FOR_BB_INSNS (current_bb, insn)
	{
	  if (! INSN_P (insn))
	    continue;

	  if (CALL_P (insn))
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  record_last_reg_set_info (insn, regno);

	      mark_call (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
	  && implicit_sets[current_bb->index] != NULL_RTX)
	hash_scan_set (implicit_sets[current_bb->index],
		       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */
      in_libcall_block = 0;
      FOR_BB_INSNS (current_bb, insn)
	if (INSN_P (insn))
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	    hash_scan_insn (insn, table, in_libcall_block);
	    if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	  }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}

/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}

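/* A quick worked example of the sizing above: for a function of 1000
   insns, table->size starts as 1000 / 4 = 250, and the "|= 1" bumps it
   to 251, an odd bucket count; very small functions are clamped to at
   least 11 buckets.  */
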
/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}

/* Compute the hash TABLE, which is either the copy/const propagation
   table or the expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}

/* Expression tracking support.  */

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

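/* lookup_set and next_set are intended to be used together to visit
   every recorded SET of a given register, e.g. (sketch, REGNO being
   whatever register is of interest):

	struct expr *set;
	for (set = lookup_set (regno, &set_hash_table);
	     set != NULL;
	     set = next_set (regno, set))
	  ... examine SET_SRC (set->expr) ...;

   This is the idiom find_avail_set uses further below.  */
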
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
	free_EXPR_LIST_node (list);
      else
	free_INSN_LIST_node (list);
    }

  *listp = NULL;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables (void)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      free_INSN_LIST_list (modify_mem_list + i);
      free_insn_expr_list_list (canon_modify_mem_list + i);
    }
  bitmap_clear (modify_mem_list_set);
  bitmap_clear (blocks_with_calls);
}

/* Release memory used by modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}

/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (rtx x, rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
				  INSN_CUID (insn), x, 0))
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (MEM_P (dest))
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (REG_P (clob))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

2423/* For each block, compute whether X is transparent. X is either an
2424 expression or an assignment [though we don't care which, for this context
2425 an assignment is treated as an expression]. For each block where an
2426 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2427 bit in BMAP. */
7506f491
DE
2428
2429static void
1d088dee 2430compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
7506f491 2431{
e0082a72
ZD
2432 int i, j;
2433 basic_block bb;
7506f491 2434 enum rtx_code code;
c4c81601 2435 reg_set *r;
6f7d635c 2436 const char *fmt;
7506f491 2437
c4c81601
RK
2438 /* repeat is used to turn tail-recursion into iteration since GCC
2439 can't do it when there's no return value. */
7506f491
DE
2440 repeat:
2441
2442 if (x == 0)
2443 return;
2444
2445 code = GET_CODE (x);
2446 switch (code)
2447 {
2448 case REG:
c4c81601
RK
2449 if (set_p)
2450 {
2451 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2452 {
e0082a72
ZD
2453 FOR_EACH_BB (bb)
2454 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2455 SET_BIT (bmap[bb->index], indx);
c4c81601
RK
2456 }
2457 else
2458 {
2459 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
ed425871 2460 SET_BIT (bmap[r->bb_index], indx);
c4c81601
RK
2461 }
2462 }
2463 else
2464 {
2465 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2466 {
e0082a72
ZD
2467 FOR_EACH_BB (bb)
2468 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2469 RESET_BIT (bmap[bb->index], indx);
c4c81601
RK
2470 }
2471 else
2472 {
2473 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
ed425871 2474 RESET_BIT (bmap[r->bb_index], indx);
c4c81601
RK
2475 }
2476 }
7506f491 2477
c4c81601 2478 return;
7506f491
DE
2479
2480 case MEM:
16c5b95d
MH
2481 if (! MEM_READONLY_P (x))
2482 {
2483 bitmap_iterator bi;
2484 unsigned bb_index;
aa47fcfa 2485
16c5b95d
MH
2486 /* First handle all the blocks with calls. We don't need to
2487 do any list walking for them. */
2488 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2489 {
2490 if (set_p)
2491 SET_BIT (bmap[bb_index], indx);
2492 else
2493 RESET_BIT (bmap[bb_index], indx);
2494 }
aa47fcfa 2495
16c5b95d
MH
2496 /* Now iterate over the blocks which have memory modifications
2497 but which do not have any calls. */
2498 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2499 blocks_with_calls,
2500 0, bb_index, bi)
aa47fcfa 2501 {
16c5b95d 2502 rtx list_entry = canon_modify_mem_list[bb_index];
aa47fcfa 2503
16c5b95d 2504 while (list_entry)
aa47fcfa 2505 {
16c5b95d
MH
2506 rtx dest, dest_addr;
2507
2508 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2509 Examine each hunk of memory that is modified. */
2510
2511 dest = XEXP (list_entry, 0);
2512 list_entry = XEXP (list_entry, 1);
2513 dest_addr = XEXP (list_entry, 0);
2514
2515 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2516 x, rtx_addr_varies_p))
2517 {
2518 if (set_p)
2519 SET_BIT (bmap[bb_index], indx);
2520 else
2521 RESET_BIT (bmap[bb_index], indx);
2522 break;
2523 }
2524 list_entry = XEXP (list_entry, 1);
2525 }
aa47fcfa 2526 }
16c5b95d 2527 }
c4c81601 2528
7506f491
DE
2529 x = XEXP (x, 0);
2530 goto repeat;
2531
2532 case PC:
2533 case CC0: /*FIXME*/
2534 case CONST:
2535 case CONST_INT:
2536 case CONST_DOUBLE:
69ef87e2 2537 case CONST_VECTOR:
7506f491
DE
2538 case SYMBOL_REF:
2539 case LABEL_REF:
2540 case ADDR_VEC:
2541 case ADDR_DIFF_VEC:
2542 return;
2543
2544 default:
2545 break;
2546 }
2547
c4c81601 2548 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2549 {
2550 if (fmt[i] == 'e')
2551 {
7506f491
DE
2552 /* If we are about to do the last recursive call
2553 needed at this level, change it into iteration.
2554 This function is called enough to be worth it. */
2555 if (i == 0)
2556 {
c4c81601 2557 x = XEXP (x, i);
7506f491
DE
2558 goto repeat;
2559 }
c4c81601
RK
2560
2561 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
2562 }
2563 else if (fmt[i] == 'E')
c4c81601
RK
2564 for (j = 0; j < XVECLEN (x, i); j++)
2565 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
2566 }
2567}
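
/* Concretely: an expression such as (plus:SI (reg:SI 100) (const_int 4))
   stays transparent in every block that does not set reg 100, while a
   MEM expression additionally loses transparency in blocks containing
   a call or a potentially aliasing store, which is exactly what the
   bitmap updates above compute.  (Register number illustrative.)  */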

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}

/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  /* Usually we substitute easy stuff, so we won't copy everything.
     We however need to take care to not duplicate non-trivial CONST
     expressions.  */
  to = copy_rtx (to);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a REG_EQUAL note, update the expression in it
     with our replacement.  */
  if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
    set_unique_reg_note (insn, REG_EQUAL,
			 simplify_replace_rtx (XEXP (note, 0), from, to));
  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
	 source of the set given our substitution.  We could perhaps try
	 this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;

      /* If we've failed to do replacement, have a single SET, don't already
	 have a note, and have no special SET, add a REG_EQUAL note to not
	 lose information.  */
      if (!success && note == 0 && set != 0
	  && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
	  && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
	note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  /* A REG_EQUAL note may get simplified into a register.  We don't allow
     that, so remove the note.  This case ought not to happen, because the
     previous code ought to synthesize a reg-reg move, but be on the safe
     side.  */
  if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X), making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      gcc_assert (GET_CODE (set->expr) == SET);

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (! REG_P (src))
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else
    note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute that given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
		  ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
	 to one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  global_const_prop_count++;
  if (dump_file != NULL)
    {
      fprintf (dump_file,
	       "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
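
/* For example, if FROM is (hypothetical) reg 100 and SRC is
   (const_int 0), a jump whose pattern is

	(set (pc) (if_then_else (eq (reg:SI 100) (const_int 0))
				(label_ref 23) (pc)))

   simplifies to (set (pc) (label_ref 23)), an unconditional jump; had
   the condition folded the other way, the SET_SRC would have become
   pc_rtx and the jump would have been deleted above.  */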

static bool
constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (NONJUMP_INSN_P (insn)
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}

/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (rtx insn, int alter_jumps)
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      gcc_assert (GET_CODE (pat) == SET);

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      global_const_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
		  print_rtl (dump_file, src);
		  fprintf (dump_file, "\n");
		}
	      if (INSN_DELETED_P (insn))
		return 1;
	    }
	}
      else if (REG_P (src)
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      global_copy_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (dump_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}

/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
	 stack pushes for function arguments, but handle all of the
	 codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
	 the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
	return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}

8ba46434
R
3052/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3053 their REG_EQUAL notes need updating. */
e197b6fc 3054
ae860ff7 3055static bool
eb232f4e 3056do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
ae860ff7
JH
3057{
3058 rtx newreg = NULL, newcnst = NULL;
3059
e197b6fc
RH
3060 /* Rule out USE instructions and ASM statements as we don't want to
3061 change the hard registers mentioned. */
7b1b4aed 3062 if (REG_P (x)
ae860ff7 3063 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
e197b6fc
RH
3064 || (GET_CODE (PATTERN (insn)) != USE
3065 && asm_noperands (PATTERN (insn)) < 0)))
ae860ff7
JH
3066 {
3067 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3068 struct elt_loc_list *l;
3069
3070 if (!val)
3071 return false;
3072 for (l = val->locs; l; l = l->next)
3073 {
3074 rtx this_rtx = l->loc;
46690369
JH
3075 rtx note;
3076
5976e643
RS
3077 /* Don't CSE non-constant values out of libcall blocks. */
3078 if (l->in_libcall && ! CONSTANT_P (this_rtx))
9635cfad
JH
3079 continue;
3080
6b2d1c9e 3081 if (gcse_constant_p (this_rtx))
ae860ff7 3082 newcnst = this_rtx;
46690369
JH
3083 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3084 /* Don't copy propagate if it has attached REG_EQUIV note.
3085 At this point this only function parameters should have
3086 REG_EQUIV notes and if the argument slot is used somewhere
3087 explicitly, it means address of parameter has been taken,
3088 so we should not extend the lifetime of the pseudo. */
3089 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
7b1b4aed 3090 || ! MEM_P (XEXP (note, 0))))
ae860ff7
JH
3091 newreg = this_rtx;
3092 }
3093 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3094 {
8ba46434 3095 /* If we find a case where we can't fix the retval REG_EQUAL notes
fbe5a4a6 3096 match the new register, we either have to abandon this replacement
8ba46434 3097 or fix delete_trivially_dead_insns to preserve the setting insn,
6fb5fa3c 3098 or make it delete the REG_EQUAL note, and fix up all passes that
8ba46434 3099 require the REG_EQUAL note there. */
282899df
NS
3100 bool adjusted;
3101
3102 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3103 gcc_assert (adjusted);
3104
10d22567 3105 if (dump_file != NULL)
ae860ff7 3106 {
10d22567 3107 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
ae860ff7 3108 REGNO (x));
10d22567 3109 fprintf (dump_file, "insn %d with constant ",
ae860ff7 3110 INSN_UID (insn));
10d22567
ZD
3111 print_rtl (dump_file, newcnst);
3112 fprintf (dump_file, "\n");
ae860ff7 3113 }
27fb79ad 3114 local_const_prop_count++;
ae860ff7
JH
3115 return true;
3116 }
3117 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3118 {
8ba46434 3119 adjust_libcall_notes (x, newreg, insn, libcall_sp);
10d22567 3120 if (dump_file != NULL)
ae860ff7 3121 {
10d22567 3122 fprintf (dump_file,
ae860ff7
JH
3123 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3124 REGNO (x), INSN_UID (insn));
10d22567 3125 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
ae860ff7 3126 }
27fb79ad 3127 local_copy_prop_count++;
ae860ff7
JH
3128 return true;
3129 }
3130 }
3131 return false;
3132}
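/* Worked example (illustrative only, not from the original sources): within
   one basic block, given
       r20 <- 7
       r21 <- r20 + r22
   cselib records that r20 currently holds the constant 7, so the use of r20
   in the second insn satisfies gcse_constant_p and constprop_register
   rewrites it to
       r21 <- 7 + r22
   which the dump file reports as a LOCAL CONST-PROP replacement. */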
3133
8ba46434
R
3134/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3135 their REG_EQUAL notes need updating to reflect that OLDREG has been
f4e3e618
RH
3136 replaced with NEWVAL in INSN. Return true if all substitutions could
3137 be made. */
8ba46434 3138static bool
1d088dee 3139adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
8ba46434 3140{
f4e3e618 3141 rtx end;
8ba46434
R
3142
3143 while ((end = *libcall_sp++))
3144 {
f4e3e618 3145 rtx note = find_reg_equal_equiv_note (end);
8ba46434
R
3146
3147 if (! note)
3148 continue;
3149
3150 if (REG_P (newval))
3151 {
3152 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3153 {
3154 do
3155 {
3156 note = find_reg_equal_equiv_note (end);
3157 if (! note)
3158 continue;
3159 if (reg_mentioned_p (newval, XEXP (note, 0)))
3160 return false;
3161 }
3162 while ((end = *libcall_sp++));
3163 return true;
3164 }
3165 }
5976e643 3166 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
6fb5fa3c 3167 df_notes_rescan (end);
8ba46434
R
3168 insn = end;
3169 }
3170 return true;
3171}
3172
3173#define MAX_NESTED_LIBCALLS 9
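/* Background note (an assumption spelled out for the reader, not original
   commentary): a libcall block is the insn sequence bracketed by a
   REG_LIBCALL note on its first insn and a REG_RETVAL note on its last.
   Such blocks may nest, e.g. when one library call computes an operand of
   another, so a small fixed-depth stack of end-of-libcall insns suffices to
   track the regions currently open. */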
3174
eb232f4e
SB
3175/* Do local const/copy propagation (i.e. within each basic block).
3176 If ALTER_JUMPS is true, allow propagating into jump insns, which
3177 could modify the CFG. */
3178
ae860ff7 3179static void
eb232f4e 3180local_cprop_pass (bool alter_jumps)
ae860ff7 3181{
eb232f4e 3182 basic_block bb;
ae860ff7
JH
3183 rtx insn;
3184 struct reg_use *reg_used;
8ba46434 3185 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
1649d92f 3186 bool changed = false;
ae860ff7 3187
463301c3 3188 cselib_init (false);
8ba46434
R
3189 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3190 *libcall_sp = 0;
eb232f4e 3191 FOR_EACH_BB (bb)
ae860ff7 3192 {
eb232f4e 3193 FOR_BB_INSNS (bb, insn)
ae860ff7 3194 {
eb232f4e 3195 if (INSN_P (insn))
ae860ff7 3196 {
eb232f4e 3197 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
ae860ff7 3198
eb232f4e
SB
3199 if (note)
3200 {
3201 gcc_assert (libcall_sp != libcall_stack);
3202 *--libcall_sp = XEXP (note, 0);
3203 }
3204 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3205 if (note)
3206 libcall_sp++;
3207 note = find_reg_equal_equiv_note (insn);
3208 do
3209 {
3210 reg_use_count = 0;
3211 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3212 NULL);
3213 if (note)
3214 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3215
3216 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3217 reg_used++, reg_use_count--)
6fb5fa3c
DB
3218 {
3219 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3220 libcall_sp))
3221 {
3222 changed = true;
3223 break;
3224 }
3225 }
eb232f4e 3226 if (INSN_DELETED_P (insn))
1649d92f 3227 break;
eb232f4e
SB
3228 }
3229 while (reg_use_count);
ae860ff7 3230 }
eb232f4e 3231 cselib_process_insn (insn);
ae860ff7 3232 }
eb232f4e
SB
3233
3234 /* Forget everything at the end of a basic block. Make sure we are
3235 not inside a libcall; libcalls should never cross basic blocks. */
3236 cselib_clear_table ();
3237 gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
ae860ff7 3238 }
eb232f4e 3239
ae860ff7 3240 cselib_finish ();
eb232f4e 3241
1649d92f
JH
3242 /* Global analysis may get into infinite loops for unreachable blocks. */
3243 if (changed && alter_jumps)
5f0bea72
JH
3244 {
3245 delete_unreachable_blocks ();
3246 free_reg_set_mem ();
3247 alloc_reg_set_mem (max_reg_num ());
eb232f4e 3248 compute_sets ();
5f0bea72 3249 }
ae860ff7
JH
3250}
3251
c4c81601 3252/* Forward propagate copies. This includes copies and constants. Return
cc2902df 3253 nonzero if a change was made. */
7506f491
DE
3254
3255static int
1d088dee 3256cprop (int alter_jumps)
7506f491 3257{
e0082a72
ZD
3258 int changed;
3259 basic_block bb;
7506f491
DE
3260 rtx insn;
3261
3262 /* Note we start at block 1. */
e0082a72
ZD
3263 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3264 {
10d22567
ZD
3265 if (dump_file != NULL)
3266 fprintf (dump_file, "\n");
e0082a72
ZD
3267 return 0;
3268 }
7506f491
DE
3269
3270 changed = 0;
e0082a72 3271 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3272 {
3273 /* Reset tables used to keep track of what's still valid [since the
3274 start of the block]. */
3275 reset_opr_set_tables ();
3276
eb232f4e 3277 FOR_BB_INSNS (bb, insn)
172890a2
RK
3278 if (INSN_P (insn))
3279 {
ae860ff7 3280 changed |= cprop_insn (insn, alter_jumps);
7506f491 3281
172890a2
RK
3282 /* Keep track of everything modified by this insn. */
3283 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3284 call mark_oprs_set if we turned the insn into a NOTE. */
7b1b4aed 3285 if (! NOTE_P (insn))
172890a2 3286 mark_oprs_set (insn);
8e42ace1 3287 }
7506f491
DE
3288 }
3289
10d22567
ZD
3290 if (dump_file != NULL)
3291 fprintf (dump_file, "\n");
7506f491
DE
3292
3293 return changed;
3294}
3295
fbef91d8
RS
3296/* Similar to get_condition, only the resulting condition must be
3297 valid at JUMP, instead of at EARLIEST.
3298
3299 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3300 settle for the condition variable in the jump instruction being integral.
3301 We prefer to be able to record the value of a user variable, rather than
3302 the value of a temporary used in a condition. This could be solved by
aabcd309 3303 recording the value of *every* register scanned by canonicalize_condition,
fbef91d8
RS
3304 but this would require some code reorganization. */
3305
2fa4a849 3306rtx
1d088dee 3307fis_get_condition (rtx jump)
fbef91d8 3308{
45d09c02 3309 return get_condition (jump, NULL, false, true);
fbef91d8
RS
3310}
3311
b0656d8b
JW
3312/* Check the comparison COND to see if we can safely form an implicit set from
3313 it. COND is either an EQ or NE comparison. */
3314
3315static bool
3316implicit_set_cond_p (rtx cond)
3317{
3318 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3319 rtx cst = XEXP (cond, 1);
3320
3321 /* We can't perform this optimization if either operand might be or might
3322 contain a signed zero. */
3323 if (HONOR_SIGNED_ZEROS (mode))
3324 {
3325 /* It is sufficient to check if CST is or contains a zero. We must
3326 handle float, complex, and vector. If any subpart is a zero, then
3327 the optimization can't be performed. */
3328 /* ??? The complex and vector checks are not implemented yet. We just
3329 always return zero for them. */
3330 if (GET_CODE (cst) == CONST_DOUBLE)
3331 {
3332 REAL_VALUE_TYPE d;
3333 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3334 if (REAL_VALUES_EQUAL (d, dconst0))
3335 return 0;
3336 }
3337 else
3338 return 0;
3339 }
3340
3341 return gcse_constant_p (cst);
3342}
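/* Illustrative example (not from the original sources): after
   "if (x == 0.0)" we may not record the implicit set "x = 0.0" when signed
   zeros are honored, because x could be -0.0 on the taken branch; -0.0
   compares equal to +0.0 yet the two values are distinguishable. */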
3343
fbef91d8
RS
3344/* Find the implicit sets of a function. An "implicit set" is a constraint
3345 on the value of a variable, implied by a conditional jump. For example,
3346 following "if (x == 2)", the then branch may be optimized as though the
3347 conditional performed an "explicit set", in this example, "x = 2". This
3348 function records the set patterns that are implicit at the start of each
3349 basic block. */
3350
3351static void
1d088dee 3352find_implicit_sets (void)
fbef91d8
RS
3353{
3354 basic_block bb, dest;
3355 unsigned int count;
3356 rtx cond, new;
3357
3358 count = 0;
3359 FOR_EACH_BB (bb)
a98ebe2e 3360 /* Check for more than one successor. */
628f6a4e 3361 if (EDGE_COUNT (bb->succs) > 1)
fbef91d8 3362 {
a813c111 3363 cond = fis_get_condition (BB_END (bb));
fbef91d8
RS
3364
3365 if (cond
3366 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7b1b4aed 3367 && REG_P (XEXP (cond, 0))
fbef91d8 3368 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
b0656d8b 3369 && implicit_set_cond_p (cond))
fbef91d8
RS
3370 {
3371 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3372 : FALLTHRU_EDGE (bb)->dest;
3373
c5cbcccf 3374 if (dest && single_pred_p (dest)
fbef91d8
RS
3375 && dest != EXIT_BLOCK_PTR)
3376 {
3377 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3378 XEXP (cond, 1));
3379 implicit_sets[dest->index] = new;
10d22567 3380 if (dump_file)
fbef91d8 3381 {
10d22567 3382 fprintf (dump_file, "Implicit set of reg %d in ",
fbef91d8 3383 REGNO (XEXP (cond, 0)));
10d22567 3384 fprintf (dump_file, "basic block %d\n", dest->index);
fbef91d8
RS
3385 }
3386 count++;
3387 }
3388 }
3389 }
3390
10d22567
ZD
3391 if (dump_file)
3392 fprintf (dump_file, "Found %d implicit sets\n", count);
fbef91d8
RS
3393}
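/* Illustration (added commentary, not from the original sources): for
       if (x == 2) goto bb7;
   where x lives in pseudo 105, the pattern recorded for bb7 is
       (set (reg 105) (const_int 2))
   and cprop can then use it exactly like an explicit assignment that
   dominates the block. */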
3394
7506f491 3395/* Perform one copy/constant propagation pass.
a0134312
RS
3396 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3397 propagation into conditional jumps. If BYPASS_JUMPS is true,
3398 perform conditional jump bypassing optimizations. */
7506f491
DE
3399
3400static int
eb232f4e 3401one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
7506f491
DE
3402{
3403 int changed = 0;
3404
27fb79ad
SB
3405 global_const_prop_count = local_const_prop_count = 0;
3406 global_copy_prop_count = local_copy_prop_count = 0;
7506f491 3407
a52b023a
PB
3408 if (cprop_jumps)
3409 local_cprop_pass (cprop_jumps);
ae860ff7 3410
fbef91d8 3411 /* Determine implicit sets. */
5ed6ace5 3412 implicit_sets = XCNEWVEC (rtx, last_basic_block);
fbef91d8
RS
3413 find_implicit_sets ();
3414
02280659
ZD
3415 alloc_hash_table (max_cuid, &set_hash_table, 1);
3416 compute_hash_table (&set_hash_table);
fbef91d8
RS
3417
3418 /* Free implicit_sets before peak usage. */
3419 free (implicit_sets);
3420 implicit_sets = NULL;
3421
10d22567
ZD
3422 if (dump_file)
3423 dump_hash_table (dump_file, "SET", &set_hash_table);
02280659 3424 if (set_hash_table.n_elems > 0)
7506f491 3425 {
02280659 3426 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 3427 compute_cprop_data ();
a0134312
RS
3428 changed = cprop (cprop_jumps);
3429 if (bypass_jumps)
0e3f0221 3430 changed |= bypass_conditional_jumps ();
7506f491
DE
3431 free_cprop_mem ();
3432 }
c4c81601 3433
02280659 3434 free_hash_table (&set_hash_table);
7506f491 3435
10d22567 3436 if (dump_file)
7506f491 3437 {
10d22567 3438 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
faed5cc3 3439 current_function_name (), pass, bytes_used);
10d22567 3440 fprintf (dump_file, "%d local const props, %d local copy props, ",
27fb79ad 3441 local_const_prop_count, local_copy_prop_count);
10d22567 3442 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
27fb79ad 3443 global_const_prop_count, global_copy_prop_count);
7506f491 3444 }
1649d92f
JH
3445 /* Global analysis may get into infinite loops for unreachable blocks. */
3446 if (changed && cprop_jumps)
3447 delete_unreachable_blocks ();
7506f491
DE
3448
3449 return changed;
3450}
3451\f
0e3f0221
RS
3452/* Bypass conditional jumps. */
3453
7821bfc7
RS
3454/* The value of last_basic_block at the beginning of the jump_bypass
3455 pass. The use of redirect_edge_and_branch_force may introduce new
3456 basic blocks, but the data flow analysis is only valid for basic
3457 block indices less than bypass_last_basic_block. */
3458
3459static int bypass_last_basic_block;
3460
0e3f0221
RS
3461/* Find a set of REGNO to a constant that is available at the end of basic
3462 block BB. Returns NULL if no such set is found. Based heavily upon
3463 find_avail_set. */
3464
3465static struct expr *
1d088dee 3466find_bypass_set (int regno, int bb)
0e3f0221
RS
3467{
3468 struct expr *result = 0;
3469
3470 for (;;)
3471 {
3472 rtx src;
ceda50e9 3473 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
3474
3475 while (set)
3476 {
3477 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3478 break;
3479 set = next_set (regno, set);
3480 }
3481
3482 if (set == 0)
3483 break;
3484
282899df 3485 gcc_assert (GET_CODE (set->expr) == SET);
0e3f0221
RS
3486
3487 src = SET_SRC (set->expr);
6b2d1c9e 3488 if (gcse_constant_p (src))
0e3f0221
RS
3489 result = set;
3490
7b1b4aed 3491 if (! REG_P (src))
0e3f0221
RS
3492 break;
3493
3494 regno = REGNO (src);
3495 }
3496 return result;
3497}
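/* Worked example (illustrative only): given the chain of sets
       r10 <- r12
       r12 <- (const_int 42)
   both available at the end of the block, a query for r10 first finds the
   copy "r10 <- r12", follows it to r12, and returns the constant set
   "r12 <- 42", which is what jump bypassing needs to fold a comparison
   against r10 at compile time. */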
3498
3499
e129b3f9
RS
3500/* Subroutine of bypass_block that checks whether a pseudo is killed by
3501 any of the instructions inserted on an edge. Jump bypassing places
3502 condition code setters on CFG edges using insert_insn_on_edge. This
3503 function is required to check that our data flow analysis is still
3504 valid prior to commit_edge_insertions. */
3505
3506static bool
1d088dee 3507reg_killed_on_edge (rtx reg, edge e)
e129b3f9
RS
3508{
3509 rtx insn;
3510
6de9cd9a 3511 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
e129b3f9
RS
3512 if (INSN_P (insn) && reg_set_p (reg, insn))
3513 return true;
3514
3515 return false;
3516}
3517
0e3f0221
RS
3518/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3519 basic block BB which has more than one predecessor. If not NULL, SETCC
3520 is the first instruction of BB, which is immediately followed by JUMP_INSN
3521 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
3522 Returns nonzero if a change was made.
3523
e0bb17a8 3524 During the jump bypassing pass, we may place copies of SETCC instructions
e129b3f9
RS
3525 on CFG edges. The following routine must be careful to pay attention to
3526 these inserted insns when performing its transformations. */
0e3f0221
RS
3527
3528static int
1d088dee 3529bypass_block (basic_block bb, rtx setcc, rtx jump)
0e3f0221
RS
3530{
3531 rtx insn, note;
628f6a4e 3532 edge e, edest;
818b6b7f 3533 int i, change;
72b8d451 3534 int may_be_loop_header;
628f6a4e
BE
3535 unsigned removed_p;
3536 edge_iterator ei;
0e3f0221
RS
3537
3538 insn = (setcc != NULL) ? setcc : jump;
3539
3540 /* Determine set of register uses in INSN. */
3541 reg_use_count = 0;
3542 note_uses (&PATTERN (insn), find_used_regs, NULL);
3543 note = find_reg_equal_equiv_note (insn);
3544 if (note)
3545 find_used_regs (&XEXP (note, 0), NULL);
3546
72b8d451 3547 may_be_loop_header = false;
628f6a4e 3548 FOR_EACH_EDGE (e, ei, bb->preds)
72b8d451
ZD
3549 if (e->flags & EDGE_DFS_BACK)
3550 {
3551 may_be_loop_header = true;
3552 break;
3553 }
3554
0e3f0221 3555 change = 0;
628f6a4e 3556 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
0e3f0221 3557 {
628f6a4e
BE
3558 removed_p = 0;
3559
7821bfc7 3560 if (e->flags & EDGE_COMPLEX)
628f6a4e
BE
3561 {
3562 ei_next (&ei);
3563 continue;
3564 }
7821bfc7
RS
3565
3566 /* We can't redirect edges from new basic blocks. */
3567 if (e->src->index >= bypass_last_basic_block)
628f6a4e
BE
3568 {
3569 ei_next (&ei);
3570 continue;
3571 }
7821bfc7 3572
72b8d451 3573 /* The irreducible loops created by redirecting edges entering the
e0bb17a8
KH
3574 loop from outside would decrease the effectiveness of some of the
3575 following optimizations, so prevent this. */
72b8d451
ZD
3576 if (may_be_loop_header
3577 && !(e->flags & EDGE_DFS_BACK))
628f6a4e
BE
3578 {
3579 ei_next (&ei);
3580 continue;
3581 }
72b8d451 3582
0e3f0221
RS
3583 for (i = 0; i < reg_use_count; i++)
3584 {
3585 struct reg_use *reg_used = &reg_use_table[i];
589005ff 3586 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 3587 basic_block dest, old_dest;
589005ff
KH
3588 struct expr *set;
3589 rtx src, new;
0e3f0221 3590
589005ff
KH
3591 if (regno >= max_gcse_regno)
3592 continue;
0e3f0221 3593
589005ff 3594 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
3595
3596 if (! set)
3597 continue;
3598
e129b3f9 3599 /* Check the data flow is valid after edge insertions. */
6de9cd9a 3600 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
e129b3f9
RS
3601 continue;
3602
589005ff 3603 src = SET_SRC (pc_set (jump));
0e3f0221
RS
3604
3605 if (setcc != NULL)
3606 src = simplify_replace_rtx (src,
589005ff
KH
3607 SET_DEST (PATTERN (setcc)),
3608 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
3609
3610 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 3611 SET_SRC (set->expr));
0e3f0221 3612
1d088dee 3613 /* Jump bypassing may have already placed instructions on
e129b3f9
RS
3614 edges of the CFG. We can't bypass an outgoing edge that
3615 has instructions associated with it, as these insns won't
3616 get executed if the incoming edge is redirected. */
3617
589005ff 3618 if (new == pc_rtx)
e129b3f9
RS
3619 {
3620 edest = FALLTHRU_EDGE (bb);
6de9cd9a 3621 dest = edest->insns.r ? NULL : edest->dest;
e129b3f9 3622 }
0e3f0221 3623 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
3624 {
3625 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3626 /* Don't bypass edges containing instructions. */
c7d1b449
KH
3627 edest = find_edge (bb, dest);
3628 if (edest && edest->insns.r)
3629 dest = NULL;
e129b3f9 3630 }
0e3f0221
RS
3631 else
3632 dest = NULL;
3633
a544524a
JH
3634 /* Avoid unification of the edge with other edges from the original
3635 branch. We would end up emitting the instruction on "both"
3636 edges. */
7b1b4aed 3637
c7d1b449
KH
3638 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3639 && find_edge (e->src, dest))
3640 dest = NULL;
a544524a 3641
818b6b7f 3642 old_dest = e->dest;
7821bfc7
RS
3643 if (dest != NULL
3644 && dest != old_dest
3645 && dest != EXIT_BLOCK_PTR)
3646 {
3647 redirect_edge_and_branch_force (e, dest);
3648
818b6b7f 3649 /* Copy the register setter to the redirected edge.
0e3f0221
RS
3650 Don't copy CC0 setters, as CC0 is dead after jump. */
3651 if (setcc)
3652 {
3653 rtx pat = PATTERN (setcc);
818b6b7f 3654 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
3655 insert_insn_on_edge (copy_insn (pat), e);
3656 }
3657
10d22567 3658 if (dump_file != NULL)
0e3f0221 3659 {
10d22567 3660 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
27fb79ad 3661 "in jump_insn %d equals constant ",
818b6b7f 3662 regno, INSN_UID (jump));
10d22567
ZD
3663 print_rtl (dump_file, SET_SRC (set->expr));
3664 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 3665 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
3666 }
3667 change = 1;
628f6a4e 3668 removed_p = 1;
0e3f0221
RS
3669 break;
3670 }
3671 }
628f6a4e
BE
3672 if (!removed_p)
3673 ei_next (&ei);
0e3f0221
RS
3674 }
3675 return change;
3676}
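/* Worked example (illustrative only, not from the original sources): suppose
   block B contains just "if (r10 == 0) goto L1;" and the constant set
   "r10 <- 0" is available at the end of incoming edge E's source block.
   The branch condition then folds, NEW becomes either pc_rtx (fall through)
   or a LABEL_REF (branch taken), and E is redirected past B straight to the
   known destination. */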
3677
3678/* Find basic blocks with more than one predecessor that only contain a
3679 single conditional jump. If the result of the comparison is known at
3680 compile-time from any incoming edge, redirect that edge to the
9a71ece1
RH
3681 appropriate target. Returns nonzero if a change was made.
3682
3683 This function is now mis-named, because we also handle indirect jumps. */
0e3f0221
RS
3684
3685static int
1d088dee 3686bypass_conditional_jumps (void)
0e3f0221
RS
3687{
3688 basic_block bb;
3689 int changed;
3690 rtx setcc;
3691 rtx insn;
3692 rtx dest;
3693
3694 /* Note we start at block 1. */
3695 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3696 return 0;
3697
7821bfc7 3698 bypass_last_basic_block = last_basic_block;
72b8d451 3699 mark_dfs_back_edges ();
7821bfc7 3700
0e3f0221
RS
3701 changed = 0;
3702 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 3703 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
3704 {
3705 /* Check for more than one predecessor. */
c5cbcccf 3706 if (!single_pred_p (bb))
0e3f0221
RS
3707 {
3708 setcc = NULL_RTX;
eb232f4e 3709 FOR_BB_INSNS (bb, insn)
4b4bf941 3710 if (NONJUMP_INSN_P (insn))
0e3f0221 3711 {
9543a9d2 3712 if (setcc)
0e3f0221 3713 break;
ba4f7968 3714 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
3715 break;
3716
ba4f7968 3717 dest = SET_DEST (PATTERN (insn));
818b6b7f 3718 if (REG_P (dest) || CC0_P (dest))
0e3f0221 3719 setcc = insn;
0e3f0221
RS
3720 else
3721 break;
3722 }
7b1b4aed 3723 else if (JUMP_P (insn))
0e3f0221 3724 {
9a71ece1
RH
3725 if ((any_condjump_p (insn) || computed_jump_p (insn))
3726 && onlyjump_p (insn))
0e3f0221
RS
3727 changed |= bypass_block (bb, setcc, insn);
3728 break;
3729 }
3730 else if (INSN_P (insn))
3731 break;
3732 }
3733 }
3734
818b6b7f 3735 /* If we bypassed any register setting insns, we inserted a
fbe5a4a6 3736 copy on the redirected edge. These need to be committed. */
0e3f0221 3737 if (changed)
62e5bf5d 3738 commit_edge_insertions ();
0e3f0221
RS
3739
3740 return changed;
3741}
3742\f
a65f3558 3743/* Compute PRE+LCM working variables. */
7506f491
DE
3744
3745/* Local properties of expressions. */
3746/* Nonzero for expressions that are transparent in the block. */
a65f3558 3747static sbitmap *transp;
7506f491 3748
5c35539b
RH
3749/* Nonzero for expressions that are transparent at the end of the block.
3750 This is only zero for expressions killed by abnormal critical edge
3751 created by a calls. */
a65f3558 3752static sbitmap *transpout;
5c35539b 3753
a65f3558
JL
3754/* Nonzero for expressions that are computed (available) in the block. */
3755static sbitmap *comp;
7506f491 3756
a65f3558
JL
3757/* Nonzero for expressions that are locally anticipatable in the block. */
3758static sbitmap *antloc;
7506f491 3759
a65f3558
JL
3760/* Nonzero for expressions where this block is an optimal computation
3761 point. */
3762static sbitmap *pre_optimal;
5c35539b 3763
a65f3558
JL
3764/* Nonzero for expressions which are redundant in a particular block. */
3765static sbitmap *pre_redundant;
7506f491 3766
a42cd965
AM
3767/* Nonzero for expressions which should be inserted on a specific edge. */
3768static sbitmap *pre_insert_map;
3769
3770/* Nonzero for expressions which should be deleted in a specific block. */
3771static sbitmap *pre_delete_map;
3772
3773/* Contains the edge_list returned by pre_edge_lcm. */
3774static struct edge_list *edge_list;
3775
a65f3558
JL
3776/* Redundant insns. */
3777static sbitmap pre_redundant_insns;
7506f491 3778
a65f3558 3779/* Allocate vars used for PRE analysis. */
7506f491
DE
3780
3781static void
1d088dee 3782alloc_pre_mem (int n_blocks, int n_exprs)
7506f491 3783{
a65f3558
JL
3784 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3785 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3786 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 3787
a42cd965
AM
3788 pre_optimal = NULL;
3789 pre_redundant = NULL;
3790 pre_insert_map = NULL;
3791 pre_delete_map = NULL;
a42cd965 3792 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 3793
a42cd965 3794 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
3795}
3796
a65f3558 3797/* Free vars used for PRE analysis. */
7506f491
DE
3798
3799static void
1d088dee 3800free_pre_mem (void)
7506f491 3801{
5a660bff
DB
3802 sbitmap_vector_free (transp);
3803 sbitmap_vector_free (comp);
bd3675fc
JL
3804
3805 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 3806
a42cd965 3807 if (pre_optimal)
5a660bff 3808 sbitmap_vector_free (pre_optimal);
a42cd965 3809 if (pre_redundant)
5a660bff 3810 sbitmap_vector_free (pre_redundant);
a42cd965 3811 if (pre_insert_map)
5a660bff 3812 sbitmap_vector_free (pre_insert_map);
a42cd965 3813 if (pre_delete_map)
5a660bff 3814 sbitmap_vector_free (pre_delete_map);
a42cd965 3815
bd3675fc 3816 transp = comp = NULL;
a42cd965 3817 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
7506f491
DE
3818}
3819
3820/* Top level routine to do the dataflow analysis needed by PRE. */
3821
3822static void
1d088dee 3823compute_pre_data (void)
7506f491 3824{
b614171e 3825 sbitmap trapping_expr;
e0082a72 3826 basic_block bb;
b614171e 3827 unsigned int ui;
c66e8ae9 3828
02280659 3829 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 3830 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 3831
b614171e 3832 /* Collect expressions which might trap. */
02280659 3833 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 3834 sbitmap_zero (trapping_expr);
02280659 3835 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
3836 {
3837 struct expr *e;
02280659 3838 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
3839 if (may_trap_p (e->expr))
3840 SET_BIT (trapping_expr, e->bitmap_index);
3841 }
3842
c66e8ae9
JL
3843 /* Compute ae_kill for each basic block using:
3844
3845 ~(TRANSP | COMP)
e83f4801 3846 */
c66e8ae9 3847
e0082a72 3848 FOR_EACH_BB (bb)
c66e8ae9 3849 {
b614171e 3850 edge e;
628f6a4e 3851 edge_iterator ei;
b614171e
MM
3852
3853 /* If the current block is the destination of an abnormal edge, we
3854 kill all trapping expressions because we won't be able to properly
3855 place the instruction on the edge. So make them neither
3856 anticipatable nor transparent. This is fairly conservative. */
628f6a4e 3857 FOR_EACH_EDGE (e, ei, bb->preds)
b614171e
MM
3858 if (e->flags & EDGE_ABNORMAL)
3859 {
e0082a72
ZD
3860 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3861 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
3862 break;
3863 }
3864
e0082a72
ZD
3865 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3866 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
3867 }
3868
10d22567 3869 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 3870 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 3871 sbitmap_vector_free (antloc);
bd3675fc 3872 antloc = NULL;
5a660bff 3873 sbitmap_vector_free (ae_kill);
589005ff 3874 ae_kill = NULL;
76ac938b 3875 sbitmap_free (trapping_expr);
7506f491
DE
3876}
3877\f
3878/* PRE utilities */
3879
cc2902df 3880/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 3881 block BB.
7506f491
DE
3882
3883 VISITED is a pointer to a working buffer for tracking which BB's have
3884 been visited. It is NULL for the top-level call.
3885
3886 We treat reaching expressions that go through blocks containing the same
3887 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3888 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3889 2 as not reaching. The intent is to improve the probability of finding
3890 only one reaching expression and to reduce register lifetimes by picking
3891 the closest such expression. */
3892
3893static int
1d088dee 3894pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
7506f491 3895{
36349f8b 3896 edge pred;
628f6a4e
BE
3897 edge_iterator ei;
3898
3899 FOR_EACH_EDGE (pred, ei, bb->preds)
7506f491 3900 {
e2d2ed72 3901 basic_block pred_bb = pred->src;
7506f491 3902
36349f8b 3903 if (pred->src == ENTRY_BLOCK_PTR
7506f491 3904 /* Has this predecessor already been visited? */
0b17ab2f 3905 || visited[pred_bb->index])
c4c81601
RK
3906 ;/* Nothing to do. */
3907
7506f491 3908 /* Does this predecessor generate this expression? */
0b17ab2f 3909 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
3910 {
3911 /* Is this the occurrence we're looking for?
3912 Note that there's only one generating occurrence per block
3913 so we just need to check the block number. */
a65f3558 3914 if (occr_bb == pred_bb)
7506f491 3915 return 1;
c4c81601 3916
0b17ab2f 3917 visited[pred_bb->index] = 1;
7506f491
DE
3918 }
3919 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3920 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3921 visited[pred_bb->index] = 1;
c4c81601 3922
7506f491
DE
3923 /* Neither gen nor kill. */
3924 else
ac7c5af5 3925 {
0b17ab2f 3926 visited[pred_bb->index] = 1;
89e606c9 3927 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 3928 return 1;
ac7c5af5 3929 }
7506f491
DE
3930 }
3931
3932 /* All paths have been checked. */
3933 return 0;
3934}
283a2545
RL
3935
3936/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 3937 memory allocated for that function is returned. */
283a2545
RL
3938
3939static int
1d088dee 3940pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
283a2545
RL
3941{
3942 int rval;
5ed6ace5 3943 char *visited = XCNEWVEC (char, last_basic_block);
283a2545 3944
8e42ace1 3945 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
3946
3947 free (visited);
c4c81601 3948 return rval;
283a2545 3949}
7506f491 3950\f
a42cd965
AM
3951
3952/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 3953 or on an edge. Set the block number of any insns generated to
a42cd965
AM
3954 the value of BB. */
3955
3956static rtx
1d088dee 3957process_insert_insn (struct expr *expr)
a42cd965
AM
3958{
3959 rtx reg = expr->reaching_reg;
fb0c0a12
RK
3960 rtx exp = copy_rtx (expr->expr);
3961 rtx pat;
a42cd965
AM
3962
3963 start_sequence ();
fb0c0a12
RK
3964
3965 /* If the expression is something that's an operand, like a constant,
3966 just copy it to a register. */
3967 if (general_operand (exp, GET_MODE (reg)))
3968 emit_move_insn (reg, exp);
3969
3970 /* Otherwise, make a new insn to compute this expression and make sure the
3971 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3972 expression to make sure we don't have any sharing issues. */
282899df
NS
3973 else
3974 {
3975 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3976
2f021b67
AP
3977 if (insn_invalid_p (insn))
3978 gcc_unreachable ();
282899df
NS
3979 }
3980
589005ff 3981
2f937369 3982 pat = get_insns ();
a42cd965
AM
3983 end_sequence ();
3984
3985 return pat;
3986}
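/* Example (illustrative only, not from the original sources): for an
   expression such as
       (plus:SI (reg 105) (const_int 4))
   the sequence built above is the single recognized insn
       (set (reg reaching_reg) (plus:SI (reg 105) (const_int 4)))
   while for a plain constant it degenerates to a simple move into
   reaching_reg. */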
589005ff 3987
a65f3558
JL
3988/* Add EXPR to the end of basic block BB.
3989
3990 This is used by both the PRE and code hoisting.
3991
3992 For PRE, we want to verify that the expr is either transparent
3993 or locally anticipatable in the target block. This check makes
3994 no sense for code hoisting. */
7506f491
DE
3995
3996static void
6fb5fa3c 3997insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
7506f491 3998{
a813c111 3999 rtx insn = BB_END (bb);
7506f491
DE
4000 rtx new_insn;
4001 rtx reg = expr->reaching_reg;
4002 int regno = REGNO (reg);
2f937369 4003 rtx pat, pat_end;
7506f491 4004
a42cd965 4005 pat = process_insert_insn (expr);
282899df 4006 gcc_assert (pat && INSN_P (pat));
2f937369
DM
4007
4008 pat_end = pat;
4009 while (NEXT_INSN (pat_end) != NULL_RTX)
4010 pat_end = NEXT_INSN (pat_end);
7506f491
DE
4011
4012 /* If the last insn is a jump, insert EXPR in front [taking care to
4d6922ee 4013 handle cc0, etc. properly]. Similarly we need to take care of trapping
068473ec 4014 instructions in the presence of non-call exceptions. */
7506f491 4015
7b1b4aed 4016 if (JUMP_P (insn)
4b4bf941 4017 || (NONJUMP_INSN_P (insn)
c5cbcccf
ZD
4018 && (!single_succ_p (bb)
4019 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
7506f491 4020 {
50b2596f 4021#ifdef HAVE_cc0
7506f491 4022 rtx note;
50b2596f 4023#endif
068473ec
JH
4024 /* It should always be the case that we can put these instructions
4025 anywhere in the basic block when performing PRE optimizations.
4026 Check this. */
282899df
NS
4027 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4028 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4029 || TEST_BIT (transp[bb->index], expr->bitmap_index));
7506f491
DE
4030
4031 /* If this is a jump table, then we can't insert stuff here. Since
4032 we know the previous real insn must be the tablejump, we insert
4033 the new instruction just before the tablejump. */
4034 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4035 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4036 insn = prev_real_insn (insn);
4037
4038#ifdef HAVE_cc0
4039 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4040 if cc0 isn't set. */
4041 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4042 if (note)
4043 insn = XEXP (note, 0);
4044 else
4045 {
4046 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4047 if (maybe_cc0_setter
2c3c49de 4048 && INSN_P (maybe_cc0_setter)
7506f491
DE
4049 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4050 insn = maybe_cc0_setter;
4051 }
4052#endif
4053 /* FIXME: What if something in cc0/jump uses value set in new insn? */
6fb5fa3c 4054 new_insn = emit_insn_before_noloc (pat, insn, bb);
3947e2f9 4055 }
c4c81601 4056
3947e2f9
RH
4057 /* Likewise if the last insn is a call, as will happen in the presence
4058 of exception handling. */
7b1b4aed 4059 else if (CALL_P (insn)
c5cbcccf
ZD
4060 && (!single_succ_p (bb)
4061 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3947e2f9 4062 {
3947e2f9
RH
4063 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4064 we search backward and place the instructions before the first
4065 parameter is loaded. Do this for everyone for consistency and a
fbe5a4a6 4066 presumption that we'll get better code elsewhere as well.
3947e2f9 4067
c4c81601 4068 It should always be the case that we can put these instructions
a65f3558
JL
4069 anywhere in the basic block when performing PRE optimizations.
4070 Check this. */
c4c81601 4071
282899df
NS
4072 gcc_assert (!pre
4073 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4074 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3947e2f9
RH
4075
4076 /* Since different machines initialize their parameter registers
4077 in different orders, assume nothing. Collect the set of all
4078 parameter registers. */
a813c111 4079 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3947e2f9 4080
b1d26727
JL
4081 /* If we found all the parameter loads, then we want to insert
4082 before the first parameter load.
4083
4084 If we did not find all the parameter loads, then we might have
4085 stopped on the head of the block, which could be a CODE_LABEL.
4086 If we inserted before the CODE_LABEL, then we would be putting
4087 the insn in the wrong basic block. In that case, put the insn
b5229628 4088 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
7b1b4aed 4089 while (LABEL_P (insn)
589ca5cb 4090 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 4091 insn = NEXT_INSN (insn);
c4c81601 4092
6fb5fa3c 4093 new_insn = emit_insn_before_noloc (pat, insn, bb);
7506f491
DE
4094 }
4095 else
6fb5fa3c 4096 new_insn = emit_insn_after_noloc (pat, insn, bb);
7506f491 4097
2f937369 4098 while (1)
a65f3558 4099 {
2f937369 4100 if (INSN_P (pat))
a65f3558 4101 {
2f937369
DM
4102 add_label_notes (PATTERN (pat), new_insn);
4103 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 4104 }
2f937369
DM
4105 if (pat == pat_end)
4106 break;
4107 pat = NEXT_INSN (pat);
a65f3558 4108 }
3947e2f9 4109
7506f491
DE
4110 gcse_create_count++;
4111
10d22567 4112 if (dump_file)
7506f491 4113 {
10d22567 4114 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 4115 bb->index, INSN_UID (new_insn));
10d22567 4116 fprintf (dump_file, "copying expression %d to reg %d\n",
c4c81601 4117 expr->bitmap_index, regno);
7506f491
DE
4118 }
4119}
4120
a42cd965
AM
4121/* Insert partially redundant expressions on edges in the CFG to make
4122 the expressions fully redundant. */
7506f491 4123
a42cd965 4124static int
1d088dee 4125pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
7506f491 4126{
c4c81601 4127 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
4128 sbitmap *inserted;
4129
a42cd965
AM
4130 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4131 if it reaches any of the deleted expressions. */
7506f491 4132
a42cd965
AM
4133 set_size = pre_insert_map[0]->size;
4134 num_edges = NUM_EDGES (edge_list);
02280659 4135 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 4136 sbitmap_vector_zero (inserted, num_edges);
7506f491 4137
a42cd965 4138 for (e = 0; e < num_edges; e++)
7506f491
DE
4139 {
4140 int indx;
e2d2ed72 4141 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 4142
a65f3558 4143 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 4144 {
a42cd965 4145 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
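 /* Added note (not original commentary): INSERT holds one word of this
    edge's insertion bitmap; the inner loop below walks it bit by bit, so
    bit J being set means expression J must be inserted on edge E. */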
7506f491 4146
02280659 4147 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
4148 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4149 {
4150 struct expr *expr = index_map[j];
4151 struct occr *occr;
a65f3558 4152
ff7cc307 4153 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
4154 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4155 {
4156 if (! occr->deleted_p)
4157 continue;
4158
3f117656 4159 /* Insert this expression on this edge if it would
ff7cc307 4160 reach the deleted occurrence in BB. */
c4c81601
RK
4161 if (!TEST_BIT (inserted[e], j))
4162 {
4163 rtx insn;
4164 edge eg = INDEX_EDGE (edge_list, e);
4165
4166 /* We can't insert anything on an abnormal and
4167 critical edge, so we insert the insn at the end of
4168 the previous block. There are several alternatives
4169 detailed in Morgan's book P277 (sec 10.5) for
4170 handling this situation. This one is easiest for
4171 now. */
4172
b16aa8a5 4173 if (eg->flags & EDGE_ABNORMAL)
6fb5fa3c 4174 insert_insn_end_basic_block (index_map[j], bb, 0);
c4c81601
RK
4175 else
4176 {
4177 insn = process_insert_insn (index_map[j]);
4178 insert_insn_on_edge (insn, eg);
4179 }
4180
10d22567 4181 if (dump_file)
c4c81601 4182 {
10d22567 4183 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
4184 bb->index,
4185 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
10d22567 4186 fprintf (dump_file, "copy expression %d\n",
c4c81601
RK
4187 expr->bitmap_index);
4188 }
4189
a13d4ebf 4190 update_ld_motion_stores (expr);
c4c81601
RK
4191 SET_BIT (inserted[e], j);
4192 did_insert = 1;
4193 gcse_create_count++;
4194 }
4195 }
4196 }
7506f491
DE
4197 }
4198 }
5faf03ae 4199
5a660bff 4200 sbitmap_vector_free (inserted);
a42cd965 4201 return did_insert;
7506f491
DE
4202}
4203
073089a7 4204/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
b885908b
MH
4205 Given "old_reg <- expr" (INSN), instead of adding after it
4206 reaching_reg <- old_reg
4207 it's better to do the following:
4208 reaching_reg <- expr
4209 old_reg <- reaching_reg
4210 because this way copy propagation can discover additional PRE
f5f2e3cd
MH
4211 opportunities. But if this fails, we try the old way.
4212 When "expr" is a store, i.e.
4213 given "MEM <- old_reg", instead of adding after it
4214 reaching_reg <- old_reg
4215 it's better to add it before as follows:
4216 reaching_reg <- old_reg
4217 MEM <- reaching_reg. */
7506f491
DE
4218
4219static void
1d088dee 4220pre_insert_copy_insn (struct expr *expr, rtx insn)
7506f491
DE
4221{
4222 rtx reg = expr->reaching_reg;
4223 int regno = REGNO (reg);
4224 int indx = expr->bitmap_index;
073089a7 4225 rtx pat = PATTERN (insn);
64068ca2 4226 rtx set, first_set, new_insn;
b885908b 4227 rtx old_reg;
073089a7 4228 int i;
7506f491 4229
073089a7 4230 /* This block matches the logic in hash_scan_insn. */
282899df 4231 switch (GET_CODE (pat))
073089a7 4232 {
282899df
NS
4233 case SET:
4234 set = pat;
4235 break;
4236
4237 case PARALLEL:
073089a7
RS
4238 /* Search through the parallel looking for the set whose
4239 source was the expression that we're interested in. */
64068ca2 4240 first_set = NULL_RTX;
073089a7
RS
4241 set = NULL_RTX;
4242 for (i = 0; i < XVECLEN (pat, 0); i++)
4243 {
4244 rtx x = XVECEXP (pat, 0, i);
64068ca2 4245 if (GET_CODE (x) == SET)
073089a7 4246 {
64068ca2
RS
4247 /* If the source was a REG_EQUAL or REG_EQUIV note, we
4248 may not find an equivalent expression, but in this
4249 case the PARALLEL will have a single set. */
4250 if (first_set == NULL_RTX)
4251 first_set = x;
4252 if (expr_equiv_p (SET_SRC (x), expr->expr))
4253 {
4254 set = x;
4255 break;
4256 }
073089a7
RS
4257 }
4258 }
64068ca2
RS
4259
4260 gcc_assert (first_set);
4261 if (set == NULL_RTX)
4262 set = first_set;
282899df
NS
4263 break;
4264
4265 default:
4266 gcc_unreachable ();
073089a7 4267 }
c4c81601 4268
7b1b4aed 4269 if (REG_P (SET_DEST (set)))
073089a7 4270 {
f5f2e3cd
MH
4271 old_reg = SET_DEST (set);
4272 /* Check if we can modify the set destination in the original insn. */
4273 if (validate_change (insn, &SET_DEST (set), reg, 0))
4274 {
4275 new_insn = gen_move_insn (old_reg, reg);
4276 new_insn = emit_insn_after (new_insn, insn);
4277
4278 /* Keep register set table up to date. */
f5f2e3cd
MH
4279 record_one_set (regno, insn);
4280 }
4281 else
4282 {
4283 new_insn = gen_move_insn (reg, old_reg);
4284 new_insn = emit_insn_after (new_insn, insn);
073089a7 4285
f5f2e3cd
MH
4286 /* Keep register set table up to date. */
4287 record_one_set (regno, new_insn);
4288 }
073089a7 4289 }
f5f2e3cd 4290 else /* This is possible only in case of a store to memory. */
073089a7 4291 {
f5f2e3cd 4292 old_reg = SET_SRC (set);
073089a7 4293 new_insn = gen_move_insn (reg, old_reg);
f5f2e3cd
MH
4294
4295 /* Check if we can modify the set source in the original insn. */
4296 if (validate_change (insn, &SET_SRC (set), reg, 0))
4297 new_insn = emit_insn_before (new_insn, insn);
4298 else
4299 new_insn = emit_insn_after (new_insn, insn);
c4c81601 4300
073089a7
RS
4301 /* Keep register set table up to date. */
4302 record_one_set (regno, new_insn);
4303 }
7506f491
DE
4304
4305 gcse_create_count++;
4306
10d22567
ZD
4307 if (dump_file)
4308 fprintf (dump_file,
a42cd965
AM
4309 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4310 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4311 INSN_UID (insn), regno);
7506f491
DE
4312}
4313
4314/* Copy available expressions that reach the redundant expression
4315 to `reaching_reg'. */
4316
4317static void
1d088dee 4318pre_insert_copies (void)
7506f491 4319{
f5f2e3cd 4320 unsigned int i, added_copy;
c4c81601
RK
4321 struct expr *expr;
4322 struct occr *occr;
4323 struct occr *avail;
a65f3558 4324
7506f491
DE
4325 /* For each available expression in the table, copy the result to
4326 `reaching_reg' if the expression reaches a deleted one.
4327
4328 ??? The current algorithm is rather brute force.
4329 Need to do some profiling. */
4330
02280659
ZD
4331 for (i = 0; i < expr_hash_table.size; i++)
4332 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
4333 {
4334 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4335 we don't want to insert a copy here because the expression may not
4336 really be redundant. So only insert an insn if the expression was
4337 deleted. This test also avoids further processing if the
4338 expression wasn't deleted anywhere. */
4339 if (expr->reaching_reg == NULL)
4340 continue;
7b1b4aed 4341
f5f2e3cd 4342 /* Set when we add a copy for that expression. */
7b1b4aed 4343 added_copy = 0;
c4c81601
RK
4344
4345 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4346 {
4347 if (! occr->deleted_p)
4348 continue;
7506f491 4349
c4c81601
RK
4350 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4351 {
4352 rtx insn = avail->insn;
7506f491 4353
c4c81601
RK
4354 /* No need to handle this one if handled already. */
4355 if (avail->copied_p)
4356 continue;
7506f491 4357
c4c81601
RK
4358 /* Don't handle this one if it's a redundant one. */
4359 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4360 continue;
7506f491 4361
c4c81601 4362 /* Or if the expression doesn't reach the deleted one. */
589005ff 4363 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
4364 expr,
4365 BLOCK_FOR_INSN (occr->insn)))
c4c81601 4366 continue;
7506f491 4367
f5f2e3cd
MH
4368 added_copy = 1;
4369
c4c81601
RK
4370 /* Copy the result of avail to reaching_reg. */
4371 pre_insert_copy_insn (expr, insn);
4372 avail->copied_p = 1;
4373 }
4374 }
f5f2e3cd 4375
7b1b4aed 4376 if (added_copy)
f5f2e3cd 4377 update_ld_motion_stores (expr);
c4c81601 4378 }
7506f491
DE
4379}
4380
10d1bb36
JH
4381/* Emit move from SRC to DEST noting the equivalence with expression computed
4382 in INSN. */
4383static rtx
1d088dee 4384gcse_emit_move_after (rtx src, rtx dest, rtx insn)
10d1bb36
JH
4385{
4386 rtx new;
6bdb8dd6 4387 rtx set = single_set (insn), set2;
10d1bb36
JH
4388 rtx note;
4389 rtx eqv;
4390
4391 /* This should never fail since we're creating a reg->reg copy
4392 we've verified to be valid. */
4393
6bdb8dd6 4394 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 4395
10d1bb36 4396 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
4397 set2 = single_set (new);
4398 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4399 return new;
10d1bb36
JH
4400 if ((note = find_reg_equal_equiv_note (insn)))
4401 eqv = XEXP (note, 0);
4402 else
4403 eqv = SET_SRC (set);
4404
a500466b 4405 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
4406
4407 return new;
4408}
4409
7506f491 4410/* Delete redundant computations.
7506f491
DE
4411 Deletion is done by changing the insn to copy the `reaching_reg' of
4412 the expression into the result of the SET. It is left to later passes
4413 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4414
cc2902df 4415 Returns nonzero if a change is made. */
7506f491
DE
4416
4417static int
1d088dee 4418pre_delete (void)
7506f491 4419{
2e653e39 4420 unsigned int i;
63bc1d05 4421 int changed;
c4c81601
RK
4422 struct expr *expr;
4423 struct occr *occr;
a65f3558 4424
7506f491 4425 changed = 0;
02280659 4426 for (i = 0; i < expr_hash_table.size; i++)
073089a7
RS
4427 for (expr = expr_hash_table.table[i];
4428 expr != NULL;
4429 expr = expr->next_same_hash)
c4c81601
RK
4430 {
4431 int indx = expr->bitmap_index;
7506f491 4432
c4c81601
RK
4433 /* We only need to search antic_occr since we require
4434 ANTLOC != 0. */
7506f491 4435
c4c81601
RK
4436 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4437 {
4438 rtx insn = occr->insn;
4439 rtx set;
e2d2ed72 4440 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 4441
073089a7
RS
4442 /* We only delete insns that have a single_set. */
4443 if (TEST_BIT (pre_delete_map[bb->index], indx)
6fb5fa3c
DB
4444 && (set = single_set (insn)) != 0
4445 && dbg_cnt (pre_insn))
c4c81601 4446 {
c4c81601
RK
4447 /* Create a pseudo-reg to store the result of reaching
4448 expressions into. Get the mode for the new pseudo from
4449 the mode of the original destination pseudo. */
4450 if (expr->reaching_reg == NULL)
4451 expr->reaching_reg
4452 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4453
9b76aa3b 4454 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
10d1bb36
JH
4455 delete_insn (insn);
4456 occr->deleted_p = 1;
4457 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4458 changed = 1;
4459 gcse_subst_count++;
7506f491 4460
10d22567 4461 if (dump_file)
c4c81601 4462 {
10d22567 4463 fprintf (dump_file,
c4c81601
RK
4464 "PRE: redundant insn %d (expression %d) in ",
4465 INSN_UID (insn), indx);
10d22567 4466 fprintf (dump_file, "bb %d, reaching reg is %d\n",
0b17ab2f 4467 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
4468 }
4469 }
4470 }
4471 }
7506f491
DE
4472
4473 return changed;
4474}
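/* Worked example (illustrative only, not from the original sources): if
   "r30 <- r1 + r2" in some block is marked for deletion, a fresh pseudo P
   becomes expr->reaching_reg, the insn is replaced by the copy "r30 <- P",
   and pre_insert_copies/pre_edge_insert arrange for every computation of
   r1 + r2 that reaches this point to store its result in P. */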
4475
4476/* Perform GCSE optimizations using PRE.
4477 This is called by one_pre_gcse_pass after all the dataflow analysis
4478 has been done.
4479
c4c81601
RK
4480 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4481 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4482 Compiler Design and Implementation.
7506f491 4483
c4c81601
RK
4484 ??? A new pseudo reg is created to hold the reaching expression. The nice
4485 thing about the classical approach is that it would try to use an existing
4486 reg. If the register can't be adequately optimized [i.e. we introduce
4487 reload problems], one could add a pass here to propagate the new register
4488 through the block.
7506f491 4489
c4c81601
RK
4490 ??? We don't handle single sets in PARALLELs because we're [currently] not
4491 able to copy the rest of the parallel when we insert copies to create full
4492 redundancies from partial redundancies. However, there's no reason why we
4493 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
4494 redundancies. */
4495
4496static int
1d088dee 4497pre_gcse (void)
7506f491 4498{
2e653e39
RK
4499 unsigned int i;
4500 int did_insert, changed;
7506f491 4501 struct expr **index_map;
c4c81601 4502 struct expr *expr;
7506f491
DE
4503
4504 /* Compute a mapping from expression number (`bitmap_index') to
4505 hash table entry. */
4506
5ed6ace5 4507 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659
ZD
4508 for (i = 0; i < expr_hash_table.size; i++)
4509 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 4510 index_map[expr->bitmap_index] = expr;
7506f491
DE
4511
4512 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
4513 pre_redundant_insns = sbitmap_alloc (max_cuid);
4514 sbitmap_zero (pre_redundant_insns);
7506f491
DE
4515
4516 /* Delete the redundant insns first so that
4517 - we know what register to use for the new insns and for the other
4518 ones with reaching expressions
4519 - we know which insns are redundant when we go to create copies */
c4c81601 4520
7506f491 4521 changed = pre_delete ();
a42cd965 4522 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 4523
7506f491 4524 /* In other places with reaching expressions, copy the expression to the
a42cd965 4525 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 4526 pre_insert_copies ();
a42cd965
AM
4527 if (did_insert)
4528 {
4529 commit_edge_insertions ();
4530 changed = 1;
4531 }
7506f491 4532
283a2545 4533 free (index_map);
76ac938b 4534 sbitmap_free (pre_redundant_insns);
7506f491
DE
4535 return changed;
4536}
4537
4538/* Top level routine to perform one PRE GCSE pass.
4539
cc2902df 4540 Return nonzero if a change was made. */
7506f491
DE
4541
4542static int
1d088dee 4543one_pre_gcse_pass (int pass)
7506f491
DE
4544{
4545 int changed = 0;
4546
4547 gcse_subst_count = 0;
4548 gcse_create_count = 0;
4549
02280659 4550 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 4551 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
4552 if (flag_gcse_lm)
4553 compute_ld_motion_mems ();
4554
02280659 4555 compute_hash_table (&expr_hash_table);
a13d4ebf 4556 trim_ld_motion_mems ();
10d22567
ZD
4557 if (dump_file)
4558 dump_hash_table (dump_file, "Expression", &expr_hash_table);
c4c81601 4559
02280659 4560 if (expr_hash_table.n_elems > 0)
7506f491 4561 {
02280659 4562 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
4563 compute_pre_data ();
4564 changed |= pre_gcse ();
a42cd965 4565 free_edge_list (edge_list);
7506f491
DE
4566 free_pre_mem ();
4567 }
c4c81601 4568
a13d4ebf 4569 free_ldst_mems ();
6809cbf9 4570 remove_fake_exit_edges ();
02280659 4571 free_hash_table (&expr_hash_table);
7506f491 4572
10d22567 4573 if (dump_file)
7506f491 4574 {
10d22567 4575 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
faed5cc3 4576 current_function_name (), pass, bytes_used);
10d22567 4577 fprintf (dump_file, "%d substs, %d insns created\n",
c4c81601 4578 gcse_subst_count, gcse_create_count);
7506f491
DE
4579 }
4580
4581 return changed;
4582}
aeb2f500
JW
4583\f
4584/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
4585 If notes are added to an insn which references a CODE_LABEL, the
4586 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4587 because the following loop optimization pass requires them. */
aeb2f500 4588
aeb2f500
JW
4589/* ??? If there was a jump optimization pass after gcse and before loop,
4590 then we would not need to do this here, because jump would add the
4591 necessary REG_LABEL notes. */
4592
4593static void
1d088dee 4594add_label_notes (rtx x, rtx insn)
aeb2f500
JW
4595{
4596 enum rtx_code code = GET_CODE (x);
4597 int i, j;
6f7d635c 4598 const char *fmt;
aeb2f500
JW
4599
4600 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4601 {
6b3603c2 4602 /* This code used to ignore labels that referred to dispatch tables to
e0bb17a8 4603 avoid flow generating (slightly) worse code.
6b3603c2 4604
ac7c5af5
JL
4605 We no longer ignore such label references (see LABEL_REF handling in
4606 mark_jump_label for additional information). */
c4c81601 4607
6b8c9327 4608 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 4609 REG_NOTES (insn));
5b1ef594 4610 if (LABEL_P (XEXP (x, 0)))
589005ff 4611 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
4612 return;
4613 }
4614
c4c81601 4615 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
4616 {
4617 if (fmt[i] == 'e')
4618 add_label_notes (XEXP (x, i), insn);
4619 else if (fmt[i] == 'E')
4620 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4621 add_label_notes (XVECEXP (x, i, j), insn);
4622 }
4623}
a65f3558
JL
4624
4625/* Compute transparent outgoing information for each block.
4626
4627 An expression is transparent to an edge unless it is killed by
4628 the edge itself. This can only happen with abnormal control flow,
4629 when the edge is traversed through a call. This happens with
4630 non-local labels and exceptions.
4631
4632 This would not be necessary if we split the edge. While this is
4633 normally impossible for abnormal critical edges, with some effort
4634 it should be possible with exception handling, since we still have
4635 control over which handler should be invoked. But due to increased
4636 EH table sizes, this may not be worthwhile. */
4637
4638static void
1d088dee 4639compute_transpout (void)
a65f3558 4640{
e0082a72 4641 basic_block bb;
2e653e39 4642 unsigned int i;
c4c81601 4643 struct expr *expr;
a65f3558 4644
d55bc081 4645 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 4646
e0082a72 4647 FOR_EACH_BB (bb)
a65f3558 4648 {
a65f3558
JL
4649 /* Note that flow inserted a nop a the end of basic blocks that
4650 end in call instructions for reasons other than abnormal
4651 control flow. */
7b1b4aed 4652 if (! CALL_P (BB_END (bb)))
a65f3558
JL
4653 continue;
4654
02280659
ZD
4655 for (i = 0; i < expr_hash_table.size; i++)
4656 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
7b1b4aed 4657 if (MEM_P (expr->expr))
c4c81601
RK
4658 {
4659 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4660 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4661 continue;
589005ff 4662
c4c81601
RK
4663 /* ??? Optimally, we would use interprocedural alias
4664 analysis to determine if this mem is actually killed
4665 by this call. */
e0082a72 4666 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 4667 }
a65f3558
JL
4668 }
4669}
dfdb644f 4670
bb457bd9
JL
4671/* Code Hoisting variables and subroutines. */
4672
4673/* Very busy expressions. */
4674static sbitmap *hoist_vbein;
4675static sbitmap *hoist_vbeout;
4676
4677/* Hoistable expressions. */
4678static sbitmap *hoist_exprs;
4679
bb457bd9 4680/* ??? We could compute post dominators and run this algorithm in
68e82b83 4681 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
4682 more effective than the tail merging code in jump.c.
4683
4684 It's unclear if tail merging could be run in parallel with
4685 code hoisting. It would be nice. */
4686
4687/* Allocate vars used for code hoisting analysis. */
4688
4689static void
1d088dee 4690alloc_code_hoist_mem (int n_blocks, int n_exprs)
bb457bd9
JL
4691{
4692 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4693 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4694 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4695
4696 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4697 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4698 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4699 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
4700}
4701
4702/* Free vars used for code hoisting analysis. */
4703
4704static void
1d088dee 4705free_code_hoist_mem (void)
bb457bd9 4706{
5a660bff
DB
4707 sbitmap_vector_free (antloc);
4708 sbitmap_vector_free (transp);
4709 sbitmap_vector_free (comp);
bb457bd9 4710
5a660bff
DB
4711 sbitmap_vector_free (hoist_vbein);
4712 sbitmap_vector_free (hoist_vbeout);
4713 sbitmap_vector_free (hoist_exprs);
4714 sbitmap_vector_free (transpout);
bb457bd9 4715
d47cc544 4716 free_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
4717}
4718
4719/* Compute the very busy expressions at entry/exit from each block.
4720
4721 An expression is very busy if all paths from a given point
4722 compute the expression. */
4723
4724static void
1d088dee 4725compute_code_hoist_vbeinout (void)
bb457bd9 4726{
e0082a72
ZD
4727 int changed, passes;
4728 basic_block bb;
bb457bd9 4729
d55bc081
ZD
4730 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4731 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
4732
4733 passes = 0;
4734 changed = 1;
c4c81601 4735
bb457bd9
JL
4736 while (changed)
4737 {
4738 changed = 0;
c4c81601 4739
bb457bd9
JL
4740 /* We scan the blocks in the reverse order to speed up
4741 the convergence. */
e0082a72 4742 FOR_EACH_BB_REVERSE (bb)
bb457bd9 4743 {
e0082a72
ZD
4744 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4745 hoist_vbeout[bb->index], transp[bb->index]);
4746 if (bb->next_bb != EXIT_BLOCK_PTR)
4747 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 4748 }
c4c81601 4749
bb457bd9
JL
4750 passes++;
4751 }
4752
10d22567
ZD
4753 if (dump_file)
4754 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
bb457bd9
JL
4755}
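/* In dataflow terms, the fixed point computed above is (a sketch of the
   equations, matching the sbitmap calls in the loop):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN over all successors of bb

   sbitmap_a_or_b_and_c_cg evaluates the first equation and reports
   whether VBEIN changed; sbitmap_intersection_of_succs evaluates the
   second.  VBEOUT of the block just before EXIT is left empty, since
   nothing is very busy at the function's exit.  */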
4756
4757/* Top level routine to do the dataflow analysis needed by code hoisting. */
4758
4759static void
1d088dee 4760compute_code_hoist_data (void)
bb457bd9 4761{
02280659 4762 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
4763 compute_transpout ();
4764 compute_code_hoist_vbeinout ();
d47cc544 4765 calculate_dominance_info (CDI_DOMINATORS);
10d22567
ZD
4766 if (dump_file)
4767 fprintf (dump_file, "\n");
bb457bd9
JL
4768}
4769
4770/* Determine if the expression identified by EXPR_INDEX would
 4771 reach BB unimpaired if it was placed at the end of EXPR_BB.
4772
 4773 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4774 to me that the expression must either be computed or transparent in
4775 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4776 would allow the expression to be hoisted out of loops, even if
4777 the expression wasn't a loop invariant.
4778
4779 Contrast this to reachability for PRE where an expression is
 4780 considered reachable if *any* path reaches it instead of *all*
4781 paths. */
4782
4783static int
1d088dee 4784hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
bb457bd9
JL
4785{
4786 edge pred;
628f6a4e 4787 edge_iterator ei;
283a2545 4788 int visited_allocated_locally = 0;
589005ff 4789
bb457bd9
JL
4790
4791 if (visited == NULL)
4792 {
8e42ace1 4793 visited_allocated_locally = 1;
5ed6ace5 4794 visited = XCNEWVEC (char, last_basic_block);
bb457bd9
JL
4795 }
4796
628f6a4e 4797 FOR_EACH_EDGE (pred, ei, bb->preds)
bb457bd9 4798 {
e2d2ed72 4799 basic_block pred_bb = pred->src;
bb457bd9
JL
4800
4801 if (pred->src == ENTRY_BLOCK_PTR)
4802 break;
f305679f
JH
4803 else if (pred_bb == expr_bb)
4804 continue;
0b17ab2f 4805 else if (visited[pred_bb->index])
bb457bd9 4806 continue;
c4c81601 4807
bb457bd9 4808 /* Does this predecessor generate this expression? */
0b17ab2f 4809 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 4810 break;
0b17ab2f 4811 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 4812 break;
c4c81601 4813
bb457bd9
JL
4814 /* Not killed. */
4815 else
4816 {
0b17ab2f 4817 visited[pred_bb->index] = 1;
bb457bd9
JL
4818 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4819 pred_bb, visited))
4820 break;
4821 }
4822 }
589005ff 4823 if (visited_allocated_locally)
283a2545 4824 free (visited);
c4c81601 4825
bb457bd9
JL
4826 return (pred == NULL);
4827}
4828\f
4829/* Actually perform code hoisting. */
c4c81601 4830
bb457bd9 4831static void
1d088dee 4832hoist_code (void)
bb457bd9 4833{
e0082a72 4834 basic_block bb, dominated;
66f97d31 4835 VEC (basic_block, heap) *domby;
c635a1ec 4836 unsigned int i,j;
bb457bd9 4837 struct expr **index_map;
c4c81601 4838 struct expr *expr;
bb457bd9 4839
d55bc081 4840 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
4841
4842 /* Compute a mapping from expression number (`bitmap_index') to
4843 hash table entry. */
4844
5ed6ace5 4845 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659
ZD
4846 for (i = 0; i < expr_hash_table.size; i++)
4847 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 4848 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
4849
4850 /* Walk over each basic block looking for potentially hoistable
 4851 expressions; nothing gets hoisted from the entry block. */
e0082a72 4852 FOR_EACH_BB (bb)
bb457bd9
JL
4853 {
4854 int found = 0;
4855 int insn_inserted_p;
4856
66f97d31 4857 domby = get_dominated_by (CDI_DOMINATORS, bb);
bb457bd9
JL
4858 /* Examine each expression that is very busy at the exit of this
4859 block. These are the potentially hoistable expressions. */
e0082a72 4860 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
4861 {
4862 int hoistable = 0;
c4c81601 4863
c635a1ec
DB
4864 if (TEST_BIT (hoist_vbeout[bb->index], i)
4865 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
4866 {
 4867 /* We've found a potentially hoistable expression; now
4868 we look at every block BB dominates to see if it
4869 computes the expression. */
66f97d31 4870 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
bb457bd9
JL
4871 {
4872 /* Ignore self dominance. */
c635a1ec 4873 if (bb == dominated)
bb457bd9 4874 continue;
bb457bd9
JL
 4875 /* We've found a dominated block; now see if it computes
4876 the busy expression and whether or not moving that
4877 expression to the "beginning" of that block is safe. */
e0082a72 4878 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
4879 continue;
4880
4881 /* Note if the expression would reach the dominated block
589005ff 4882 unimpaired if it was placed at the end of BB.
bb457bd9
JL
4883
4884 Keep track of how many times this expression is hoistable
4885 from a dominated block into BB. */
e0082a72 4886 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
4887 hoistable++;
4888 }
4889
ff7cc307 4890 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
4891 expression, then note it in the bitmap of expressions to
4892 hoist. It makes no sense to hoist things which are computed
4893 in only one BB, and doing so tends to pessimize register
4894 allocation. One could increase this value to try harder
4895 to avoid any possible code expansion due to register
4896 allocation issues; however experiments have shown that
4897 the vast majority of hoistable expressions are only movable
e0bb17a8 4898 from two successors, so raising this threshold is likely
bb457bd9
JL
4899 to nullify any benefit we get from code hoisting. */
4900 if (hoistable > 1)
4901 {
e0082a72 4902 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
4903 found = 1;
4904 }
4905 }
4906 }
bb457bd9
JL
4907 /* If we found nothing to hoist, then quit now. */
4908 if (! found)
c635a1ec 4909 {
66f97d31
ZD
4910 VEC_free (basic_block, heap, domby);
4911 continue;
c635a1ec 4912 }
bb457bd9
JL
4913
4914 /* Loop over all the hoistable expressions. */
e0082a72 4915 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
4916 {
4917 /* We want to insert the expression into BB only once, so
4918 note when we've inserted it. */
4919 insn_inserted_p = 0;
4920
4921 /* These tests should be the same as the tests above. */
cb83c2ec 4922 if (TEST_BIT (hoist_exprs[bb->index], i))
bb457bd9
JL
4923 {
 4924 /* We've found a potentially hoistable expression; now
4925 we look at every block BB dominates to see if it
4926 computes the expression. */
66f97d31 4927 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
bb457bd9
JL
4928 {
4929 /* Ignore self dominance. */
c635a1ec 4930 if (bb == dominated)
bb457bd9
JL
4931 continue;
4932
 4933 /* We've found a dominated block; now see if it computes
4934 the busy expression and whether or not moving that
4935 expression to the "beginning" of that block is safe. */
e0082a72 4936 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
4937 continue;
4938
4939 /* The expression is computed in the dominated block and
4940 it would be safe to compute it at the start of the
4941 dominated block. Now we have to determine if the
ff7cc307 4942 expression would reach the dominated block if it was
bb457bd9 4943 placed at the end of BB. */
e0082a72 4944 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
4945 {
4946 struct expr *expr = index_map[i];
4947 struct occr *occr = expr->antic_occr;
4948 rtx insn;
4949 rtx set;
4950
ff7cc307 4951 /* Find the right occurrence of this expression. */
e0082a72 4952 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
4953 occr = occr->next;
4954
282899df 4955 gcc_assert (occr);
bb457bd9 4956 insn = occr->insn;
bb457bd9 4957 set = single_set (insn);
282899df 4958 gcc_assert (set);
bb457bd9
JL
4959
4960 /* Create a pseudo-reg to store the result of reaching
4961 expressions into. Get the mode for the new pseudo
4962 from the mode of the original destination pseudo. */
4963 if (expr->reaching_reg == NULL)
4964 expr->reaching_reg
4965 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4966
10d1bb36
JH
4967 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4968 delete_insn (insn);
4969 occr->deleted_p = 1;
4970 if (!insn_inserted_p)
bb457bd9 4971 {
6fb5fa3c 4972 insert_insn_end_basic_block (index_map[i], bb, 0);
10d1bb36 4973 insn_inserted_p = 1;
bb457bd9
JL
4974 }
4975 }
4976 }
4977 }
4978 }
66f97d31 4979 VEC_free (basic_block, heap, domby);
bb457bd9 4980 }
c4c81601 4981
8e42ace1 4982 free (index_map);
bb457bd9
JL
4983}
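/* For illustration only (a sketch of the transformation, not part of
   the pass): when the same expression is computed in two or more blocks
   dominated by BB and is very busy at BB's exit,

     if (c)
       x = a + b;
     else
       y = a + b;

   is rewritten roughly as

     t = a + b;
     if (c)
       x = t;
     else
       y = t;

   where t is the new reaching_reg pseudo.  A single hoistable
   occurrence is deliberately left alone; see the threshold above.  */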
4984
4985/* Top level routine to perform one code hoisting (aka unification) pass
4986
cc2902df 4987 Return nonzero if a change was made. */
bb457bd9
JL
4988
4989static int
1d088dee 4990one_code_hoisting_pass (void)
bb457bd9
JL
4991{
4992 int changed = 0;
4993
02280659
ZD
4994 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4995 compute_hash_table (&expr_hash_table);
10d22567
ZD
4996 if (dump_file)
 4997 dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 4998
02280659 4999 if (expr_hash_table.n_elems > 0)
bb457bd9 5000 {
02280659 5001 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
5002 compute_code_hoist_data ();
5003 hoist_code ();
5004 free_code_hoist_mem ();
5005 }
c4c81601 5006
02280659 5007 free_hash_table (&expr_hash_table);
bb457bd9
JL
5008
5009 return changed;
5010}
a13d4ebf
AM
5011\f
5012/* Here we provide the things required to do store motion towards
5013 the exit. In order for this to be effective, gcse also needed to
 5014 be taught how to move a load when it is killed only by a store to itself.
5015
5016 int i;
5017 float a[10];
5018
5019 void foo(float scale)
5020 {
5021 for (i=0; i<10; i++)
5022 a[i] *= scale;
5023 }
5024
5025 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
 5026 the load out since it's live around the loop, and stored at the bottom
5027 of the loop.
a13d4ebf 5028
589005ff 5029 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
 5030 an enhancement to gcse which, when using edge based lcm, recognizes
5031 this situation and allows gcse to move the load out of the loop.
5032
5033 Once gcse has hoisted the load, store motion can then push this
5034 load towards the exit, and we end up with no loads or stores of 'i'
5035 in the loop. */
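/* For illustration only, the intended net effect on the example above
   (a sketch; the passes actually work on RTL):

     void
     foo (float scale)
     {
       int t;                   /* reaching register for 'i' */
       for (t = 0; t < 10; t++)
         a[t] *= scale;
       i = t;                   /* single store sunk past the loop */
     }

   All loads and stores of 'i' inside the loop are replaced by uses of
   the pseudo t.  */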
5036
9727e468
RG
5037static hashval_t
5038pre_ldst_expr_hash (const void *p)
5039{
5040 int do_not_record_p = 0;
5041 const struct ls_expr *x = p;
5042 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
5043}
5044
5045static int
5046pre_ldst_expr_eq (const void *p1, const void *p2)
5047{
5048 const struct ls_expr *ptr1 = p1, *ptr2 = p2;
5049 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
5050}
5051
ff7cc307 5052/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
5053 doesn't find one, we create one and initialize it. */
5054
5055static struct ls_expr *
1d088dee 5056ldst_entry (rtx x)
a13d4ebf 5057{
b58b21d5 5058 int do_not_record_p = 0;
a13d4ebf 5059 struct ls_expr * ptr;
b58b21d5 5060 unsigned int hash;
9727e468
RG
5061 void **slot;
5062 struct ls_expr e;
a13d4ebf 5063
0516f6fe
SB
5064 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5065 NULL, /*have_reg_qty=*/false);
a13d4ebf 5066
9727e468
RG
5067 e.pattern = x;
5068 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
5069 if (*slot)
5070 return (struct ls_expr *)*slot;
b58b21d5 5071
5ed6ace5 5072 ptr = XNEW (struct ls_expr);
b58b21d5
RS
5073
5074 ptr->next = pre_ldst_mems;
5075 ptr->expr = NULL;
5076 ptr->pattern = x;
5077 ptr->pattern_regs = NULL_RTX;
5078 ptr->loads = NULL_RTX;
5079 ptr->stores = NULL_RTX;
5080 ptr->reaching_reg = NULL_RTX;
5081 ptr->invalid = 0;
5082 ptr->index = 0;
5083 ptr->hash_index = hash;
5084 pre_ldst_mems = ptr;
9727e468 5085 *slot = ptr;
589005ff 5086
a13d4ebf
AM
5087 return ptr;
5088}
5089
5090/* Free up an individual ldst entry. */
5091
589005ff 5092static void
1d088dee 5093free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 5094{
aaa4ca30
AJ
5095 free_INSN_LIST_list (& ptr->loads);
5096 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
5097
5098 free (ptr);
5099}
5100
5101/* Free up all memory associated with the ldst list. */
5102
5103static void
1d088dee 5104free_ldst_mems (void)
a13d4ebf 5105{
35b5442a
RG
5106 if (pre_ldst_table)
5107 htab_delete (pre_ldst_table);
9727e468
RG
5108 pre_ldst_table = NULL;
5109
589005ff 5110 while (pre_ldst_mems)
a13d4ebf
AM
5111 {
5112 struct ls_expr * tmp = pre_ldst_mems;
5113
5114 pre_ldst_mems = pre_ldst_mems->next;
5115
5116 free_ldst_entry (tmp);
5117 }
5118
5119 pre_ldst_mems = NULL;
5120}
5121
5122/* Dump debugging info about the ldst list. */
5123
5124static void
1d088dee 5125print_ldst_list (FILE * file)
a13d4ebf
AM
5126{
5127 struct ls_expr * ptr;
5128
5129 fprintf (file, "LDST list: \n");
5130
62e5bf5d 5131 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
a13d4ebf
AM
5132 {
5133 fprintf (file, " Pattern (%3d): ", ptr->index);
5134
5135 print_rtl (file, ptr->pattern);
5136
5137 fprintf (file, "\n Loads : ");
5138
5139 if (ptr->loads)
5140 print_rtl (file, ptr->loads);
5141 else
5142 fprintf (file, "(nil)");
5143
5144 fprintf (file, "\n Stores : ");
5145
5146 if (ptr->stores)
5147 print_rtl (file, ptr->stores);
5148 else
5149 fprintf (file, "(nil)");
5150
5151 fprintf (file, "\n\n");
5152 }
5153
5154 fprintf (file, "\n");
5155}
5156
 5157/* Return the entry matching X in the list of ldst only expressions, or NULL. */
5158
5159static struct ls_expr *
1d088dee 5160find_rtx_in_ldst (rtx x)
a13d4ebf 5161{
9727e468
RG
5162 struct ls_expr e;
5163 void **slot;
6375779a
RG
5164 if (!pre_ldst_table)
5165 return NULL;
9727e468
RG
5166 e.pattern = x;
5167 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
5168 if (!slot || ((struct ls_expr *)*slot)->invalid)
5169 return NULL;
5170 return *slot;
a13d4ebf
AM
5171}
5172
5173/* Assign each element of the list of mems a monotonically increasing value. */
5174
5175static int
1d088dee 5176enumerate_ldsts (void)
a13d4ebf
AM
5177{
5178 struct ls_expr * ptr;
5179 int n = 0;
5180
5181 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5182 ptr->index = n++;
5183
5184 return n;
5185}
5186
5187/* Return first item in the list. */
5188
5189static inline struct ls_expr *
1d088dee 5190first_ls_expr (void)
a13d4ebf
AM
5191{
5192 return pre_ldst_mems;
5193}
5194
0e8a66de 5195/* Return the next item in the list after the specified one. */
a13d4ebf
AM
5196
5197static inline struct ls_expr *
1d088dee 5198next_ls_expr (struct ls_expr * ptr)
a13d4ebf
AM
5199{
5200 return ptr->next;
5201}
5202\f
5203/* Load Motion for loads which only kill themselves. */
5204
 5205/* Return true if X is a simple MEM operation, with no registers or
 5206 side effects. These are the types of loads we consider for the
 5207 ld_motion list; otherwise we let the usual aliasing take care of it. */
5208
589005ff 5209static int
1d088dee 5210simple_mem (rtx x)
a13d4ebf 5211{
7b1b4aed 5212 if (! MEM_P (x))
a13d4ebf 5213 return 0;
589005ff 5214
a13d4ebf
AM
5215 if (MEM_VOLATILE_P (x))
5216 return 0;
589005ff 5217
a13d4ebf
AM
5218 if (GET_MODE (x) == BLKmode)
5219 return 0;
aaa4ca30 5220
47a3dae1
ZD
5221 /* If we are handling exceptions, we must be careful with memory references
5222 that may trap. If we are not, the behavior is undefined, so we may just
5223 continue. */
5224 if (flag_non_call_exceptions && may_trap_p (x))
98d3d336
RS
5225 return 0;
5226
47a3dae1
ZD
5227 if (side_effects_p (x))
5228 return 0;
589005ff 5229
47a3dae1
ZD
 5230 /* Do not consider function arguments passed on the stack. */
5231 if (reg_mentioned_p (stack_pointer_rtx, x))
5232 return 0;
5233
5234 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5235 return 0;
5236
5237 return 1;
a13d4ebf
AM
5238}
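/* Some examples (illustrative only) of what simple_mem accepts and
   rejects:

     (mem:SI (symbol_ref "x"))                 accepted
     (mem/v:SI (symbol_ref "x"))               rejected: volatile
     (mem:BLK (symbol_ref "buf"))              rejected: BLKmode
     (mem:SI (plus:SI (reg:SI sp)
                      (const_int 4)))          rejected: mentions the
                                               stack pointer  */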
5239
589005ff
KH
5240/* Make sure there isn't a buried reference in this pattern anywhere.
5241 If there is, invalidate the entry for it since we're not capable
 5242 of fixing it up just yet. We have to be sure we know about ALL
a13d4ebf
AM
5243 loads since the aliasing code will allow all entries in the
 5244 ld_motion list to not-alias themselves. If we miss a load, we will get
589005ff 5245 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
5246 fix it up. */
5247
5248static void
1d088dee 5249invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
5250{
5251 const char * fmt;
8e42ace1 5252 int i, j;
a13d4ebf
AM
5253 struct ls_expr * ptr;
5254
5255 /* Invalidate it in the list. */
7b1b4aed 5256 if (MEM_P (x) && simple_mem (x))
a13d4ebf
AM
5257 {
5258 ptr = ldst_entry (x);
5259 ptr->invalid = 1;
5260 }
5261
5262 /* Recursively process the insn. */
5263 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 5264
a13d4ebf
AM
5265 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5266 {
5267 if (fmt[i] == 'e')
5268 invalidate_any_buried_refs (XEXP (x, i));
5269 else if (fmt[i] == 'E')
5270 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5271 invalidate_any_buried_refs (XVECEXP (x, i, j));
5272 }
5273}
5274
4d3eb89a
HPN
5275/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5276 being defined as MEM loads and stores to symbols, with no side effects
5277 and no registers in the expression. For a MEM destination, we also
5278 check that the insn is still valid if we replace the destination with a
5279 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5280 which don't match this criteria, they are invalidated and trimmed out
5281 later. */
a13d4ebf 5282
589005ff 5283static void
1d088dee 5284compute_ld_motion_mems (void)
a13d4ebf
AM
5285{
5286 struct ls_expr * ptr;
e0082a72 5287 basic_block bb;
a13d4ebf 5288 rtx insn;
589005ff 5289
a13d4ebf 5290 pre_ldst_mems = NULL;
9727e468
RG
5291 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5292 pre_ldst_expr_eq, NULL);
a13d4ebf 5293
e0082a72 5294 FOR_EACH_BB (bb)
a13d4ebf 5295 {
eb232f4e 5296 FOR_BB_INSNS (bb, insn)
a13d4ebf 5297 {
735e8085 5298 if (INSN_P (insn))
a13d4ebf
AM
5299 {
5300 if (GET_CODE (PATTERN (insn)) == SET)
5301 {
5302 rtx src = SET_SRC (PATTERN (insn));
5303 rtx dest = SET_DEST (PATTERN (insn));
5304
5305 /* Check for a simple LOAD... */
7b1b4aed 5306 if (MEM_P (src) && simple_mem (src))
a13d4ebf
AM
5307 {
5308 ptr = ldst_entry (src);
7b1b4aed 5309 if (REG_P (dest))
a13d4ebf
AM
5310 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5311 else
5312 ptr->invalid = 1;
5313 }
5314 else
5315 {
5316 /* Make sure there isn't a buried load somewhere. */
5317 invalidate_any_buried_refs (src);
5318 }
589005ff 5319
a13d4ebf
AM
 5320 /* Check for stores. Don't worry about aliased ones; they
 5321 will block any movement we might do later. We only care
 5322 about this exact pattern since those are the only
 5323 circumstances in which we will ignore the aliasing info. */
7b1b4aed 5324 if (MEM_P (dest) && simple_mem (dest))
a13d4ebf
AM
5325 {
5326 ptr = ldst_entry (dest);
589005ff 5327
7b1b4aed 5328 if (! MEM_P (src)
4d3eb89a
HPN
5329 && GET_CODE (src) != ASM_OPERANDS
5330 /* Check for REG manually since want_to_gcse_p
5331 returns 0 for all REGs. */
1707bafa 5332 && can_assign_to_reg_p (src))
a13d4ebf
AM
5333 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5334 else
5335 ptr->invalid = 1;
5336 }
5337 }
5338 else
5339 invalidate_any_buried_refs (PATTERN (insn));
5340 }
5341 }
5342 }
5343}
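/* For illustration only, how the walk above classifies SETs (a sketch
   in RTL terms):

     (set (reg:SI 100) (mem:SI (symbol_ref "x")))  recorded as a load
     (set (mem:SI (symbol_ref "x")) (reg:SI 101))  recorded as a store
     (set (reg:SI 102)
          (plus:SI (mem:SI (symbol_ref "x"))
                   (const_int 1)))                 buried MEM; the entry
                                                   for "x" is invalidated

   Only the first two shapes can later be rewritten through a reaching
   register; any other use of the MEM makes the entry unusable.  */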
5344
589005ff 5345/* Remove any references that have been invalidated or are not in the
a13d4ebf
AM
5346 expression list for pre gcse. */
5347
5348static void
1d088dee 5349trim_ld_motion_mems (void)
a13d4ebf 5350{
b58b21d5
RS
5351 struct ls_expr * * last = & pre_ldst_mems;
5352 struct ls_expr * ptr = pre_ldst_mems;
a13d4ebf
AM
5353
5354 while (ptr != NULL)
5355 {
b58b21d5 5356 struct expr * expr;
589005ff 5357
a13d4ebf 5358 /* Delete if entry has been made invalid. */
b58b21d5 5359 if (! ptr->invalid)
a13d4ebf 5360 {
a13d4ebf 5361 /* Delete if we cannot find this mem in the expression list. */
b58b21d5 5362 unsigned int hash = ptr->hash_index % expr_hash_table.size;
589005ff 5363
b58b21d5
RS
5364 for (expr = expr_hash_table.table[hash];
5365 expr != NULL;
5366 expr = expr->next_same_hash)
5367 if (expr_equiv_p (expr->expr, ptr->pattern))
5368 break;
a13d4ebf
AM
5369 }
5370 else
b58b21d5
RS
5371 expr = (struct expr *) 0;
5372
5373 if (expr)
a13d4ebf
AM
5374 {
5375 /* Set the expression field if we are keeping it. */
a13d4ebf 5376 ptr->expr = expr;
b58b21d5 5377 last = & ptr->next;
a13d4ebf
AM
5378 ptr = ptr->next;
5379 }
b58b21d5
RS
5380 else
5381 {
5382 *last = ptr->next;
9727e468 5383 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
b58b21d5
RS
5384 free_ldst_entry (ptr);
5385 ptr = * last;
5386 }
a13d4ebf
AM
5387 }
5388
5389 /* Show the world what we've found. */
10d22567
ZD
5390 if (dump_file && pre_ldst_mems != NULL)
5391 print_ldst_list (dump_file);
a13d4ebf
AM
5392}
5393
5394/* This routine will take an expression which we are replacing with
5395 a reaching register, and update any stores that are needed if
5396 that expression is in the ld_motion list. Stores are updated by
a98ebe2e 5397 copying their SRC to the reaching register, and then storing
a13d4ebf
AM
 5398 the reaching register into the store location. This keeps the
5399 correct value in the reaching register for the loads. */
5400
5401static void
1d088dee 5402update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
5403{
5404 struct ls_expr * mem_ptr;
5405
5406 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5407 {
589005ff
KH
 5408 /* We can try to find just the REACHED stores, but it shouldn't
5409 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
5410 dead and should be eliminated later. */
5411
4d3eb89a
HPN
5412 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5413 where reg is the reaching reg used in the load. We checked in
5414 compute_ld_motion_mems that we can replace (set mem expr) with
5415 (set reg expr) in that insn. */
a13d4ebf 5416 rtx list = mem_ptr->stores;
589005ff 5417
a13d4ebf
AM
5418 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5419 {
5420 rtx insn = XEXP (list, 0);
5421 rtx pat = PATTERN (insn);
5422 rtx src = SET_SRC (pat);
5423 rtx reg = expr->reaching_reg;
c57718d3 5424 rtx copy, new;
a13d4ebf
AM
5425
5426 /* If we've already copied it, continue. */
5427 if (expr->reaching_reg == src)
5428 continue;
589005ff 5429
10d22567 5430 if (dump_file)
a13d4ebf 5431 {
10d22567
ZD
5432 fprintf (dump_file, "PRE: store updated with reaching reg ");
5433 print_rtl (dump_file, expr->reaching_reg);
5434 fprintf (dump_file, ":\n ");
5435 print_inline_rtx (dump_file, insn, 8);
5436 fprintf (dump_file, "\n");
a13d4ebf 5437 }
589005ff 5438
47a3dae1 5439 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
c57718d3
RK
5440 new = emit_insn_before (copy, insn);
5441 record_one_set (REGNO (reg), new);
a13d4ebf 5442 SET_SRC (pat) = reg;
6fb5fa3c 5443 df_insn_rescan (insn);
a13d4ebf
AM
5444
5445 /* un-recognize this pattern since it's probably different now. */
5446 INSN_CODE (insn) = -1;
5447 gcse_create_count++;
5448 }
5449 }
5450}
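/* For illustration only, the rewrite performed above in RTL terms (a
   sketch; reg 100 stands for the reaching register):

     before:  (set (mem:SI (symbol_ref "x")) (reg:SI 50))

     after:   (set (reg:SI 100) (reg:SI 50))
              (set (mem:SI (symbol_ref "x")) (reg:SI 100))

   so the reaching register always holds the current value of "x" and
   the moved loads can use it directly.  */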
5451\f
5452/* Store motion code. */
5453
47a3dae1
ZD
5454#define ANTIC_STORE_LIST(x) ((x)->loads)
5455#define AVAIL_STORE_LIST(x) ((x)->stores)
5456#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5457
589005ff 5458/* This is used to communicate the target bitvector we want to use in the
aaa4ca30 5459 reg_set_info routine when called via the note_stores mechanism. */
47a3dae1
ZD
5460static int * regvec;
5461
5462/* And current insn, for the same routine. */
5463static rtx compute_store_table_current_insn;
aaa4ca30 5464
a13d4ebf
AM
5465/* Used in computing the reverse edge graph bit vectors. */
5466static sbitmap * st_antloc;
5467
5468/* Global holding the number of store expressions we are dealing with. */
5469static int num_stores;
5470
01c43039
RE
 5471/* Record the last insn that sets DEST, if DEST is a register. Called from
5472 note_stores. */
a13d4ebf 5473
aaa4ca30 5474static void
1d088dee 5475reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
01c43039 5476 void *data)
a13d4ebf 5477{
01c43039
RE
5478 sbitmap bb_reg = data;
5479
aaa4ca30
AJ
5480 if (GET_CODE (dest) == SUBREG)
5481 dest = SUBREG_REG (dest);
adfcce61 5482
7b1b4aed 5483 if (REG_P (dest))
01c43039
RE
5484 {
5485 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5486 if (bb_reg)
5487 SET_BIT (bb_reg, REGNO (dest));
5488 }
5489}
5490
5491/* Clear any mark that says that this insn sets dest. Called from
5492 note_stores. */
5493
5494static void
5495reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5496 void *data)
5497{
5498 int *dead_vec = data;
5499
5500 if (GET_CODE (dest) == SUBREG)
5501 dest = SUBREG_REG (dest);
5502
7b1b4aed 5503 if (REG_P (dest) &&
01c43039
RE
5504 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5505 dead_vec[REGNO (dest)] = 0;
a13d4ebf
AM
5506}
5507
47a3dae1
ZD
5508/* Return zero if some of the registers in list X are killed
5509 due to set of registers in bitmap REGS_SET. */
1d088dee 5510
47a3dae1 5511static bool
1d088dee 5512store_ops_ok (rtx x, int *regs_set)
47a3dae1
ZD
5513{
5514 rtx reg;
5515
5516 for (; x; x = XEXP (x, 1))
5517 {
5518 reg = XEXP (x, 0);
5519 if (regs_set[REGNO(reg)])
1d088dee 5520 return false;
47a3dae1 5521 }
a13d4ebf 5522
47a3dae1
ZD
5523 return true;
5524}
5525
5526/* Returns a list of registers mentioned in X. */
5527static rtx
1d088dee 5528extract_mentioned_regs (rtx x)
47a3dae1
ZD
5529{
5530 return extract_mentioned_regs_helper (x, NULL_RTX);
5531}
5532
5533/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5534 registers. */
5535static rtx
1d088dee 5536extract_mentioned_regs_helper (rtx x, rtx accum)
a13d4ebf
AM
5537{
5538 int i;
5539 enum rtx_code code;
5540 const char * fmt;
5541
5542 /* Repeat is used to turn tail-recursion into iteration. */
5543 repeat:
5544
5545 if (x == 0)
47a3dae1 5546 return accum;
a13d4ebf
AM
5547
5548 code = GET_CODE (x);
5549 switch (code)
5550 {
5551 case REG:
47a3dae1 5552 return alloc_EXPR_LIST (0, x, accum);
a13d4ebf
AM
5553
5554 case MEM:
5555 x = XEXP (x, 0);
5556 goto repeat;
5557
5558 case PRE_DEC:
5559 case PRE_INC:
6fb5fa3c 5560 case PRE_MODIFY:
a13d4ebf
AM
5561 case POST_DEC:
5562 case POST_INC:
6fb5fa3c 5563 case POST_MODIFY:
47a3dae1 5564 /* We do not run this function with arguments having side effects. */
282899df 5565 gcc_unreachable ();
a13d4ebf
AM
5566
5567 case PC:
5568 case CC0: /*FIXME*/
5569 case CONST:
5570 case CONST_INT:
5571 case CONST_DOUBLE:
69ef87e2 5572 case CONST_VECTOR:
a13d4ebf
AM
5573 case SYMBOL_REF:
5574 case LABEL_REF:
5575 case ADDR_VEC:
5576 case ADDR_DIFF_VEC:
47a3dae1 5577 return accum;
a13d4ebf
AM
5578
5579 default:
5580 break;
5581 }
5582
5583 i = GET_RTX_LENGTH (code) - 1;
5584 fmt = GET_RTX_FORMAT (code);
589005ff 5585
a13d4ebf
AM
5586 for (; i >= 0; i--)
5587 {
5588 if (fmt[i] == 'e')
5589 {
5590 rtx tem = XEXP (x, i);
5591
5592 /* If we are about to do the last recursive call
47a3dae1 5593 needed at this level, change it into iteration. */
a13d4ebf
AM
5594 if (i == 0)
5595 {
5596 x = tem;
5597 goto repeat;
5598 }
589005ff 5599
47a3dae1 5600 accum = extract_mentioned_regs_helper (tem, accum);
a13d4ebf
AM
5601 }
5602 else if (fmt[i] == 'E')
5603 {
5604 int j;
589005ff 5605
a13d4ebf 5606 for (j = 0; j < XVECLEN (x, i); j++)
47a3dae1 5607 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
a13d4ebf
AM
5608 }
5609 }
5610
47a3dae1 5611 return accum;
a13d4ebf
AM
5612}
5613
47a3dae1
ZD
 5614/* Determine whether INSN is a MEM store pattern that we will consider moving.
 5615 REGS_SET_BEFORE is a bitmap of registers set before (and including) the
 5616 current insn; REGS_SET_AFTER is a bitmap of registers set after (and
5617 including) the insn in this basic block. We must be passing through BB from
5618 head to end, as we are using this fact to speed things up.
1d088dee 5619
47a3dae1
ZD
5620 The results are stored this way:
5621
5622 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5623 -- if the processed expression is not anticipatable, NULL_RTX is added
5624 there instead, so that we can use it as indicator that no further
5625 expression of this type may be anticipatable
5626 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5627 consequently, all of them but this head are dead and may be deleted.
 5628 -- if the expression is not available, the insn due to which it fails to be
5629 available is stored in reaching_reg.
5630
 5631 Things are complicated a bit by the fact that there may already be stores
5632 to the same MEM from other blocks; also caller must take care of the
e0bb17a8 5633 necessary cleanup of the temporary markers after end of the basic block.
47a3dae1 5634 */
a13d4ebf
AM
5635
5636static void
1d088dee 5637find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
a13d4ebf
AM
5638{
5639 struct ls_expr * ptr;
47a3dae1
ZD
5640 rtx dest, set, tmp;
5641 int check_anticipatable, check_available;
5642 basic_block bb = BLOCK_FOR_INSN (insn);
a13d4ebf 5643
47a3dae1
ZD
5644 set = single_set (insn);
5645 if (!set)
a13d4ebf
AM
5646 return;
5647
47a3dae1 5648 dest = SET_DEST (set);
589005ff 5649
7b1b4aed 5650 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
a13d4ebf 5651 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
5652 return;
5653
47a3dae1
ZD
5654 if (side_effects_p (dest))
5655 return;
aaa4ca30 5656
47a3dae1
ZD
5657 /* If we are handling exceptions, we must be careful with memory references
5658 that may trap. If we are not, the behavior is undefined, so we may just
5659 continue. */
94f24ddc 5660 if (flag_non_call_exceptions && may_trap_p (dest))
47a3dae1 5661 return;
1d088dee 5662
c2e2375e
UW
5663 /* Even if the destination cannot trap, the source may. In this case we'd
5664 need to handle updating the REG_EH_REGION note. */
5665 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5666 return;
5667
26fb114d
SB
5668 /* Make sure that the SET_SRC of this store insns can be assigned to
5669 a register, or we will fail later on in replace_store_insn, which
5670 assumes that we can do this. But sometimes the target machine has
5671 oddities like MEM read-modify-write instruction. See for example
5672 PR24257. */
5673 if (!can_assign_to_reg_p (SET_SRC (set)))
5674 return;
5675
a13d4ebf 5676 ptr = ldst_entry (dest);
47a3dae1
ZD
5677 if (!ptr->pattern_regs)
5678 ptr->pattern_regs = extract_mentioned_regs (dest);
5679
5680 /* Do not check for anticipatability if we either found one anticipatable
5681 store already, or tested for one and found out that it was killed. */
5682 check_anticipatable = 0;
5683 if (!ANTIC_STORE_LIST (ptr))
5684 check_anticipatable = 1;
5685 else
5686 {
5687 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5688 if (tmp != NULL_RTX
5689 && BLOCK_FOR_INSN (tmp) != bb)
5690 check_anticipatable = 1;
5691 }
5692 if (check_anticipatable)
5693 {
5694 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5695 tmp = NULL_RTX;
5696 else
5697 tmp = insn;
5698 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5699 ANTIC_STORE_LIST (ptr));
5700 }
a13d4ebf 5701
e0bb17a8 5702 /* It is not necessary to check whether the store is available if we did
47a3dae1
ZD
5703 it successfully before; if we failed before, do not bother to check
5704 until we reach the insn that caused us to fail. */
5705 check_available = 0;
5706 if (!AVAIL_STORE_LIST (ptr))
5707 check_available = 1;
5708 else
5709 {
5710 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5711 if (BLOCK_FOR_INSN (tmp) != bb)
5712 check_available = 1;
5713 }
5714 if (check_available)
5715 {
 5716 /* Check that we have already reached the insn at which the check
5717 failed last time. */
5718 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5719 {
a813c111 5720 for (tmp = BB_END (bb);
47a3dae1
ZD
5721 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5722 tmp = PREV_INSN (tmp))
5723 continue;
5724 if (tmp == insn)
5725 check_available = 0;
5726 }
5727 else
5728 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5729 bb, regs_set_after,
5730 &LAST_AVAIL_CHECK_FAILURE (ptr));
5731 }
5732 if (!check_available)
5733 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5734}
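/* For illustration only (a sketch): within a single block,

     p = &a[i];         sets a register used in the address
     *p = x;            not anticipatable: p is set above it
     y = *p;            this load kills the store above, so that
                        store is not available either
     *p = z;            available at the end of the block

   a store is anticipatable in a block if nothing before it there kills
   it, and available if nothing after it there kills it.  */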
1d088dee 5735
47a3dae1 5736/* Find available and anticipatable stores. */
a13d4ebf
AM
5737
5738static int
1d088dee 5739compute_store_table (void)
a13d4ebf 5740{
e0082a72
ZD
5741 int ret;
5742 basic_block bb;
aaa4ca30 5743 unsigned regno;
47a3dae1
ZD
5744 rtx insn, pat, tmp;
5745 int *last_set_in, *already_set;
5746 struct ls_expr * ptr, **prev_next_ptr_ptr;
aaa4ca30 5747
a13d4ebf
AM
5748 max_gcse_regno = max_reg_num ();
5749
703ad42b 5750 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
aaa4ca30 5751 max_gcse_regno);
d55bc081 5752 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 5753 pre_ldst_mems = 0;
9727e468
RG
5754 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5755 pre_ldst_expr_eq, NULL);
5ed6ace5
MD
5756 last_set_in = XCNEWVEC (int, max_gcse_regno);
5757 already_set = XNEWVEC (int, max_gcse_regno);
aaa4ca30 5758
a13d4ebf 5759 /* Find all the stores we care about. */
e0082a72 5760 FOR_EACH_BB (bb)
a13d4ebf 5761 {
47a3dae1 5762 /* First compute the registers set in this block. */
47a3dae1
ZD
5763 regvec = last_set_in;
5764
eb232f4e 5765 FOR_BB_INSNS (bb, insn)
47a3dae1
ZD
5766 {
5767 if (! INSN_P (insn))
5768 continue;
5769
7b1b4aed 5770 if (CALL_P (insn))
47a3dae1 5771 {
47a3dae1 5772 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5773 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
01c43039
RE
5774 {
5775 last_set_in[regno] = INSN_UID (insn);
5776 SET_BIT (reg_set_in_block[bb->index], regno);
5777 }
47a3dae1
ZD
5778 }
5779
5780 pat = PATTERN (insn);
5781 compute_store_table_current_insn = insn;
01c43039 5782 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
47a3dae1
ZD
5783 }
5784
47a3dae1
ZD
5785 /* Now find the stores. */
5786 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5787 regvec = already_set;
eb232f4e 5788 FOR_BB_INSNS (bb, insn)
a13d4ebf 5789 {
19652adf 5790 if (! INSN_P (insn))
a13d4ebf
AM
5791 continue;
5792
7b1b4aed 5793 if (CALL_P (insn))
aaa4ca30
AJ
5794 {
5795 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5796 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
47a3dae1 5797 already_set[regno] = 1;
aaa4ca30 5798 }
589005ff 5799
a13d4ebf 5800 pat = PATTERN (insn);
aaa4ca30 5801 note_stores (pat, reg_set_info, NULL);
589005ff 5802
a13d4ebf 5803 /* Now that we've marked regs, look for stores. */
47a3dae1
ZD
5804 find_moveable_store (insn, already_set, last_set_in);
5805
5806 /* Unmark regs that are no longer set. */
01c43039
RE
5807 compute_store_table_current_insn = insn;
5808 note_stores (pat, reg_clear_last_set, last_set_in);
7b1b4aed 5809 if (CALL_P (insn))
01c43039 5810 {
01c43039 5811 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6e14af16 5812 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
01c43039
RE
5813 && last_set_in[regno] == INSN_UID (insn))
5814 last_set_in[regno] = 0;
5815 }
47a3dae1
ZD
5816 }
5817
01c43039
RE
5818#ifdef ENABLE_CHECKING
5819 /* last_set_in should now be all-zero. */
5820 for (regno = 0; regno < max_gcse_regno; regno++)
282899df 5821 gcc_assert (!last_set_in[regno]);
01c43039
RE
5822#endif
5823
47a3dae1
ZD
5824 /* Clear temporary marks. */
5825 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5826 {
5827 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5828 if (ANTIC_STORE_LIST (ptr)
5829 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5830 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5831 }
5832 }
5833
5834 /* Remove the stores that are not available anywhere, as there will
5835 be no opportunity to optimize them. */
5836 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5837 ptr != NULL;
5838 ptr = *prev_next_ptr_ptr)
5839 {
5840 if (!AVAIL_STORE_LIST (ptr))
5841 {
5842 *prev_next_ptr_ptr = ptr->next;
9727e468 5843 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
47a3dae1 5844 free_ldst_entry (ptr);
a13d4ebf 5845 }
47a3dae1
ZD
5846 else
5847 prev_next_ptr_ptr = &ptr->next;
a13d4ebf
AM
5848 }
5849
5850 ret = enumerate_ldsts ();
589005ff 5851
10d22567 5852 if (dump_file)
a13d4ebf 5853 {
10d22567
ZD
5854 fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
5855 print_ldst_list (dump_file);
a13d4ebf 5856 }
589005ff 5857
47a3dae1
ZD
5858 free (last_set_in);
5859 free (already_set);
a13d4ebf
AM
5860 return ret;
5861}
5862
3b14e3af
ZD
5863/* Check to see if the load X is aliased with STORE_PATTERN.
5864 AFTER is true if we are checking the case when STORE_PATTERN occurs
 5865 after X. */
a13d4ebf 5866
47a3dae1 5867static bool
3b14e3af 5868load_kills_store (rtx x, rtx store_pattern, int after)
a13d4ebf 5869{
3b14e3af
ZD
5870 if (after)
5871 return anti_dependence (x, store_pattern);
5872 else
5873 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5874 rtx_addr_varies_p);
a13d4ebf
AM
5875}
5876
589005ff 5877/* Go through the entire insn X, looking for any loads which might alias
3b14e3af
ZD
5878 STORE_PATTERN. Return true if found.
5879 AFTER is true if we are checking the case when STORE_PATTERN occurs
5880 after the insn X. */
a13d4ebf 5881
47a3dae1 5882static bool
3b14e3af 5883find_loads (rtx x, rtx store_pattern, int after)
a13d4ebf
AM
5884{
5885 const char * fmt;
8e42ace1 5886 int i, j;
47a3dae1 5887 int ret = false;
a13d4ebf 5888
24a28584 5889 if (!x)
47a3dae1 5890 return false;
24a28584 5891
589005ff 5892 if (GET_CODE (x) == SET)
a13d4ebf
AM
5893 x = SET_SRC (x);
5894
7b1b4aed 5895 if (MEM_P (x))
a13d4ebf 5896 {
3b14e3af 5897 if (load_kills_store (x, store_pattern, after))
47a3dae1 5898 return true;
a13d4ebf
AM
5899 }
5900
5901 /* Recursively process the insn. */
5902 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 5903
a13d4ebf
AM
5904 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5905 {
5906 if (fmt[i] == 'e')
3b14e3af 5907 ret |= find_loads (XEXP (x, i), store_pattern, after);
a13d4ebf
AM
5908 else if (fmt[i] == 'E')
5909 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3b14e3af 5910 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
a13d4ebf
AM
5911 }
5912 return ret;
5913}
5914
1071bcbd
AO
5915static inline bool
5916store_killed_in_pat (rtx x, rtx pat, int after)
5917{
5918 if (GET_CODE (pat) == SET)
5919 {
5920 rtx dest = SET_DEST (pat);
5921
5922 if (GET_CODE (dest) == ZERO_EXTRACT)
5923 dest = XEXP (dest, 0);
5924
5925 /* Check for memory stores to aliased objects. */
5926 if (MEM_P (dest)
5927 && !expr_equiv_p (dest, x))
5928 {
5929 if (after)
5930 {
5931 if (output_dependence (dest, x))
5932 return true;
5933 }
5934 else
5935 {
5936 if (output_dependence (x, dest))
5937 return true;
5938 }
5939 }
5940 }
5941
5942 if (find_loads (pat, x, after))
5943 return true;
5944
5945 return false;
5946}
5947
589005ff 5948/* Check if INSN kills the store pattern X (is aliased with it).
3b14e3af 5949 AFTER is true if we are checking the case when store X occurs
3f117656 5950 after the insn. Return true if it does. */
a13d4ebf 5951
47a3dae1 5952static bool
3b14e3af 5953store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
a13d4ebf 5954{
1071bcbd 5955 rtx reg, base, note, pat;
94f24ddc 5956
735e8085 5957 if (!INSN_P (insn))
47a3dae1 5958 return false;
589005ff 5959
7b1b4aed 5960 if (CALL_P (insn))
a13d4ebf 5961 {
1218665b
JJ
5962 /* A normal or pure call might read from pattern,
5963 but a const call will not. */
47a3dae1
ZD
5964 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5965 return true;
5966
94f24ddc
ZD
5967 /* But even a const call reads its parameters. Check whether the
 5968 base of some of the registers used in the mem is the stack pointer. */
5969 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5970 {
bc083e18 5971 base = find_base_term (XEXP (reg, 0));
94f24ddc
ZD
5972 if (!base
5973 || (GET_CODE (base) == ADDRESS
5974 && GET_MODE (base) == Pmode
5975 && XEXP (base, 0) == stack_pointer_rtx))
5976 return true;
5977 }
47a3dae1
ZD
5978
5979 return false;
a13d4ebf 5980 }
589005ff 5981
1071bcbd
AO
5982 pat = PATTERN (insn);
5983 if (GET_CODE (pat) == SET)
a13d4ebf 5984 {
1071bcbd 5985 if (store_killed_in_pat (x, pat, after))
d088acea 5986 return true;
a13d4ebf 5987 }
1071bcbd
AO
5988 else if (GET_CODE (pat) == PARALLEL)
5989 {
5990 int i;
5991
5992 for (i = 0; i < XVECLEN (pat, 0); i++)
5993 if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
5994 return true;
5995 }
d088acea
ZD
5996 else if (find_loads (PATTERN (insn), x, after))
5997 return true;
5998
5999 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
6000 location aliased with X, then this insn kills X. */
6001 note = find_reg_equal_equiv_note (insn);
6002 if (! note)
6003 return false;
6004 note = XEXP (note, 0);
6005
6006 /* However, if the note represents a must alias rather than a may
6007 alias relationship, then it does not kill X. */
6008 if (expr_equiv_p (note, x))
6009 return false;
6010
6011 /* See if there are any aliased loads in the note. */
6012 return find_loads (note, x, after);
a13d4ebf
AM
6013}
6014
47a3dae1
ZD
6015/* Returns true if the expression X is loaded or clobbered on or after INSN
 6016 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
 6017 or after the insn. X_REGS is the list of registers mentioned in X. If
 6018 the store is killed, return in FAIL_INSN the last insn in which that happens. */
a13d4ebf 6019
47a3dae1 6020static bool
1d088dee
AJ
6021store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
6022 int *regs_set_after, rtx *fail_insn)
a13d4ebf 6023{
a813c111 6024 rtx last = BB_END (bb), act;
aaa4ca30 6025
47a3dae1 6026 if (!store_ops_ok (x_regs, regs_set_after))
1d088dee 6027 {
47a3dae1
ZD
6028 /* We do not know where it will happen. */
6029 if (fail_insn)
6030 *fail_insn = NULL_RTX;
6031 return true;
6032 }
a13d4ebf 6033
47a3dae1
ZD
6034 /* Scan from the end, so that fail_insn is determined correctly. */
6035 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
3b14e3af 6036 if (store_killed_in_insn (x, x_regs, act, false))
47a3dae1
ZD
6037 {
6038 if (fail_insn)
6039 *fail_insn = act;
6040 return true;
6041 }
589005ff 6042
47a3dae1 6043 return false;
a13d4ebf 6044}
1d088dee 6045
47a3dae1
ZD
6046/* Returns true if the expression X is loaded or clobbered on or before INSN
 6047 within basic block BB. X_REGS is the list of registers mentioned in X.
 6048 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
6049static bool
1d088dee
AJ
6050store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
6051 int *regs_set_before)
a13d4ebf 6052{
a813c111 6053 rtx first = BB_HEAD (bb);
a13d4ebf 6054
47a3dae1
ZD
6055 if (!store_ops_ok (x_regs, regs_set_before))
6056 return true;
a13d4ebf 6057
47a3dae1 6058 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
3b14e3af 6059 if (store_killed_in_insn (x, x_regs, insn, true))
47a3dae1 6060 return true;
589005ff 6061
47a3dae1 6062 return false;
a13d4ebf 6063}
1d088dee 6064
47a3dae1
ZD
6065/* Fill in available, anticipatable, transparent and kill vectors in
6066 STORE_DATA, based on lists of available and anticipatable stores. */
a13d4ebf 6067static void
1d088dee 6068build_store_vectors (void)
a13d4ebf 6069{
47a3dae1
ZD
6070 basic_block bb;
6071 int *regs_set_in_block;
a13d4ebf
AM
6072 rtx insn, st;
6073 struct ls_expr * ptr;
47a3dae1 6074 unsigned regno;
a13d4ebf
AM
6075
6076 /* Build the gen_vector. This is any store in the table which is not killed
6077 by aliasing later in its block. */
703ad42b 6078 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6079 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 6080
703ad42b 6081 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6082 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 6083
a13d4ebf 6084 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 6085 {
47a3dae1 6086 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
a13d4ebf
AM
6087 {
6088 insn = XEXP (st, 0);
e2d2ed72 6089 bb = BLOCK_FOR_INSN (insn);
589005ff 6090
47a3dae1
ZD
6091 /* If we've already seen an available expression in this block,
 6092 we can delete this one (it occurs earlier in the block). We'll
6093 copy the SRC expression to an unused register in case there
6094 are any side effects. */
6095 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf 6096 {
47a3dae1 6097 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
10d22567
ZD
6098 if (dump_file)
6099 fprintf (dump_file, "Removing redundant store:\n");
d088acea 6100 replace_store_insn (r, XEXP (st, 0), bb, ptr);
47a3dae1 6101 continue;
a13d4ebf 6102 }
47a3dae1 6103 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf 6104 }
589005ff 6105
47a3dae1
ZD
6106 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6107 {
6108 insn = XEXP (st, 0);
6109 bb = BLOCK_FOR_INSN (insn);
6110 SET_BIT (st_antloc[bb->index], ptr->index);
6111 }
a13d4ebf 6112 }
589005ff 6113
703ad42b 6114 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6115 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 6116
703ad42b 6117 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 6118 sbitmap_vector_zero (transp, last_basic_block);
5ed6ace5 6119 regs_set_in_block = XNEWVEC (int, max_gcse_regno);
a13d4ebf 6120
47a3dae1
ZD
6121 FOR_EACH_BB (bb)
6122 {
6123 for (regno = 0; regno < max_gcse_regno; regno++)
6124 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6125
6126 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6127 {
a813c111 6128 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
47a3dae1
ZD
6129 bb, regs_set_in_block, NULL))
6130 {
e0bb17a8 6131 /* It should not be necessary to consider the expression
47a3dae1
ZD
6132 killed if it is both anticipatable and available. */
6133 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6134 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6135 SET_BIT (ae_kill[bb->index], ptr->index);
1d088dee
AJ
6136 }
6137 else
6138 SET_BIT (transp[bb->index], ptr->index);
6139 }
47a3dae1
ZD
6140 }
6141
6142 free (regs_set_in_block);
aaa4ca30 6143
10d22567 6144 if (dump_file)
aaa4ca30 6145 {
10d22567
ZD
6146 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
6147 dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
6148 dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
6149 dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
6150 }
6151}
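/* To summarize the vectors fed to the reverse LCM problem (a sketch;
   see pre_edge_rev_lcm in lcm.c):

     ae_gen[bb]       the store is available at the end of bb
     st_antloc[bb]    the store is anticipatable at the start of bb
     ae_kill[bb]      the store is killed somewhere in bb
     transp[bb]       bb touches neither the stored memory nor the
                      registers its address mentions

   LCM on the reverse flowgraph then yields pre_insert_map (edges on
   which to insert the sunk store) and pre_delete_map (blocks whose
   original store becomes redundant).  */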
6152
fbe5a4a6 6153/* Insert an instruction at the beginning of a basic block, and update
a813c111 6154 the BB_HEAD if needed. */
a13d4ebf 6155
589005ff 6156static void
6fb5fa3c 6157insert_insn_start_basic_block (rtx insn, basic_block bb)
a13d4ebf
AM
6158{
6159 /* Insert at start of successor block. */
a813c111
SB
6160 rtx prev = PREV_INSN (BB_HEAD (bb));
6161 rtx before = BB_HEAD (bb);
a13d4ebf
AM
6162 while (before != 0)
6163 {
7b1b4aed 6164 if (! LABEL_P (before)
a38e7aa5 6165 && !NOTE_INSN_BASIC_BLOCK_P (before))
a13d4ebf
AM
6166 break;
6167 prev = before;
a813c111 6168 if (prev == BB_END (bb))
a13d4ebf
AM
6169 break;
6170 before = NEXT_INSN (before);
6171 }
6172
6fb5fa3c 6173 insn = emit_insn_after_noloc (insn, prev, bb);
a13d4ebf 6174
10d22567 6175 if (dump_file)
a13d4ebf 6176 {
10d22567 6177 fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 6178 bb->index);
10d22567
ZD
6179 print_inline_rtx (dump_file, insn, 6);
6180 fprintf (dump_file, "\n");
a13d4ebf
AM
6181 }
6182}
6183
6184/* This routine will insert a store on an edge. EXPR is the ldst entry for
cc2902df 6185 the memory reference, and E is the edge to insert it on. Returns nonzero
a13d4ebf
AM
6186 if an edge insertion was performed. */
6187
6188static int
1d088dee 6189insert_store (struct ls_expr * expr, edge e)
a13d4ebf
AM
6190{
6191 rtx reg, insn;
e2d2ed72 6192 basic_block bb;
a13d4ebf 6193 edge tmp;
628f6a4e 6194 edge_iterator ei;
a13d4ebf
AM
6195
 6196 /* We did all the deletes before this insert, so if we didn't delete a
6197 store, then we haven't set the reaching reg yet either. */
6198 if (expr->reaching_reg == NULL_RTX)
6199 return 0;
6200
a0c8285b
JH
6201 if (e->flags & EDGE_FAKE)
6202 return 0;
6203
a13d4ebf 6204 reg = expr->reaching_reg;
47a3dae1 6205 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
589005ff 6206
a13d4ebf
AM
6207 /* If we are inserting this expression on ALL predecessor edges of a BB,
6208 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 6209 edges so we don't also try to insert it on those edges. */
e2d2ed72 6210 bb = e->dest;
628f6a4e 6211 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
3f2eae23 6212 if (!(tmp->flags & EDGE_FAKE))
a0c8285b
JH
6213 {
6214 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
282899df
NS
6215
6216 gcc_assert (index != EDGE_INDEX_NO_EDGE);
a0c8285b
JH
6217 if (! TEST_BIT (pre_insert_map[index], expr->index))
6218 break;
6219 }
a13d4ebf
AM
6220
6221 /* If tmp is NULL, we found an insertion on every edge, blank the
6222 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 6223 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf 6224 {
628f6a4e 6225 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
a13d4ebf
AM
6226 {
6227 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6228 RESET_BIT (pre_insert_map[index], expr->index);
6229 }
6fb5fa3c 6230 insert_insn_start_basic_block (insn, bb);
a13d4ebf
AM
6231 return 0;
6232 }
589005ff 6233
b16aa8a5
RK
6234 /* We can't put stores in the front of blocks pointed to by abnormal
6235 edges since that may put a store where one didn't used to be. */
6236 gcc_assert (!(e->flags & EDGE_ABNORMAL));
a13d4ebf
AM
6237
6238 insert_insn_on_edge (insn, e);
589005ff 6239
10d22567 6240 if (dump_file)
a13d4ebf 6241 {
10d22567 6242 fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 6243 e->src->index, e->dest->index);
10d22567
ZD
6244 print_inline_rtx (dump_file, insn, 6);
6245 fprintf (dump_file, "\n");
a13d4ebf 6246 }
589005ff 6247
a13d4ebf
AM
6248 return 1;
6249}
6250
d088acea
ZD
6251/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6252 memory location in SMEXPR set in basic block BB.
6253
6254 This could be rather expensive. */
6255
6256static void
6257remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6258{
628f6a4e
BE
6259 edge_iterator *stack, ei;
6260 int sp;
6261 edge act;
d088acea 6262 sbitmap visited = sbitmap_alloc (last_basic_block);
d088acea
ZD
6263 rtx last, insn, note;
6264 rtx mem = smexpr->pattern;
6265
5ed6ace5 6266 stack = XNEWVEC (edge_iterator, n_basic_blocks);
628f6a4e
BE
6267 sp = 0;
6268 ei = ei_start (bb->succs);
6269
d088acea 6270 sbitmap_zero (visited);
d088acea 6271
f76ccf60 6272 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
d088acea
ZD
6273 while (1)
6274 {
6275 if (!act)
6276 {
628f6a4e 6277 if (!sp)
d088acea
ZD
6278 {
6279 free (stack);
6280 sbitmap_free (visited);
6281 return;
6282 }
628f6a4e 6283 act = ei_edge (stack[--sp]);
d088acea
ZD
6284 }
6285 bb = act->dest;
7b1b4aed 6286
d088acea 6287 if (bb == EXIT_BLOCK_PTR
d1c6a401 6288 || TEST_BIT (visited, bb->index))
d088acea 6289 {
628f6a4e
BE
6290 if (!ei_end_p (ei))
6291 ei_next (&ei);
6292 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
d088acea
ZD
6293 continue;
6294 }
6295 SET_BIT (visited, bb->index);
6296
6297 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6298 {
6299 for (last = ANTIC_STORE_LIST (smexpr);
6300 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6301 last = XEXP (last, 1))
6302 continue;
6303 last = XEXP (last, 0);
6304 }
6305 else
a813c111 6306 last = NEXT_INSN (BB_END (bb));
7b1b4aed 6307
a813c111 6308 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
d088acea
ZD
6309 if (INSN_P (insn))
6310 {
6311 note = find_reg_equal_equiv_note (insn);
6312 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6313 continue;
6314
10d22567
ZD
6315 if (dump_file)
6316 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
d088acea
ZD
6317 INSN_UID (insn));
6318 remove_note (insn, note);
6319 }
628f6a4e
BE
6320
6321 if (!ei_end_p (ei))
6322 ei_next (&ei);
6323 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6324
6325 if (EDGE_COUNT (bb->succs) > 0)
d088acea
ZD
6326 {
6327 if (act)
628f6a4e
BE
6328 stack[sp++] = ei;
6329 ei = ei_start (bb->succs);
f76ccf60 6330 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
d088acea
ZD
6331 }
6332 }
6333}
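/* For illustration only: a REG_EQUAL note such as

     (expr_list:REG_EQUAL (mem:SI (symbol_ref "x")) ...)

   asserts that the insn's destination equals the current contents of
   "x".  Once store motion inserts a new store to "x" on some path
   reaching that insn, the assertion may no longer hold, so the note is
   dropped by the walk above.  */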
6334
a13d4ebf
AM
6335/* This routine will replace a store with a SET to a specified register. */
6336
6337static void
d088acea 6338replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
a13d4ebf 6339{
d7fe1183 6340 rtx insn, mem, note, set, ptr, pair;
589005ff 6341
d088acea 6342 mem = smexpr->pattern;
9a318d30 6343 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
589005ff 6344
d088acea
ZD
6345 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6346 if (XEXP (ptr, 0) == del)
6347 {
6348 XEXP (ptr, 0) = insn;
6349 break;
6350 }
d7fe1183
ZD
6351
6352 /* Move the notes from the deleted insn to its replacement, and patch
6353 up the LIBCALL notes. */
6354 REG_NOTES (insn) = REG_NOTES (del);
6355
6356 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6357 if (note)
6358 {
6359 pair = XEXP (note, 0);
6360 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6361 XEXP (note, 0) = insn;
6362 }
6363 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6364 if (note)
6365 {
6366 pair = XEXP (note, 0);
6367 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6368 XEXP (note, 0) = insn;
6369 }
6370
bfff9190
SP
6371 /* Emit the insn AFTER all the notes are transferred.
6372 This is cheaper since we avoid df rescanning for the note change. */
6373 insn = emit_insn_after (insn, del);
6374
6375 if (dump_file)
6376 {
6377 fprintf (dump_file,
6378 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6379 print_inline_rtx (dump_file, del, 6);
6380 fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n ");
6381 print_inline_rtx (dump_file, insn, 6);
6382 fprintf (dump_file, "\n");
6383 }
6384
49ce134f 6385 delete_insn (del);
d088acea
ZD
6386
6387 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
6388 they are no longer accurate provided that they are reached by this
6389 definition, so drop them. */
a813c111 6390 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
d088acea
ZD
6391 if (INSN_P (insn))
6392 {
6393 set = single_set (insn);
6394 if (!set)
6395 continue;
6396 if (expr_equiv_p (SET_DEST (set), mem))
6397 return;
6398 note = find_reg_equal_equiv_note (insn);
6399 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6400 continue;
6401
10d22567
ZD
6402 if (dump_file)
6403 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
d088acea
ZD
6404 INSN_UID (insn));
6405 remove_note (insn, note);
6406 }
6407 remove_reachable_equiv_notes (bb, smexpr);
a13d4ebf
AM
6408}


/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct ls_expr *expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}
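
/* An illustrative example of the overall transformation (added for
   exposition; not from the original sources):

	if (cond)		if (cond)
	  *p = x;	==>	  r = x;
	else			else
	  *p = y;		  r = y;
				*p = r;

   delete_store rewrites each removable store as a copy into
   EXPR->reaching_reg, and insert_store later emits the single store
   "*p = r" on the edge(s) selected by the reverse LCM computation.  */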

/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr *ptr;
  int update_flow = 0;

  if (dump_file)
    {
      fprintf (dump_file, "before store motion\n");
      print_rtl (dump_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      htab_delete (pre_ldst_table);
      pre_ldst_table = NULL;
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now process each store expression: delete the redundant stores and
     insert the replacement stores chosen by LCM.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (pre_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
        {
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;
        }

      /* Delete the stores this expression makes redundant, then insert
         the compensating stores on the edges LCM selected.  */

      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();
}
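
/* A note on the LCM result bitmaps consumed above (descriptive comment
   added for clarity): pre_delete_map holds one sbitmap per basic block
   and pre_insert_map one sbitmap per edge, each with one bit per store
   expression.  A set bit in pre_delete_map[bb->index] means the store
   numbered ptr->index is removable in that block; a set bit in
   pre_insert_map[x] means a compensating copy of the store must be
   inserted on edge x.  */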

\f
/* Entry point for jump bypassing optimization pass.  */

static int
bypass_jumps (void)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias analysis.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  return changed;
}
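
/* Illustrative example of what jump bypassing does (added for
   exposition; not from the original sources): if one predecessor of a
   block unconditionally sets "r = 0" and the block ends in
   "if (r == 0) goto L1; else goto L2;", propagating the constant along
   that incoming edge decides the branch, so the edge can be redirected
   straight to L1, bypassing the conditional jump.  */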

/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     threshold the number of blocks, use something with a more
     graceful degradation.  */
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d edges/basic block",
               pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d registers",
               pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}
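
/* Worked example for the thresholds above (added for clarity): with
   1000 basic blocks the first test allows up to 20000 + 1000 * 4
   = 24000 edges, i.e. an average of 24 edges per block, far above the
   roughly two edges per block of a normal cfg, so only pathologically
   connected graphs are rejected.  */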
\f
static bool
gate_handle_jump_bypass (void)
{
  return optimize > 0 && flag_gcse;
}

/* Perform jump bypassing and control flow optimizations.  */
static unsigned int
rest_of_handle_jump_bypass (void)
{
  delete_unreachable_blocks ();
  if (bypass_jumps ())
    {
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
    }
  return 0;
}

struct tree_opt_pass pass_jump_bypass =
{
  "bypass",                             /* name */
  gate_handle_jump_bypass,              /* gate */
  rest_of_handle_jump_bypass,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BYPASS,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect | TODO_verify_flow,  /* todo_flags_finish */
  'G'                                   /* letter */
};


static bool
gate_handle_gcse (void)
{
  return optimize > 0 && flag_gcse;
}


static unsigned int
rest_of_handle_gcse (void)
{
  int save_csb, save_cfj;
  int tem2 = 0, tem;

  tem = gcse_main (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  rebuild_jump_labels (get_insns ());
  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
    {
      timevar_push (TV_CSE);
      tem2 = cse_main (get_insns (), max_reg_num ());
      df_finish_pass ();
      purge_all_dead_edges ();
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;
    }

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up.  */
  if (tem || tem2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
  return 0;
}
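
/* A note on the flag handling above: cse_main runs with
   flag_cse_follow_jumps and flag_cse_skip_blocks cleared, and the
   user's settings are restored only after the cleanup is done.
   Keeping the re-run cheap is the likely rationale, though the
   sources only say CSE is re-run to clean up after gcse.  */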

struct tree_opt_pass pass_gcse =
{
  "gcse1",                              /* name */
  gate_handle_gcse,                     /* gate */
  rest_of_handle_gcse,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish |
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
  'G'                                   /* letter */
};
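
/* Usage note (an assumption about the surrounding pass manager, not
   stated in this file): pass_gcse and pass_jump_bypass are expected to
   be registered in passes.c, e.g. via NEXT_PASS (pass_gcse), which
   then invokes the gate and execute hooks defined above.  */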


#include "gt-gcse.h"