/* Partial redundancy elimination / Hoisting for RTL.
   Copyright (C) 1997-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - calc rough register pressure information and use the info to drive all
     kinds of code motion (including code hoisting) in a unified way.
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "hard-reg-set.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "ira.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "tree-pass.h"
#include "hash-table.h"
#include "df.h"
#include "dbgcnt.h"
#include "target.h"
#include "gcse.h"
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by classic GCSE.

   Two passes of copy/constant propagation are done around PRE or hoisting
   because the first one enables more GCSE and the second one helps to clean
   up the copies that PRE and HOIST create.  This is needed more for PRE than
   for HOIST because code hoisting will try to use an existing register
   containing the common subexpression rather than create a new one.  This is
   harder to do for PRE because of the code motion (which HOIST doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   In addition, expressions in REG_EQUAL notes are candidates for GCSE-ing.
   This allows PRE to hoist expressions that are expressed in multiple insns,
   such as complex address calculations (e.g. for PIC code, or loads with a
   high part and a low part).

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   This study was done before expressions in REG_EQUAL notes were added as
   candidate expressions for optimization, and before the GIMPLE optimizers
   were added.  Probably, multiple passes are even less efficient now than
   at the time when the study was conducted.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CPROP pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
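/* As an illustrative sketch of the steps above (an assumed example, not
   output from any actual pass): given

       if (cond)
         x = a + b;
       ...
       y = a + b;

   the computation of a + b reaching Y is partially redundant.  Steps 4/5
   insert a copy of the expression on the path that did not compute it and
   direct every occurrence into one new pseudo, so that after PRE we have

       if (cond)
         reg = a + b, x = reg;
       else
         reg = a + b;
       ...
       y = reg;

   and the later copy/constant propagation pass cleans up the reg-reg copies
   this creates.  */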
/* GCSE global vars.  */

struct target_gcse default_target_gcse;
#if SWITCHABLE_TARGET
struct target_gcse *this_target_gcse = &default_target_gcse;
#endif

/* Set to non-zero if CSE should run after all GCSE optimizations are done.  */
int flag_rerun_cse_after_global_opts;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Hash table of expressions.  */

struct expr
{
  /* The expression.  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
  /* Maximum distance in instructions this expression can travel.
     We avoid moving simple expressions for more than a few instructions
     to keep register pressure under control.
     A value of "0" removes restrictions on how far the expression can
     travel.  */
  int max_distance;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
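/* An illustrative sketch of how these two structures relate (an assumed
   layout, not dumped from a real compilation): for a block containing

       insn 1:  r10 = r2 + r3
       ...
       insn 9:  r11 = r2 + r3

   the single struct expr for (plus r2 r3) would carry one antic_occr
   pointing at insn 1 (first computation, operands unmodified before it)
   and one avail_occr pointing at insn 9 (last computation, operands
   unmodified after it), with bitmap_index identifying the expression in
   the antloc/comp/transp bitmaps computed later.  */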
typedef struct occr *occr_t;

/* Expression hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table_d
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;
};

/* Expression hash table.  */
static struct hash_table_d expr_hash_table;

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e. loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.  */

struct ls_expr
{
  struct expr * expr;        /* Gcse expression reference for LM.  */
  rtx pattern;               /* Pattern of this mem.  */
  rtx pattern_regs;          /* List of registers mentioned by the mem.  */
  rtx loads;                 /* INSN list of loads seen.  */
  rtx stores;                /* INSN list of stores seen.  */
  struct ls_expr * next;     /* Next in the list.  */
  int invalid;               /* Invalid for some reason.  */
  int index;                 /* If it maps to a bitmap index.  */
  unsigned int hash_index;   /* Index when in a hash table.  */
  rtx reaching_reg;          /* Register to use when re-writing.  */
};
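/* Illustrative sketch (an assumed example, not taken from a dump): for a
   location such as (mem (reg/f 100)) that is only ever read and written as
   a whole, one ls_expr collects every load and store insn touching it;
   store motion can then rewrite the stores through a single reaching_reg
   provided none of the stored values has side effects.  */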
/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

struct pre_ldst_expr_hasher : typed_noop_remove <ls_expr>
{
  typedef ls_expr value_type;
  typedef value_type compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hashtable helpers.  */
inline hashval_t
pre_ldst_expr_hasher::hash (const value_type *x)
{
  int do_not_record_p = 0;
  return
    hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

static int expr_equiv_p (const_rtx, const_rtx);

inline bool
pre_ldst_expr_hasher::equal (const value_type *ptr1,
                             const compare_type *ptr2)
{
  return expr_equiv_p (ptr1->pattern, ptr2->pattern);
}

/* Hashtable for the load/store memory refs.  */
static hash_table<pre_ldst_expr_hasher> *pre_ldst_table;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static vec<rtx> *modify_mem_list;
static bitmap modify_mem_list_set;

typedef struct modify_pair_s
{
  rtx dest;                  /* A MEM.  */
  rtx dest_addr;             /* The canonical address of `dest'.  */
} modify_pair;


/* This array parallels modify_mem_list, except that it stores MEMs
   being set and their canonicalized memory addresses.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;

/* Doing code hoisting.  */
static bool doing_code_hoisting_p = false;

/* For available exprs.  */
static sbitmap *ae_kill;

/* Data stored for each basic block.  */
struct bb_data
{
  /* Maximal register pressure inside basic block for given register class
     (defined only for the pressure classes).  */
  int max_reg_pressure[N_REG_CLASSES];
  /* Recorded register pressure of basic block before trying to hoist
     an expression.  Will be used to restore the register pressure
     if the expression should not be hoisted.  */
  int old_pressure;
  /* Recorded register live_in info of basic block during code hoisting
     process.  BACKUP is used to record live_in info before trying to
     hoist an expression, and will be used to restore LIVE_IN if the
     expression should not be hoisted.  */
  bitmap live_in, backup;
};

#define BB_DATA(bb) ((struct bb_data *) (bb)->aux)

static basic_block curr_bb;

/* Current register pressure for each pressure class.  */
static int curr_reg_pressure[N_REG_CLASSES];
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void hash_scan_insn (rtx, struct hash_table_d *);
static void hash_scan_set (rtx, rtx, struct hash_table_d *);
static void hash_scan_clobber (rtx, rtx, struct hash_table_d *);
static void hash_scan_call (rtx, rtx, struct hash_table_d *);
static int want_to_gcse_p (rtx, int *);
static int oprs_unchanged_p (const_rtx, const_rtx, int);
static int oprs_anticipatable_p (const_rtx, const_rtx);
static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int, int,
                                  struct hash_table_d *);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void compute_hash_table (struct hash_table_d *);
static void alloc_hash_table (struct hash_table_d *);
static void free_hash_table (struct hash_table_d *);
static void compute_hash_table_work (struct hash_table_d *);
static void dump_hash_table (FILE *, const char *, struct hash_table_d *);
static void compute_transp (const_rtx, int, sbitmap *);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table_d *);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static struct edge_list *compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (struct edge_list *);
static int one_pre_gcse_pass (void);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int should_hoist_expr_to_dom (basic_block, struct expr *, basic_block,
                                     sbitmap, int, int *, enum reg_class,
                                     int *, bitmap, rtx);
static int hoist_code (void);
static enum reg_class get_regno_pressure_class (int regno, int *nregs);
static enum reg_class get_pressure_class_and_nregs (rtx insn, int *nregs);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ld_motion_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static bool is_too_expensive (const char *);

#define GNEW(T)            ((T *) gmalloc (sizeof (T)))
#define GCNEW(T)           ((T *) gcalloc (1, sizeof (T)))

#define GNEWVEC(T, N)      ((T *) gmalloc (sizeof (T) * (N)))
#define GCNEWVEC(T, N)     ((T *) gcalloc ((N), sizeof (T)))

#define GNEWVAR(T, S)      ((T *) gmalloc ((S)))
#define GCNEWVAR(T, S)     ((T *) gcalloc (1, (S)))

#define GOBNEW(T)          ((T *) gcse_alloc (sizeof (T)))
#define GOBNEWVAR(T, S)    ((T *) gcse_alloc ((S)))
/* Misc. utilities.  */

#define can_copy \
  (this_target_gcse->x_can_copy)
#define can_copy_init_p \
  (this_target_gcse->x_can_copy_init_p)

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}

/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the reg/memory set tracking tables.
   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = ALLOC_REG_SET (NULL);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  The two typedefs are needed to work around the
     pre-processor limitation with template types in macro arguments.  */
  typedef vec<rtx> vec_rtx_heap;
  typedef vec<modify_pair> vec_modify_pair_heap;
  modify_mem_list = GCNEWVEC (vec_rtx_heap, last_basic_block_for_fn (cfun));
  canon_modify_mem_list = GCNEWVEC (vec_modify_pair_heap,
				    last_basic_block_for_fn (cfun));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  FREE_REG_SET (reg_set_bitmap);

  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for PRE and code hoisting.  Both compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table_d *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
    }

  if (comp)
    bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));
  if (antloc)
    bitmap_vector_clear (antloc, last_basic_block_for_fn (cfun));

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		bitmap_set_bit (antloc[BLOCK_FOR_INSN (occr->insn)->index], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
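/* Illustrative sketch of the local properties above (an assumed example,
   not compiler output): in a block of the form

       t = a + b;    <- first and only computation of a + b
       a = c;        <- operand A is clobbered afterwards

   the expression a + b is locally anticipatable (ANTLOC set: it is computed
   first and its operands are unmodified before it), not locally available
   (COMP clear: A is modified before the block end), and not transparent
   (TRANSP cleared by compute_transp because the block kills an operand).  */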
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x, int *max_distance_ptr)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  /* GCSE'ing constants:

     We do not specifically distinguish between constant and non-constant
     expressions in PRE and Hoist.  We use set_src_cost below to limit
     the maximum distance simple expressions can travel.

     Nevertheless, constants are much easier to GCSE, and, hence,
     it is easy to overdo the optimizations.  Usually, excessive PRE and
     Hoisting of constants leads to increased register pressure.

     RA can deal with this by rematerializing some of the constants.
     Therefore, it is important that the back-end generates sets of constants
     in a way that allows reload to rematerialize them under high register
     pressure, i.e., a pseudo register with REG_EQUAL to constant
     is set only once.  Failing to do so will result in IRA/reload
     spilling such constants under high register pressure instead of
     rematerializing them.  */

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CALL:
      return 0;

    CASE_CONST_ANY:
      if (!doing_code_hoisting_p)
	/* Do not PRE constants.  */
	return 0;

      /* FALLTHRU */

    default:
      if (doing_code_hoisting_p)
	/* PRE doesn't implement max_distance restriction.  */
	{
	  int cost;
	  int max_distance;

	  gcc_assert (!optimize_function_for_speed_p (cfun)
		      && optimize_function_for_size_p (cfun));
	  cost = set_src_cost (x, 0);

	  if (cost < COSTS_N_INSNS (GCSE_UNRESTRICTED_COST))
	    {
	      max_distance = (GCSE_COST_DISTANCE_RATIO * cost) / 10;
	      if (max_distance == 0)
		return 0;

	      gcc_assert (max_distance > 0);
	    }
	  else
	    max_distance = 0;

	  if (max_distance_ptr)
	    *max_distance_ptr = max_distance;
	}

      return can_assign_to_reg_without_clobbers_p (x);
    }
}
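/* For instance (assumed parameter values, purely illustrative): with a
   GCSE_COST_DISTANCE_RATIO of 10, an expression whose set_src_cost is 4 may
   travel at most (10 * 4) / 10 = 4 instructions, while a cost of 0 yields a
   max_distance of 0 above and the expression is rejected outright.
   Expressions costing COSTS_N_INSNS (GCSE_UNRESTRICTED_COST) or more get
   max_distance 0, which (per struct expr) means no distance restriction.  */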
/* Used internally by can_assign_to_reg_without_clobbers_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register such that the
   resulting insn does not result in clobbering a hard register as a
   side-effect.

   Additionally, if the target requires it, check that the resulting insn
   can be copied.  If it cannot, this means that X is special and probably
   has hidden side-effects we don't want to mess with.

   This function is typically used by code motion passes, to verify
   that it is safe to insert an insn without worrying about clobbering
   maybe live hard regs.  */

bool
can_assign_to_reg_without_clobbers_p (rtx x)
{
  int num_clobbers = 0;
  int icode;
  bool can_assign = false;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
      INSN_LOCATION (test_insn) = UNKNOWN_LOCATION;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;

  icode = recog (PATTERN (test_insn), test_insn, &num_clobbers);

  /* If the test insn is valid and doesn't need clobbers, and the target also
     has no objections, we're good.  */
  if (icode >= 0
      && (num_clobbers == 0 || !added_clobbers_hard_reg_p (icode))
      && ! (targetm.cannot_copy_insn_p
	    && targetm.cannot_copy_insn_p (test_insn)))
    can_assign = true;

  /* Make sure test_insn doesn't have any pointers into GC space.  */
  SET_SRC (PATTERN (test_insn)) = NULL_RTX;

  return can_assign;
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < DF_INSN_LUID (insn);
	else
	  return info->first_set >= DF_INSN_LUID (insn);
      }

    case MEM:
      if (! flag_gcse_lm
	  || load_killed_in_block_p (current_bb, DF_INSN_LUID (insn),
				     x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
/* Info passed from load_killed_in_block_p to mems_conflict_for_gcse_p.  */

struct mem_conflict_info
{
  /* A memory reference for a load instruction, mems_conflict_for_gcse_p will
     see if a memory store conflicts with this memory load.  */
  const_rtx mem;

  /* True if mems_conflict_for_gcse_p finds a conflict between two memory
     references.  */
  bool conflict;
};

/* DEST is the output of an instruction.  If it is a memory reference and
   possibly conflicts with the load found in DATA, then communicate this
   information back through DATA.  */

static void
mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
			  void *data)
{
  struct mem_conflict_info *mci = (struct mem_conflict_info *) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */
  if (pre_ldst_mems != NULL && expr_equiv_p (dest, mci->mem))
    {
      if (!find_rtx_in_ldst (dest))
	mci->conflict = true;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), mci->mem))
    mci->conflict = true;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the LUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x,
			int avail_p)
{
  vec<rtx> list = modify_mem_list[bb->index];
  rtx setter;
  unsigned ix;

  /* If this is a readonly then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  FOR_EACH_VEC_ELT_REVERSE (list, ix, setter)
    {
      struct mem_conflict_info mci;

      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && DF_INSN_LUID (setter) < uid_limit)
	  || (! avail_p
	      && DF_INSN_LUID (setter) > uid_limit))
	continue;

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.  */
      mci.mem = x;
      mci.conflict = false;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, &mci);
      if (mci.conflict)
	return 1;
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p, NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const_rtx x, const_rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.

   MAX_DISTANCE is the maximum distance in instructions this expression can
   be moved.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, int max_distance, struct hash_table_d *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = GOBNEW (struct expr);
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
      gcc_assert (max_distance >= 0);
      cur_expr->max_distance = max_distance;
    }
  else
    gcc_assert (cur_expr->max_distance == max_distance);

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr
	  && BLOCK_FOR_INSN (antic_occr->insn) != BLOCK_FOR_INSN (insn))
	antic_occr = NULL;

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = GOBNEW (struct occr);
	  bytes_used += sizeof (struct occr);
	  antic_occr->insn = insn;
	  antic_occr->next = cur_expr->antic_occr;
	  antic_occr->deleted_p = 0;
	  cur_expr->antic_occr = antic_occr;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr
	  && BLOCK_FOR_INSN (avail_occr->insn) == BLOCK_FOR_INSN (insn))
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = GOBNEW (struct occr);
	  bytes_used += sizeof (struct occr);
	  avail_occr->insn = insn;
	  avail_occr->next = cur_expr->avail_occr;
	  avail_occr->deleted_p = 0;
	  cur_expr->avail_occr = avail_occr;
	}
    }
}
/* Scan SET present in INSN and add an entry to the hash TABLE.  */

static void
hash_scan_set (rtx set, rtx insn, struct hash_table_d *table)
{
  rtx src = SET_SRC (set);
  rtx dest = SET_DEST (set);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      int max_distance = 0;

      /* See if a REG_EQUAL note shows this equivalent to a simpler expression.

	 This allows us to do a single GCSE pass and still eliminate
	 redundant constants, addresses or other expressions that are
	 constructed with multiple instructions.

	 However, keep the original SRC if INSN is a simple reg-reg move.
	 In this case, there will almost always be a REG_EQUAL note on the
	 insn that sets SRC.  By recording the REG_EQUAL value here as SRC
	 for INSN, we miss copy propagation opportunities and we perform the
	 same PRE GCSE operation repeatedly on the same REG_EQUAL value if we
	 do more than one PRE GCSE pass.

	 Note that this does not impede profitable constant propagations.  We
	 "look through" reg-reg sets in lookup_avail_set.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
	  && REG_NOTE_KIND (note) == REG_EQUAL
	  && !REG_P (src)
	  && want_to_gcse_p (XEXP (note, 0), NULL))
	src = XEXP (note, 0), set = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH edges so disable GCSE on these for now.  */
	  /* ??? We can now easily create new EH landing pads at the
	     gimple level, for splitting edges; there's no reason we
	     can't do the same thing at the rtl level.  */
	  && !can_throw_internal (insn)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src, &max_distance)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (set)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  The latter condition does not have to mean that
	     SRC itself is not anticipatable, but we just will not be
	     able to handle code motion of insns with multiple sets.  */
	  int antic_p = oprs_anticipatable_p (src, insn)
			&& !multiple_sets (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p,
				max_distance, table);
	}
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);
      int max_distance = 0;

      /* Only record sets of pseudo-regs in the hash table.  */
      if (regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH edges so disable GCSE on these for now.  */
	  && !can_throw_internal (insn)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest, &max_distance)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (set)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, max_distance, table);
	}
    }
}
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table_d *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table_d *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.  */

static void
hash_scan_insn (rtx insn, struct hash_table_d *table)
{
  rtx pat = PATTERN (insn);
  int i;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);

  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);

  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }
}
/* Dump the hash table TABLE to file FILE under the name NAME.  */

static void
dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = XCNEWVEC (struct expr *, table->n_elems);
  hash_val = XNEWVEC (unsigned int, table->n_elems);

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d; max distance %d)\n  ",
		 expr->bitmap_index, hash_val[i], expr->max_distance);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int luid = DF_INSN_LUID (insn);

  info->last_set = luid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = luid;
    }
}
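/* Illustrative sketch (assumed LUIDs): if r5 is set at LUIDs 3 and 8 within
   the current block, then first_set = 3 and last_set = 8, and for an insn
   with LUID 6 oprs_unchanged_p reports r5 as unchanged-before-insn
   (anticipatable) only when 3 >= 6 (false here) and as unchanged-after-insn
   (available) only when 8 < 6 (also false here).  Because last_bb is checked
   first, stale entries left over from other blocks are simply ignored rather
   than cleared.  */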
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx x ATTRIBUTE_UNUSED,
		   void * v_insn)
{
  rtx dest_addr, insn;
  int bb;
  modify_pair pair;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_FOR_INSN (insn)->index;

  pair.dest = dest;
  pair.dest_addr = dest_addr;
  canon_modify_mem_list[bb].safe_push (pair);
}
1498
a13d4ebf
AM
1499/* Record memory modification information for INSN. We do not actually care
1500 about the memory location(s) that are set, or even how they are set (consider
1501 a CALL_INSN). We merely need to record which insns modify memory. */
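   Blocks containing calls are additionally flagged in blocks_with_calls;
   for other stores, note_stores records each (dest, canonical address)
   pair via canon_list_insert for the later dependence tests.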
7506f491
DE
1502
1503static void
1d088dee 1504record_last_mem_set_info (rtx insn)
7506f491 1505{
85c0f02d
SB
1506 int bb;
1507
1508 if (! flag_gcse_lm)
1509 return;
0fe854a7 1510
ccef9ef5 1511 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 1512 everything. */
85c0f02d 1513 bb = BLOCK_FOR_INSN (insn)->index;
9771b263 1514 modify_mem_list[bb].safe_push (insn);
0fe854a7 1515 bitmap_set_bit (modify_mem_list_set, bb);
a13d4ebf 1516
7b1b4aed 1517 if (CALL_P (insn))
6ce1edcf 1518 bitmap_set_bit (blocks_with_calls, bb);
a13d4ebf 1519 else
0fe854a7 1520 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
7506f491
DE
1521}
1522
7506f491 1523/* Called from compute_hash_table via note_stores to handle one
84832317
MM
1524 SET or CLOBBER in an insn. DATA is really the instruction in which
1525 the SET is taking place. */
7506f491
DE
1526
1527static void
7bc980e1 1528record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
7506f491 1529{
84832317
MM
1530 rtx last_set_insn = (rtx) data;
1531
7506f491
DE
1532 if (GET_CODE (dest) == SUBREG)
1533 dest = SUBREG_REG (dest);
1534
7b1b4aed 1535 if (REG_P (dest))
7506f491 1536 record_last_reg_set_info (last_set_insn, REGNO (dest));
7b1b4aed 1537 else if (MEM_P (dest)
7506f491
DE
1538 /* Ignore pushes, they clobber nothing. */
1539 && ! push_operand (dest, GET_MODE (dest)))
1540 record_last_mem_set_info (last_set_insn);
1541}
1542
e45425ec 1543/* Top level function to create an expression hash table.
7506f491
DE
1544
1545 Expression entries are placed in the hash table if
1546 - they are of the form (set (pseudo-reg) src),
1547 - src is something we want to perform GCSE on,
1548 - none of the operands are subsequently modified in the block
1549
7506f491
DE
1550 Currently src must be a pseudo-reg or a const_int.
1551
02280659 1552 TABLE is the table computed. */
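/* For illustration, a set such as (set (reg 100) (plus (reg 101) (const_int 4)))
   qualifies as long as its operands are not modified later in the block,
   whereas a set whose destination is not a pseudo register does not. */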
7506f491
DE
1553
1554static void
7e5487a2 1555compute_hash_table_work (struct hash_table_d *table)
7506f491 1556{
5f39ad47 1557 int i;
7506f491 1558
a13d4ebf 1559 /* Clear any memory-modification records left over from a previous pass. */
73991d6a 1560 clear_modify_mem_tables ();
7506f491 1561 /* Some working arrays used to track first and last set in each block. */
5f39ad47 1562 reg_avail_info = GNEWVEC (struct reg_avail_info, max_reg_num ());
80c29cc4 1563
5f39ad47 1564 for (i = 0; i < max_reg_num (); ++i)
e0082a72 1565 reg_avail_info[i].last_bb = NULL;
7506f491 1566
11cd3bed 1567 FOR_EACH_BB_FN (current_bb, cfun)
7506f491
DE
1568 {
1569 rtx insn;
770ae6cc 1570 unsigned int regno;
7506f491
DE
1571
1572 /* First pass over the instructions records information used to
4a81774c 1573 determine when registers and memory are first and last set. */
eb232f4e 1574 FOR_BB_INSNS (current_bb, insn)
7506f491 1575 {
a344c9f1 1576 if (!NONDEBUG_INSN_P (insn))
7506f491
DE
1577 continue;
1578
7b1b4aed 1579 if (CALL_P (insn))
7506f491 1580 {
c7fb4c7a
SB
1581 hard_reg_set_iterator hrsi;
1582 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call,
1583 0, regno, hrsi)
1584 record_last_reg_set_info (insn, regno);
c4c81601 1585
e45425ec
SB
1586 if (! RTL_CONST_OR_PURE_CALL_P (insn))
1587 record_last_mem_set_info (insn);
7506f491
DE
1588 }
1589
84832317 1590 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
1591 }
1592
1593 /* The next pass builds the hash table. */
eb232f4e 1594 FOR_BB_INSNS (current_bb, insn)
a344c9f1 1595 if (NONDEBUG_INSN_P (insn))
4a8cae83 1596 hash_scan_insn (insn, table);
7506f491
DE
1597 }
1598
80c29cc4
RZ
1599 free (reg_avail_info);
1600 reg_avail_info = NULL;
7506f491
DE
1601}
1602
02280659 1603/* Allocate space for the set/expr hash TABLE.
e45425ec 1604 The function's insn count determines the number of buckets to use. */
7506f491
DE
1605
1606static void
e45425ec 1607alloc_hash_table (struct hash_table_d *table)
7506f491
DE
1608{
1609 int n;
1610
b5b8b0ac
AO
1611 n = get_max_insn_count ();
1612
1613 table->size = n / 4;
02280659
ZD
1614 if (table->size < 11)
1615 table->size = 11;
c4c81601 1616
7506f491
DE
1617 /* Attempt to maintain efficient use of hash table.
1618 Making it an odd number is simplest for now.
1619 ??? Later take some measurements. */
02280659
ZD
1620 table->size |= 1;
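  /* For example, a function with 1000 insns gives 1000/4 = 250, forced odd
     to 251 buckets.  */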
1621 n = table->size * sizeof (struct expr *);
1b4572a8 1622 table->table = GNEWVAR (struct expr *, n);
7506f491
DE
1623}
1624
02280659 1625/* Free things allocated by alloc_hash_table. */
7506f491
DE
1626
1627static void
7e5487a2 1628free_hash_table (struct hash_table_d *table)
7506f491 1629{
02280659 1630 free (table->table);
7506f491
DE
1631}
1632
e45425ec 1633/* Compute the expression hash table TABLE. */
7506f491
DE
1634
1635static void
7e5487a2 1636compute_hash_table (struct hash_table_d *table)
7506f491
DE
1637{
1638 /* Initialize count of number of entries in hash table. */
02280659 1639 table->n_elems = 0;
703ad42b 1640 memset (table->table, 0, table->size * sizeof (struct expr *));
7506f491 1641
02280659 1642 compute_hash_table_work (table);
7506f491
DE
1643}
1644\f
1645/* Expression tracking support. */
1646
e45425ec
SB
1647/* Clear canon_modify_mem_list and modify_mem_list tables. */
1648static void
1649clear_modify_mem_tables (void)
0e3f0221 1650{
e45425ec
SB
1651 unsigned i;
1652 bitmap_iterator bi;
0e3f0221 1653
e45425ec 1654 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
0e3f0221 1655 {
9771b263
DN
1656 modify_mem_list[i].release ();
1657 canon_modify_mem_list[i].release ();
0e3f0221 1658 }
e45425ec
SB
1659 bitmap_clear (modify_mem_list_set);
1660 bitmap_clear (blocks_with_calls);
0e3f0221
RS
1661}
1662
e45425ec 1663/* Release memory used by modify_mem_list_set. */
0e3f0221 1664
e45425ec
SB
1665static void
1666free_modify_mem_tables (void)
e129b3f9 1667{
e45425ec
SB
1668 clear_modify_mem_tables ();
1669 free (modify_mem_list);
1670 free (canon_modify_mem_list);
1671 modify_mem_list = 0;
1672 canon_modify_mem_list = 0;
e129b3f9 1673}
e45425ec
SB
1674\f
1675/* For each block, compute whether X is transparent. X is either an
1676 expression or an assignment [though we don't care which, for this context
1677 an assignment is treated as an expression]. For each block where an
1678 element of X is modified, reset the INDX bit in BMAP. */
0e3f0221 1679
e45425ec
SB
1680static void
1681compute_transp (const_rtx x, int indx, sbitmap *bmap)
0e3f0221 1682{
e45425ec
SB
1683 int i, j;
1684 enum rtx_code code;
1685 const char *fmt;
0e3f0221 1686
e45425ec
SB
1687 /* repeat is used to turn tail-recursion into iteration since GCC
1688 can't do it when there's no return value. */
1689 repeat:
0e3f0221 1690
e45425ec
SB
1691 if (x == 0)
1692 return;
72b8d451 1693
e45425ec
SB
1694 code = GET_CODE (x);
1695 switch (code)
0e3f0221 1696 {
e45425ec 1697 case REG:
628f6a4e 1698 {
e45425ec
SB
1699 df_ref def;
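	/* X is not transparent in any block containing a definition of
	   this register; walk the DF def chain and clear those blocks.  */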
1700 for (def = DF_REG_DEF_CHAIN (REGNO (x));
1701 def;
1702 def = DF_REF_NEXT_REG (def))
d7c028c0 1703 bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
628f6a4e 1704 }
7821bfc7 1705
e45425ec 1706 return;
72b8d451 1707
e45425ec
SB
1708 case MEM:
1709 if (! MEM_READONLY_P (x))
0e3f0221 1710 {
e45425ec
SB
1711 bitmap_iterator bi;
1712 unsigned bb_index;
92390dd1
PB
1713 rtx x_addr;
1714
1715 x_addr = get_addr (XEXP (x, 0));
1716 x_addr = canon_rtx (x_addr);
e129b3f9 1717
e45425ec
SB
1718 /* First handle all the blocks with calls. We don't need to
1719 do any list walking for them. */
1720 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
1721 {
d7c028c0 1722 bitmap_clear_bit (bmap[bb_index], indx);
e45425ec 1723 }
0e3f0221 1724
92390dd1
PB
1725 /* Now iterate over the blocks which have memory modifications
1726 but which do not have any calls. */
1727 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
1728 blocks_with_calls,
1729 0, bb_index, bi)
1730 {
1731 vec<modify_pair> list
1732 = canon_modify_mem_list[bb_index];
1733 modify_pair *pair;
1734 unsigned ix;
1735
1736 FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
1737 {
1738 rtx dest = pair->dest;
1739 rtx dest_addr = pair->dest_addr;
1740
1741 if (canon_true_dependence (dest, GET_MODE (dest),
1742 dest_addr, x, x_addr))
ad0188be
RB
1743 {
1744 bitmap_clear_bit (bmap[bb_index], indx);
1745 break;
1746 }
92390dd1
PB
1747 }
1748 }
0e3f0221 1749 }
0e3f0221 1750
e45425ec
SB
1751 x = XEXP (x, 0);
1752 goto repeat;
0e3f0221 1753
e45425ec
SB
1754 case PC:
1755 case CC0: /*FIXME*/
1756 case CONST:
d8116890 1757 CASE_CONST_ANY:
e45425ec
SB
1758 case SYMBOL_REF:
1759 case LABEL_REF:
1760 case ADDR_VEC:
1761 case ADDR_DIFF_VEC:
1762 return;
0e3f0221 1763
e45425ec
SB
1764 default:
1765 break;
1766 }
7821bfc7 1767
e45425ec 1768 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
0e3f0221 1769 {
e45425ec 1770 if (fmt[i] == 'e')
0e3f0221 1771 {
e45425ec
SB
1772 /* If we are about to do the last recursive call
1773 needed at this level, change it into iteration.
1774 This function is called enough to be worth it. */
1775 if (i == 0)
1776 {
1777 x = XEXP (x, i);
1778 goto repeat;
1779 }
1780
1781 compute_transp (XEXP (x, i), indx, bmap);
0e3f0221 1782 }
e45425ec
SB
1783 else if (fmt[i] == 'E')
1784 for (j = 0; j < XVECLEN (x, i); j++)
1785 compute_transp (XVECEXP (x, i, j), indx, bmap);
0e3f0221 1786 }
0e3f0221
RS
1787}
1788\f
a65f3558 1789/* Compute PRE+LCM working variables. */
7506f491
DE
1790
1791/* Local properties of expressions. */
43c8a043 1792
7506f491 1793/* Nonzero for expressions that are transparent in the block. */
a65f3558 1794static sbitmap *transp;
7506f491 1795
a65f3558
JL
1796/* Nonzero for expressions that are computed (available) in the block. */
1797static sbitmap *comp;
7506f491 1798
a65f3558
JL
1799/* Nonzero for expressions that are locally anticipatable in the block. */
1800static sbitmap *antloc;
7506f491 1801
a65f3558
JL
1802/* Nonzero for expressions where this block is an optimal computation
1803 point. */
1804static sbitmap *pre_optimal;
5c35539b 1805
a65f3558
JL
1806/* Nonzero for expressions which are redundant in a particular block. */
1807static sbitmap *pre_redundant;
7506f491 1808
a42cd965
AM
1809/* Nonzero for expressions which should be inserted on a specific edge. */
1810static sbitmap *pre_insert_map;
1811
1812/* Nonzero for expressions which should be deleted in a specific block. */
1813static sbitmap *pre_delete_map;
1814
a65f3558 1815/* Allocate vars used for PRE analysis. */
7506f491
DE
1816
1817static void
1d088dee 1818alloc_pre_mem (int n_blocks, int n_exprs)
7506f491 1819{
a65f3558
JL
1820 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
1821 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
1822 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 1823
a42cd965
AM
1824 pre_optimal = NULL;
1825 pre_redundant = NULL;
1826 pre_insert_map = NULL;
1827 pre_delete_map = NULL;
a42cd965 1828 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 1829
a42cd965 1830 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
1831}
1832
a65f3558 1833/* Free vars used for PRE analysis. */
7506f491
DE
1834
1835static void
1d088dee 1836free_pre_mem (void)
7506f491 1837{
5a660bff
DB
1838 sbitmap_vector_free (transp);
1839 sbitmap_vector_free (comp);
bd3675fc
JL
1840
1841 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 1842
a42cd965 1843 if (pre_optimal)
5a660bff 1844 sbitmap_vector_free (pre_optimal);
a42cd965 1845 if (pre_redundant)
5a660bff 1846 sbitmap_vector_free (pre_redundant);
a42cd965 1847 if (pre_insert_map)
5a660bff 1848 sbitmap_vector_free (pre_insert_map);
a42cd965 1849 if (pre_delete_map)
5a660bff 1850 sbitmap_vector_free (pre_delete_map);
a42cd965 1851
bd3675fc 1852 transp = comp = NULL;
a42cd965 1853 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
7506f491
DE
1854}
1855
9b774782
JL
1856/* Remove certain expressions from anticipatable and transparent
1857 sets of basic blocks that have incoming abnormal edge.
1858 For PRE remove potentially trapping expressions to avoid placing
1859 them on abnormal edges. For hoisting remove memory references that
1860 can be clobbered by calls. */
7506f491
DE
1861
1862static void
9b774782 1863prune_expressions (bool pre_p)
7506f491 1864{
9b774782 1865 sbitmap prune_exprs;
43c8a043 1866 struct expr *expr;
b614171e 1867 unsigned int ui;
9b774782 1868 basic_block bb;
c66e8ae9 1869
9b774782 1870 prune_exprs = sbitmap_alloc (expr_hash_table.n_elems);
f61e445a 1871 bitmap_clear (prune_exprs);
02280659 1872 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e 1873 {
43c8a043 1874 for (expr = expr_hash_table.table[ui]; expr; expr = expr->next_same_hash)
9b774782
JL
1875 {
1876 /* Note potentially trapping expressions. */
43c8a043 1877 if (may_trap_p (expr->expr))
9b774782 1878 {
d7c028c0 1879 bitmap_set_bit (prune_exprs, expr->bitmap_index);
9b774782
JL
1880 continue;
1881 }
b614171e 1882
43c8a043 1883 if (!pre_p && MEM_P (expr->expr))
9b774782
JL
1884 /* Note memory references that can be clobbered by a call.
1885 We do not split abnormal edges in hoisting, so if
1886 a memory reference were hoisted along an abnormal edge,
1887 it would be placed /before/ the call. Therefore, only
1888 constant memory references can be hoisted along abnormal
1889 edges. */
1890 {
43c8a043
EB
1891 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
1892 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
9b774782 1893 continue;
c66e8ae9 1894
43c8a043
EB
1895 if (MEM_READONLY_P (expr->expr)
1896 && !MEM_VOLATILE_P (expr->expr)
1897 && MEM_NOTRAP_P (expr->expr))
9b774782
JL
1898 /* Constant memory reference, e.g., a PIC address. */
1899 continue;
1900
1901 /* ??? Optimally, we would use interprocedural alias
1902 analysis to determine if this mem is actually killed
1903 by this call. */
1904
d7c028c0 1905 bitmap_set_bit (prune_exprs, expr->bitmap_index);
9b774782
JL
1906 }
1907 }
1908 }
c66e8ae9 1909
11cd3bed 1910 FOR_EACH_BB_FN (bb, cfun)
c66e8ae9 1911 {
b614171e 1912 edge e;
628f6a4e 1913 edge_iterator ei;
b614171e
MM
1914
1915 /* If the current block is the destination of an abnormal edge, we
9b774782
JL
1916 kill all trapping (for PRE) and memory (for hoist) expressions
1917 because we won't be able to properly place the instruction on
1918 the edge. So make them neither anticipatable nor transparent.
1919 This is fairly conservative.
1920
1921 ??? For hoisting it may be necessary to check for set-and-jump
1922 instructions here, not just for abnormal edges. The general problem
1923 is that when an expression cannot be placed right at the end of
1924 a basic block, we should account for any side-effects of subsequent
1925 jump instructions that could clobber the expression. It would
1926 be best to implement this check along the lines of
b11f0116 1927 should_hoist_expr_to_dom where the target block is already known
9b774782
JL
1928 and, hence, there's no need to conservatively prune expressions on
1929 "intermediate" set-and-jump instructions. */
628f6a4e 1930 FOR_EACH_EDGE (e, ei, bb->preds)
9b774782
JL
1931 if ((e->flags & EDGE_ABNORMAL)
1932 && (pre_p || CALL_P (BB_END (e->src))))
b614171e 1933 {
f61e445a 1934 bitmap_and_compl (antloc[bb->index],
9b774782 1935 antloc[bb->index], prune_exprs);
f61e445a 1936 bitmap_and_compl (transp[bb->index],
9b774782 1937 transp[bb->index], prune_exprs);
b614171e
MM
1938 break;
1939 }
9b774782
JL
1940 }
1941
1942 sbitmap_free (prune_exprs);
1943}
1944
29fa95ed
JL
1945/* It may be necessary to insert a large number of insns on edges to
1946 make the existing occurrences of expressions fully redundant. This
1947 routine examines the set of insertions and deletions and if the ratio
1948 of insertions to deletions is too high for a particular expression, then
1949 the expression is removed from the insertion/deletion sets.
1950
1951 N_ELEMS is the number of elements in the hash table. */
1952
1953static void
1954prune_insertions_deletions (int n_elems)
1955{
1956 sbitmap_iterator sbi;
1957 sbitmap prune_exprs;
1958
1959 /* We always use I to iterate over blocks/edges and J to iterate over
1960 expressions. */
1961 unsigned int i, j;
1962
1963 /* Counts for the number of times an expression needs to be inserted and
1964 number of times an expression can be removed as a result. */
1965 int *insertions = GCNEWVEC (int, n_elems);
1966 int *deletions = GCNEWVEC (int, n_elems);
1967
1968 /* Set of expressions which require too many insertions relative to
1969 the number of deletions achieved. We will prune these out of the
1970 insertion/deletion sets. */
1971 prune_exprs = sbitmap_alloc (n_elems);
f61e445a 1972 bitmap_clear (prune_exprs);
29fa95ed
JL
1973
1974 /* Iterate over the edges counting the number of times each expression
1975 needs to be inserted. */
dc936fb2 1976 for (i = 0; i < (unsigned) n_edges_for_fn (cfun); i++)
29fa95ed 1977 {
d4ac4ce2 1978 EXECUTE_IF_SET_IN_BITMAP (pre_insert_map[i], 0, j, sbi)
29fa95ed
JL
1979 insertions[j]++;
1980 }
1981
1982 /* Similarly for deletions, but those occur in blocks rather than on
1983 edges. */
8b1c6fd7 1984 for (i = 0; i < (unsigned) last_basic_block_for_fn (cfun); i++)
29fa95ed 1985 {
d4ac4ce2 1986 EXECUTE_IF_SET_IN_BITMAP (pre_delete_map[i], 0, j, sbi)
29fa95ed
JL
1987 deletions[j]++;
1988 }
1989
1990 /* Now that we have accurate counts, iterate over the elements in the
1991 hash table and see if any need too many insertions relative to the
1992 number of evaluations that can be removed. If so, mark them in
1993 PRUNE_EXPRS. */
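  /* E.g. an expression needing 50 edge insertions to make only 2
     occurrences redundant has a ratio of 25; if that exceeds
     MAX_GCSE_INSERTION_RATIO, it is pruned below.  */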
1994 for (j = 0; j < (unsigned) n_elems; j++)
1995 if (deletions[j]
1996 && ((unsigned) insertions[j] / deletions[j]) > MAX_GCSE_INSERTION_RATIO)
d7c028c0 1997 bitmap_set_bit (prune_exprs, j);
29fa95ed
JL
1998
1999 /* Now prune PRE_INSERT_MAP and PRE_DELETE_MAP based on PRUNE_EXPRS. */
d4ac4ce2 2000 EXECUTE_IF_SET_IN_BITMAP (prune_exprs, 0, j, sbi)
29fa95ed 2001 {
dc936fb2 2002 for (i = 0; i < (unsigned) n_edges_for_fn (cfun); i++)
d7c028c0 2003 bitmap_clear_bit (pre_insert_map[i], j);
29fa95ed 2004
8b1c6fd7 2005 for (i = 0; i < (unsigned) last_basic_block_for_fn (cfun); i++)
d7c028c0 2006 bitmap_clear_bit (pre_delete_map[i], j);
29fa95ed
JL
2007 }
2008
2009 sbitmap_free (prune_exprs);
2010 free (insertions);
2011 free (deletions);
2012}
2013
9b774782 2014/* Top level routine to do the dataflow analysis needed by PRE. */
b614171e 2015
43c8a043 2016static struct edge_list *
9b774782
JL
2017compute_pre_data (void)
2018{
43c8a043 2019 struct edge_list *edge_list;
9b774782
JL
2020 basic_block bb;
2021
2022 compute_local_properties (transp, comp, antloc, &expr_hash_table);
2023 prune_expressions (true);
8b1c6fd7 2024 bitmap_vector_clear (ae_kill, last_basic_block_for_fn (cfun));
9b774782
JL
2025
2026 /* Compute ae_kill for each basic block using:
2027
2028 ~(TRANSP | COMP)
2029 */
2030
11cd3bed 2031 FOR_EACH_BB_FN (bb, cfun)
9b774782 2032 {
f61e445a
LC
2033 bitmap_ior (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
2034 bitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
2035 }
2036
10d22567 2037 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 2038 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 2039 sbitmap_vector_free (antloc);
bd3675fc 2040 antloc = NULL;
5a660bff 2041 sbitmap_vector_free (ae_kill);
589005ff 2042 ae_kill = NULL;
29fa95ed
JL
2043
2044 prune_insertions_deletions (expr_hash_table.n_elems);
43c8a043
EB
2045
2046 return edge_list;
7506f491
DE
2047}
2048\f
2049/* PRE utilities */
2050
cc2902df 2051/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 2052 block BB.
7506f491
DE
2053
2054 VISITED is a pointer to a working buffer for tracking which BB's have
2055 been visited. It is NULL for the top-level call.
2056
2057 We treat reaching expressions that go through blocks containing the same
2058 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2059 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2060 2 as not reaching. The intent is to improve the probability of finding
2061 only one reaching expression and to reduce register lifetimes by picking
2062 the closest such expression. */
2063
2064static int
43c8a043
EB
2065pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
2066 basic_block bb, char *visited)
7506f491 2067{
36349f8b 2068 edge pred;
628f6a4e 2069 edge_iterator ei;
b8698a0f 2070
628f6a4e 2071 FOR_EACH_EDGE (pred, ei, bb->preds)
7506f491 2072 {
e2d2ed72 2073 basic_block pred_bb = pred->src;
7506f491 2074
fefa31b5 2075 if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
7506f491 2076 /* Has this predecessor already been visited? */
0b17ab2f 2077 || visited[pred_bb->index])
c4c81601
RK
2078 ;/* Nothing to do. */
2079
7506f491 2080 /* Does this predecessor generate this expression? */
d7c028c0 2081 else if (bitmap_bit_p (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
2082 {
2083 /* Is this the occurrence we're looking for?
2084 Note that there's only one generating occurrence per block
2085 so we just need to check the block number. */
a65f3558 2086 if (occr_bb == pred_bb)
7506f491 2087 return 1;
c4c81601 2088
0b17ab2f 2089 visited[pred_bb->index] = 1;
7506f491
DE
2090 }
2091 /* Ignore this predecessor if it kills the expression. */
d7c028c0 2092 else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
0b17ab2f 2093 visited[pred_bb->index] = 1;
c4c81601 2094
7506f491
DE
2095 /* Neither gen nor kill. */
2096 else
ac7c5af5 2097 {
0b17ab2f 2098 visited[pred_bb->index] = 1;
89e606c9 2099 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 2100 return 1;
ac7c5af5 2101 }
7506f491
DE
2102 }
2103
2104 /* All paths have been checked. */
2105 return 0;
2106}
283a2545
RL
2107
2108/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 2109 memory allocated for that function is freed. */
283a2545
RL
2110
2111static int
1d088dee 2112pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
283a2545
RL
2113{
2114 int rval;
8b1c6fd7 2115 char *visited = XCNEWVEC (char, last_basic_block_for_fn (cfun));
283a2545 2116
8e42ace1 2117 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
2118
2119 free (visited);
c4c81601 2120 return rval;
283a2545 2121}
7506f491 2122\f
43c8a043 2123/* Generate RTL to copy an EXPR to its `reaching_reg' and return it. */
a42cd965
AM
2124
2125static rtx
1d088dee 2126process_insert_insn (struct expr *expr)
a42cd965
AM
2127{
2128 rtx reg = expr->reaching_reg;
43c8a043 2129 /* Copy the expression to make sure we don't have any sharing issues. */
fb0c0a12
RK
2130 rtx exp = copy_rtx (expr->expr);
2131 rtx pat;
a42cd965
AM
2132
2133 start_sequence ();
fb0c0a12
RK
2134
2135 /* If the expression is something that's an operand, like a constant,
2136 just copy it to a register. */
2137 if (general_operand (exp, GET_MODE (reg)))
2138 emit_move_insn (reg, exp);
2139
2140 /* Otherwise, make a new insn to compute this expression and make sure the
43c8a043 2141 insn will be recognized (this also adds any needed CLOBBERs). */
282899df
NS
2142 else
2143 {
2144 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
2145
57ac4c34 2146 if (insn_invalid_p (insn, false))
2f021b67 2147 gcc_unreachable ();
282899df 2148 }
b8698a0f 2149
2f937369 2150 pat = get_insns ();
a42cd965
AM
2151 end_sequence ();
2152
2153 return pat;
2154}
589005ff 2155
a65f3558
JL
2156/* Add EXPR to the end of basic block BB.
2157
eae7938e 2158 This is used by both PRE and code hoisting. */
7506f491
DE
2159
2160static void
eae7938e 2161insert_insn_end_basic_block (struct expr *expr, basic_block bb)
7506f491 2162{
a813c111 2163 rtx insn = BB_END (bb);
7506f491
DE
2164 rtx new_insn;
2165 rtx reg = expr->reaching_reg;
2166 int regno = REGNO (reg);
2f937369 2167 rtx pat, pat_end;
7506f491 2168
a42cd965 2169 pat = process_insert_insn (expr);
282899df 2170 gcc_assert (pat && INSN_P (pat));
2f937369
DM
2171
2172 pat_end = pat;
2173 while (NEXT_INSN (pat_end) != NULL_RTX)
2174 pat_end = NEXT_INSN (pat_end);
7506f491
DE
2175
2176 /* If the last insn is a jump, insert EXPR in front [taking care to
4d6922ee 2177 handle cc0, etc. properly]. Similarly we need to take care of trapping
068473ec 2178 instructions in the presence of non-call exceptions. */
7506f491 2179
7b1b4aed 2180 if (JUMP_P (insn)
4b4bf941 2181 || (NONJUMP_INSN_P (insn)
c5cbcccf
ZD
2182 && (!single_succ_p (bb)
2183 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
7506f491 2184 {
7506f491
DE
2185#ifdef HAVE_cc0
2186 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2187 if cc0 isn't set. */
39718607 2188 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
7506f491
DE
2189 if (note)
2190 insn = XEXP (note, 0);
2191 else
2192 {
2193 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2194 if (maybe_cc0_setter
2c3c49de 2195 && INSN_P (maybe_cc0_setter)
7506f491
DE
2196 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2197 insn = maybe_cc0_setter;
2198 }
2199#endif
2200 /* FIXME: What if something in cc0/jump uses value set in new insn? */
6fb5fa3c 2201 new_insn = emit_insn_before_noloc (pat, insn, bb);
3947e2f9 2202 }
c4c81601 2203
3947e2f9
RH
2204 /* Likewise if the last insn is a call, as will happen in the presence
2205 of exception handling. */
7b1b4aed 2206 else if (CALL_P (insn)
c5cbcccf
ZD
2207 && (!single_succ_p (bb)
2208 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3947e2f9 2209 {
42db504c
SB
2210 /* Keeping in mind targets with small register classes and parameters
2211 in registers, we search backward and place the instructions before
2212 the first parameter is loaded. Do this for everyone for consistency
eae7938e 2213 and a presumption that we'll get better code elsewhere as well. */
3947e2f9
RH
2214
2215 /* Since different machines initialize their parameter registers
2216 in different orders, assume nothing. Collect the set of all
2217 parameter registers. */
a813c111 2218 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3947e2f9 2219
b1d26727
JL
2220 /* If we found all the parameter loads, then we want to insert
2221 before the first parameter load.
2222
2223 If we did not find all the parameter loads, then we might have
2224 stopped on the head of the block, which could be a CODE_LABEL.
2225 If we inserted before the CODE_LABEL, then we would be putting
2226 the insn in the wrong basic block. In that case, put the insn
b5229628 2227 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
7b1b4aed 2228 while (LABEL_P (insn)
589ca5cb 2229 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 2230 insn = NEXT_INSN (insn);
c4c81601 2231
6fb5fa3c 2232 new_insn = emit_insn_before_noloc (pat, insn, bb);
7506f491
DE
2233 }
2234 else
6fb5fa3c 2235 new_insn = emit_insn_after_noloc (pat, insn, bb);
7506f491 2236
2f937369 2237 while (1)
a65f3558 2238 {
2f937369 2239 if (INSN_P (pat))
4a81774c 2240 add_label_notes (PATTERN (pat), new_insn);
2f937369
DM
2241 if (pat == pat_end)
2242 break;
2243 pat = NEXT_INSN (pat);
a65f3558 2244 }
3947e2f9 2245
7506f491
DE
2246 gcse_create_count++;
2247
10d22567 2248 if (dump_file)
7506f491 2249 {
10d22567 2250 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 2251 bb->index, INSN_UID (new_insn));
10d22567 2252 fprintf (dump_file, "copying expression %d to reg %d\n",
c4c81601 2253 expr->bitmap_index, regno);
7506f491
DE
2254 }
2255}
2256
a42cd965
AM
2257/* Insert partially redundant expressions on edges in the CFG to make
2258 the expressions fully redundant. */
7506f491 2259
a42cd965 2260static int
1d088dee 2261pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
7506f491 2262{
c4c81601 2263 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
2264 sbitmap *inserted;
2265
a42cd965
AM
2266 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
2267 if it reaches any of the deleted expressions. */
7506f491 2268
a42cd965
AM
2269 set_size = pre_insert_map[0]->size;
2270 num_edges = NUM_EDGES (edge_list);
02280659 2271 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
f61e445a 2272 bitmap_vector_clear (inserted, num_edges);
7506f491 2273
a42cd965 2274 for (e = 0; e < num_edges; e++)
7506f491
DE
2275 {
2276 int indx;
e2d2ed72 2277 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 2278
a65f3558 2279 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 2280 {
a42cd965 2281 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 2282
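	  /* Walk the bits of this word of the edge's insertion map;
	     bit J set means expression J must be inserted on edge E.  */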
43c8a043
EB
2283 for (j = indx;
2284 insert && j < (int) expr_hash_table.n_elems;
2285 j++, insert >>= 1)
c4c81601
RK
2286 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
2287 {
2288 struct expr *expr = index_map[j];
2289 struct occr *occr;
a65f3558 2290
ff7cc307 2291 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
2292 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
2293 {
2294 if (! occr->deleted_p)
2295 continue;
2296
3f117656 2297 /* Insert this expression on this edge if it would
ff7cc307 2298 reach the deleted occurrence in BB. */
d7c028c0 2299 if (!bitmap_bit_p (inserted[e], j))
c4c81601
RK
2300 {
2301 rtx insn;
2302 edge eg = INDEX_EDGE (edge_list, e);
2303
2304 /* We can't insert anything on an abnormal and
2305 critical edge, so we insert the insn at the end of
2306 the previous block. There are several alternatives
2307 detailed in Morgan's book, p. 277 (sec. 10.5), for
2308 handling this situation. This one is easiest for
2309 now. */
2310
b16aa8a5 2311 if (eg->flags & EDGE_ABNORMAL)
eae7938e 2312 insert_insn_end_basic_block (index_map[j], bb);
c4c81601
RK
2313 else
2314 {
2315 insn = process_insert_insn (index_map[j]);
2316 insert_insn_on_edge (insn, eg);
2317 }
2318
10d22567 2319 if (dump_file)
c4c81601 2320 {
5f39ad47 2321 fprintf (dump_file, "PRE: edge (%d,%d), ",
0b17ab2f
RH
2322 bb->index,
2323 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
10d22567 2324 fprintf (dump_file, "copy expression %d\n",
c4c81601
RK
2325 expr->bitmap_index);
2326 }
2327
a13d4ebf 2328 update_ld_motion_stores (expr);
d7c028c0 2329 bitmap_set_bit (inserted[e], j);
c4c81601
RK
2330 did_insert = 1;
2331 gcse_create_count++;
2332 }
2333 }
2334 }
7506f491
DE
2335 }
2336 }
5faf03ae 2337
5a660bff 2338 sbitmap_vector_free (inserted);
a42cd965 2339 return did_insert;
7506f491
DE
2340}
2341
073089a7 2342/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
b885908b
MH
2343 Given "old_reg <- expr" (INSN), instead of adding after it
2344 reaching_reg <- old_reg
2345 it's better to do the following:
2346 reaching_reg <- expr
2347 old_reg <- reaching_reg
2348 because this way copy propagation can discover additional PRE
f5f2e3cd
MH
2349 opportunities. But if this fails, we try the old way.
2350 When "expr" is a store, i.e.
2351 given "MEM <- old_reg", instead of adding after it
2352 reaching_reg <- old_reg
2353 it's better to add it before as follows:
2354 reaching_reg <- old_reg
2355 MEM <- reaching_reg. */
7506f491
DE
2356
2357static void
1d088dee 2358pre_insert_copy_insn (struct expr *expr, rtx insn)
7506f491
DE
2359{
2360 rtx reg = expr->reaching_reg;
2361 int regno = REGNO (reg);
2362 int indx = expr->bitmap_index;
073089a7 2363 rtx pat = PATTERN (insn);
64068ca2 2364 rtx set, first_set, new_insn;
b885908b 2365 rtx old_reg;
073089a7 2366 int i;
7506f491 2367
073089a7 2368 /* This block matches the logic in hash_scan_insn. */
282899df 2369 switch (GET_CODE (pat))
073089a7 2370 {
282899df
NS
2371 case SET:
2372 set = pat;
2373 break;
2374
2375 case PARALLEL:
073089a7
RS
2376 /* Search through the parallel looking for the set whose
2377 source was the expression that we're interested in. */
64068ca2 2378 first_set = NULL_RTX;
073089a7
RS
2379 set = NULL_RTX;
2380 for (i = 0; i < XVECLEN (pat, 0); i++)
2381 {
2382 rtx x = XVECEXP (pat, 0, i);
64068ca2 2383 if (GET_CODE (x) == SET)
073089a7 2384 {
64068ca2
RS
2385 /* If the source was a REG_EQUAL or REG_EQUIV note, we
2386 may not find an equivalent expression, but in this
2387 case the PARALLEL will have a single set. */
2388 if (first_set == NULL_RTX)
2389 first_set = x;
2390 if (expr_equiv_p (SET_SRC (x), expr->expr))
2391 {
2392 set = x;
2393 break;
2394 }
073089a7
RS
2395 }
2396 }
64068ca2
RS
2397
2398 gcc_assert (first_set);
2399 if (set == NULL_RTX)
2400 set = first_set;
282899df
NS
2401 break;
2402
2403 default:
2404 gcc_unreachable ();
073089a7 2405 }
c4c81601 2406
7b1b4aed 2407 if (REG_P (SET_DEST (set)))
073089a7 2408 {
f5f2e3cd
MH
2409 old_reg = SET_DEST (set);
2410 /* Check if we can modify the set destination in the original insn. */
2411 if (validate_change (insn, &SET_DEST (set), reg, 0))
2412 {
2413 new_insn = gen_move_insn (old_reg, reg);
2414 new_insn = emit_insn_after (new_insn, insn);
f5f2e3cd
MH
2415 }
2416 else
2417 {
2418 new_insn = gen_move_insn (reg, old_reg);
2419 new_insn = emit_insn_after (new_insn, insn);
f5f2e3cd 2420 }
073089a7 2421 }
f5f2e3cd 2422 else /* This is possible only in case of a store to memory. */
073089a7 2423 {
f5f2e3cd 2424 old_reg = SET_SRC (set);
073089a7 2425 new_insn = gen_move_insn (reg, old_reg);
f5f2e3cd
MH
2426
2427 /* Check if we can modify the set source in the original insn. */
2428 if (validate_change (insn, &SET_SRC (set), reg, 0))
2429 new_insn = emit_insn_before (new_insn, insn);
2430 else
2431 new_insn = emit_insn_after (new_insn, insn);
073089a7 2432 }
7506f491
DE
2433
2434 gcse_create_count++;
2435
10d22567
ZD
2436 if (dump_file)
2437 fprintf (dump_file,
a42cd965 2438 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
b0de17ef 2439 BLOCK_FOR_INSN (insn)->index, INSN_UID (new_insn), indx,
a42cd965 2440 INSN_UID (insn), regno);
7506f491
DE
2441}
2442
2443/* Copy available expressions that reach the redundant expression
2444 to `reaching_reg'. */
2445
2446static void
1d088dee 2447pre_insert_copies (void)
7506f491 2448{
f5f2e3cd 2449 unsigned int i, added_copy;
c4c81601
RK
2450 struct expr *expr;
2451 struct occr *occr;
2452 struct occr *avail;
a65f3558 2453
7506f491
DE
2454 /* For each available expression in the table, copy the result to
2455 `reaching_reg' if the expression reaches a deleted one.
2456
2457 ??? The current algorithm is rather brute force.
2458 Need to do some profiling. */
2459
02280659 2460 for (i = 0; i < expr_hash_table.size; i++)
43c8a043 2461 for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
c4c81601
RK
2462 {
2463 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
2464 we don't want to insert a copy here because the expression may not
2465 really be redundant. So only insert an insn if the expression was
2466 deleted. This test also avoids further processing if the
2467 expression wasn't deleted anywhere. */
2468 if (expr->reaching_reg == NULL)
2469 continue;
7b1b4aed 2470
f5f2e3cd 2471 /* Set when we add a copy for that expression. */
7b1b4aed 2472 added_copy = 0;
c4c81601
RK
2473
2474 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
2475 {
2476 if (! occr->deleted_p)
2477 continue;
7506f491 2478
c4c81601
RK
2479 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
2480 {
2481 rtx insn = avail->insn;
7506f491 2482
c4c81601
RK
2483 /* No need to handle this one if handled already. */
2484 if (avail->copied_p)
2485 continue;
7506f491 2486
c4c81601 2487 /* Don't handle this one if it's a redundant one. */
4a81774c 2488 if (INSN_DELETED_P (insn))
c4c81601 2489 continue;
7506f491 2490
c4c81601 2491 /* Or if the expression doesn't reach the deleted one. */
589005ff 2492 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
2493 expr,
2494 BLOCK_FOR_INSN (occr->insn)))
c4c81601 2495 continue;
7506f491 2496
f5f2e3cd
MH
2497 added_copy = 1;
2498
c4c81601
RK
2499 /* Copy the result of avail to reaching_reg. */
2500 pre_insert_copy_insn (expr, insn);
2501 avail->copied_p = 1;
2502 }
2503 }
f5f2e3cd 2504
7b1b4aed 2505 if (added_copy)
f5f2e3cd 2506 update_ld_motion_stores (expr);
c4c81601 2507 }
7506f491
DE
2508}
2509
b6808818
JDA
2510struct set_data
2511{
2512 rtx insn;
2513 const_rtx set;
2514 int nsets;
2515};
2516
2517/* Increment number of sets and record set in DATA. */
2518
2519static void
2520record_set_data (rtx dest, const_rtx set, void *data)
2521{
2522 struct set_data *s = (struct set_data *)data;
2523
2524 if (GET_CODE (set) == SET)
2525 {
2526 /* We allow insns having multiple sets, where all but one are
2527 dead as single set insns. In the common case only a single
2528 set is present, so we want to avoid checking for REG_UNUSED
2529 notes unless necessary. */
2530 if (s->nsets == 1
2531 && find_reg_note (s->insn, REG_UNUSED, SET_DEST (s->set))
2532 && !side_effects_p (s->set))
2533 s->nsets = 0;
2534
2535 if (!s->nsets)
2536 {
2537 /* Record this set. */
2538 s->nsets += 1;
2539 s->set = set;
2540 }
2541 else if (!find_reg_note (s->insn, REG_UNUSED, dest)
2542 || side_effects_p (set))
2543 s->nsets += 1;
2544 }
2545}
2546
2547static const_rtx
2548single_set_gcse (rtx insn)
2549{
2550 struct set_data s;
2551 rtx pattern;
2552
2553 gcc_assert (INSN_P (insn));
2554
2555 /* Optimize common case. */
2556 pattern = PATTERN (insn);
2557 if (GET_CODE (pattern) == SET)
2558 return pattern;
2559
2560 s.insn = insn;
2561 s.nsets = 0;
2562 note_stores (pattern, record_set_data, &s);
2563
2564 /* Considered invariant insns have exactly one set. */
2565 gcc_assert (s.nsets == 1);
2566 return s.set;
2567}
2568
10d1bb36
JH
2569/* Emit move from SRC to DEST noting the equivalence with expression computed
2570 in INSN. */
43c8a043 2571
10d1bb36 2572static rtx
43c8a043 2573gcse_emit_move_after (rtx dest, rtx src, rtx insn)
10d1bb36 2574{
60564289 2575 rtx new_rtx;
b6808818
JDA
2576 const_rtx set = single_set_gcse (insn);
2577 rtx set2;
10d1bb36 2578 rtx note;
dca3da7a 2579 rtx eqv = NULL_RTX;
10d1bb36
JH
2580
2581 /* This should never fail since we're creating a reg->reg copy
2582 we've verified to be valid. */
2583
60564289 2584 new_rtx = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 2585
dca3da7a
SB
2586 /* Note the equivalence for local CSE pass. Take the note from the old
2587 set if there was one. Otherwise record the SET_SRC from the old set
2588 unless DEST is also an operand of the SET_SRC. */
60564289 2589 set2 = single_set (new_rtx);
6bdb8dd6 2590 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
60564289 2591 return new_rtx;
10d1bb36
JH
2592 if ((note = find_reg_equal_equiv_note (insn)))
2593 eqv = XEXP (note, 0);
dca3da7a
SB
2594 else if (! REG_P (dest)
2595 || ! reg_mentioned_p (dest, SET_SRC (set)))
10d1bb36
JH
2596 eqv = SET_SRC (set);
2597
dca3da7a
SB
2598 if (eqv != NULL_RTX)
2599 set_unique_reg_note (new_rtx, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36 2600
60564289 2601 return new_rtx;
10d1bb36
JH
2602}
2603
7506f491 2604/* Delete redundant computations.
7506f491
DE
2605 Deletion is done by changing the insn to copy the `reaching_reg' of
2606 the expression into the result of the SET. It is left to later passes
337405fd 2607 to propagate the copy or eliminate it.
7506f491 2608
43c8a043 2609 Return nonzero if a change is made. */
7506f491
DE
2610
2611static int
1d088dee 2612pre_delete (void)
7506f491 2613{
2e653e39 2614 unsigned int i;
63bc1d05 2615 int changed;
c4c81601
RK
2616 struct expr *expr;
2617 struct occr *occr;
a65f3558 2618
7506f491 2619 changed = 0;
02280659 2620 for (i = 0; i < expr_hash_table.size; i++)
43c8a043 2621 for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
c4c81601
RK
2622 {
2623 int indx = expr->bitmap_index;
7506f491 2624
43c8a043 2625 /* We only need to search antic_occr since we require ANTLOC != 0. */
c4c81601
RK
2626 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
2627 {
2628 rtx insn = occr->insn;
2629 rtx set;
e2d2ed72 2630 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 2631
073089a7 2632 /* We only delete insns that have a single_set. */
d7c028c0 2633 if (bitmap_bit_p (pre_delete_map[bb->index], indx)
6fb5fa3c
DB
2634 && (set = single_set (insn)) != 0
2635 && dbg_cnt (pre_insn))
c4c81601 2636 {
c4c81601
RK
2637 /* Create a pseudo-reg to store the result of reaching
2638 expressions into. Get the mode for the new pseudo from
2639 the mode of the original destination pseudo. */
2640 if (expr->reaching_reg == NULL)
46b71b03 2641 expr->reaching_reg = gen_reg_rtx_and_attrs (SET_DEST (set));
c4c81601 2642
43c8a043 2643 gcse_emit_move_after (SET_DEST (set), expr->reaching_reg, insn);
10d1bb36
JH
2644 delete_insn (insn);
2645 occr->deleted_p = 1;
10d1bb36
JH
2646 changed = 1;
2647 gcse_subst_count++;
7506f491 2648
10d22567 2649 if (dump_file)
c4c81601 2650 {
10d22567 2651 fprintf (dump_file,
c4c81601
RK
2652 "PRE: redundant insn %d (expression %d) in ",
2653 INSN_UID (insn), indx);
10d22567 2654 fprintf (dump_file, "bb %d, reaching reg is %d\n",
0b17ab2f 2655 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
2656 }
2657 }
2658 }
2659 }
7506f491
DE
2660
2661 return changed;
2662}
2663
2664/* Perform GCSE optimizations using PRE.
2665 This is called by one_pre_gcse_pass after all the dataflow analysis
2666 has been done.
2667
c4c81601
RK
2668 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
2669 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
2670 Compiler Design and Implementation.
7506f491 2671
c4c81601
RK
2672 ??? A new pseudo reg is created to hold the reaching expression. The nice
2673 thing about the classical approach is that it would try to use an existing
2674 reg. If the register can't be adequately optimized [i.e. we introduce
2675 reload problems], one could add a pass here to propagate the new register
2676 through the block.
7506f491 2677
c4c81601
RK
2678 ??? We don't handle single sets in PARALLELs because we're [currently] not
2679 able to copy the rest of the parallel when we insert copies to create full
2680 redundancies from partial redundancies. However, there's no reason why we
2681 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
2682 redundancies. */
2683
2684static int
43c8a043 2685pre_gcse (struct edge_list *edge_list)
7506f491 2686{
2e653e39
RK
2687 unsigned int i;
2688 int did_insert, changed;
7506f491 2689 struct expr **index_map;
c4c81601 2690 struct expr *expr;
7506f491
DE
2691
2692 /* Compute a mapping from expression number (`bitmap_index') to
2693 hash table entry. */
2694
5ed6ace5 2695 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659 2696 for (i = 0; i < expr_hash_table.size; i++)
43c8a043 2697 for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
c4c81601 2698 index_map[expr->bitmap_index] = expr;
7506f491 2699
7506f491
DE
2700 /* Delete the redundant insns first so that
2701 - we know what register to use for the new insns and for the other
2702 ones with reaching expressions
2703 - we know which insns are redundant when we go to create copies */
c4c81601 2704
7506f491 2705 changed = pre_delete ();
a42cd965 2706 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 2707
7506f491 2708 /* In other places with reaching expressions, copy the expression to the
a42cd965 2709 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 2710 pre_insert_copies ();
a42cd965
AM
2711 if (did_insert)
2712 {
2713 commit_edge_insertions ();
2714 changed = 1;
2715 }
7506f491 2716
283a2545 2717 free (index_map);
7506f491
DE
2718 return changed;
2719}
2720
2721/* Top level routine to perform one PRE GCSE pass.
2722
cc2902df 2723 Return nonzero if a change was made. */
7506f491
DE
2724
2725static int
5f39ad47 2726one_pre_gcse_pass (void)
7506f491
DE
2727{
2728 int changed = 0;
2729
2730 gcse_subst_count = 0;
2731 gcse_create_count = 0;
2732
5f39ad47 2733 /* Return if there's nothing to do, or it is too expensive. */
0cae8d31 2734 if (n_basic_blocks_for_fn (cfun) <= NUM_FIXED_BLOCKS + 1
5f39ad47
SB
2735 || is_too_expensive (_("PRE disabled")))
2736 return 0;
2737
2738 /* We need alias. */
2739 init_alias_analysis ();
2740
2741 bytes_used = 0;
2742 gcc_obstack_init (&gcse_obstack);
2743 alloc_gcse_mem ();
2744
e45425ec 2745 alloc_hash_table (&expr_hash_table);
a42cd965 2746 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
2747 if (flag_gcse_lm)
2748 compute_ld_motion_mems ();
2749
02280659 2750 compute_hash_table (&expr_hash_table);
43c8a043
EB
2751 if (flag_gcse_lm)
2752 trim_ld_motion_mems ();
10d22567
ZD
2753 if (dump_file)
2754 dump_hash_table (dump_file, "Expression", &expr_hash_table);
c4c81601 2755
02280659 2756 if (expr_hash_table.n_elems > 0)
7506f491 2757 {
43c8a043 2758 struct edge_list *edge_list;
8b1c6fd7 2759 alloc_pre_mem (last_basic_block_for_fn (cfun), expr_hash_table.n_elems);
43c8a043
EB
2760 edge_list = compute_pre_data ();
2761 changed |= pre_gcse (edge_list);
a42cd965 2762 free_edge_list (edge_list);
7506f491
DE
2763 free_pre_mem ();
2764 }
c4c81601 2765
43c8a043
EB
2766 if (flag_gcse_lm)
2767 free_ld_motion_mems ();
6809cbf9 2768 remove_fake_exit_edges ();
02280659 2769 free_hash_table (&expr_hash_table);
7506f491 2770
5f39ad47
SB
2771 free_gcse_mem ();
2772 obstack_free (&gcse_obstack, NULL);
2773
2774 /* We are finished with alias. */
2775 end_alias_analysis ();
2776
10d22567 2777 if (dump_file)
7506f491 2778 {
5f39ad47 2779 fprintf (dump_file, "PRE GCSE of %s, %d basic blocks, %d bytes needed, ",
0cae8d31
DM
2780 current_function_name (), n_basic_blocks_for_fn (cfun),
2781 bytes_used);
10d22567 2782 fprintf (dump_file, "%d substs, %d insns created\n",
c4c81601 2783 gcse_subst_count, gcse_create_count);
7506f491
DE
2784 }
2785
2786 return changed;
2787}
aeb2f500 2788\f
cf7c4aa6
HPN
2789/* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
2790 to INSN. If such notes are added to an insn which references a
2791 CODE_LABEL, the LABEL_NUSES count is incremented. We have to add
2792 those notes, because the following loop optimization pass requires
2793 them. */
aeb2f500 2794
aeb2f500
JW
2795/* ??? If there was a jump optimization pass after gcse and before loop,
2796 then we would not need to do this here, because jump would add the
cf7c4aa6 2797 necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */
aeb2f500
JW
2798
2799static void
1d088dee 2800add_label_notes (rtx x, rtx insn)
aeb2f500
JW
2801{
2802 enum rtx_code code = GET_CODE (x);
2803 int i, j;
6f7d635c 2804 const char *fmt;
aeb2f500
JW
2805
2806 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
2807 {
6b3603c2 2808 /* This code used to ignore labels that referred to dispatch tables to
e0bb17a8 2809 avoid flow generating (slightly) worse code.
6b3603c2 2810
ac7c5af5
JL
2811 We no longer ignore such label references (see LABEL_REF handling in
2812 mark_jump_label for additional information). */
c4c81601 2813
cb2f563b
HPN
2814 /* There's no reason for current users to emit jump-insns with
2815 such a LABEL_REF, so we don't have to handle REG_LABEL_TARGET
2816 notes. */
2817 gcc_assert (!JUMP_P (insn));
65c5f2a6
ILT
2818 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (x, 0));
2819
cb2f563b
HPN
2820 if (LABEL_P (XEXP (x, 0)))
2821 LABEL_NUSES (XEXP (x, 0))++;
2822
aeb2f500
JW
2823 return;
2824 }
2825
c4c81601 2826 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
2827 {
2828 if (fmt[i] == 'e')
2829 add_label_notes (XEXP (x, i), insn);
2830 else if (fmt[i] == 'E')
2831 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2832 add_label_notes (XVECEXP (x, i, j), insn);
2833 }
2834}
a65f3558 2835
bb457bd9
JL
2836/* Code Hoisting variables and subroutines. */
2837
2838/* Very busy expressions. */
2839static sbitmap *hoist_vbein;
2840static sbitmap *hoist_vbeout;
2841
bb457bd9 2842/* ??? We could compute post dominators and run this algorithm in
68e82b83 2843 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
2844 more effective than the tail merging code in jump.c.
2845
2846 It's unclear if tail merging could be run in parallel with
2847 code hoisting. It would be nice. */
2848
2849/* Allocate vars used for code hoisting analysis. */
2850
2851static void
1d088dee 2852alloc_code_hoist_mem (int n_blocks, int n_exprs)
bb457bd9
JL
2853{
2854 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
2855 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
2856 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
2857
2858 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
2859 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
2860}
2861
2862/* Free vars used for code hoisting analysis. */
2863
2864static void
1d088dee 2865free_code_hoist_mem (void)
bb457bd9 2866{
5a660bff
DB
2867 sbitmap_vector_free (antloc);
2868 sbitmap_vector_free (transp);
2869 sbitmap_vector_free (comp);
bb457bd9 2870
5a660bff
DB
2871 sbitmap_vector_free (hoist_vbein);
2872 sbitmap_vector_free (hoist_vbeout);
bb457bd9 2873
d47cc544 2874 free_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
2875}
2876
2877/* Compute the very busy expressions at entry/exit from each block.
2878
2879 An expression is very busy if all paths from a given point
2880 compute the expression. */
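/* Roughly, the loop below iterates

     VBEout(bb) = (intersection of VBEin over bb's successors) | COMP(bb)
     VBEin(bb)  = ANTLOC(bb) | (VBEout(bb) & TRANSP(bb))

   to a fixed point, scanning the blocks in reverse order to speed up
   convergence.  */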
2881
2882static void
1d088dee 2883compute_code_hoist_vbeinout (void)
bb457bd9 2884{
e0082a72
ZD
2885 int changed, passes;
2886 basic_block bb;
bb457bd9 2887
8b1c6fd7
DM
2888 bitmap_vector_clear (hoist_vbeout, last_basic_block_for_fn (cfun));
2889 bitmap_vector_clear (hoist_vbein, last_basic_block_for_fn (cfun));
bb457bd9
JL
2890
2891 passes = 0;
2892 changed = 1;
c4c81601 2893
bb457bd9
JL
2894 while (changed)
2895 {
2896 changed = 0;
c4c81601 2897
bb457bd9
JL
2898 /* We scan the blocks in the reverse order to speed up
2899 the convergence. */
4f42035e 2900 FOR_EACH_BB_REVERSE_FN (bb, cfun)
bb457bd9 2901 {
fefa31b5 2902 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
ce4c0015 2903 {
d7c028c0
LC
2904 bitmap_intersection_of_succs (hoist_vbeout[bb->index],
2905 hoist_vbein, bb);
ce4c0015
MK
2906
2907 /* Include expressions in VBEout that are calculated
2908 in BB and available at its end. */
f61e445a 2909 bitmap_ior (hoist_vbeout[bb->index],
ce4c0015
MK
2910 hoist_vbeout[bb->index], comp[bb->index]);
2911 }
f8423fea 2912
f61e445a 2913 changed |= bitmap_or_and (hoist_vbein[bb->index],
f8423fea
SB
2914 antloc[bb->index],
2915 hoist_vbeout[bb->index],
2916 transp[bb->index]);
bb457bd9 2917 }
c4c81601 2918
bb457bd9
JL
2919 passes++;
2920 }
2921
10d22567 2922 if (dump_file)
cad9aa15
MK
2923 {
2924 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
2925
11cd3bed 2926 FOR_EACH_BB_FN (bb, cfun)
cad9aa15
MK
2927 {
2928 fprintf (dump_file, "vbein (%d): ", bb->index);
f61e445a 2929 dump_bitmap_file (dump_file, hoist_vbein[bb->index]);
cad9aa15 2930 fprintf (dump_file, "vbeout(%d): ", bb->index);
f61e445a 2931 dump_bitmap_file (dump_file, hoist_vbeout[bb->index]);
cad9aa15
MK
2932 }
2933 }
bb457bd9
JL
2934}
2935
2936/* Top level routine to do the dataflow analysis needed by code hoisting. */
2937
2938static void
1d088dee 2939compute_code_hoist_data (void)
bb457bd9 2940{
02280659 2941 compute_local_properties (transp, comp, antloc, &expr_hash_table);
9b774782 2942 prune_expressions (false);
bb457bd9 2943 compute_code_hoist_vbeinout ();
d47cc544 2944 calculate_dominance_info (CDI_DOMINATORS);
10d22567
ZD
2945 if (dump_file)
2946 fprintf (dump_file, "\n");
bb457bd9
JL
2947}
2948
4b8181c5
BC
2949/* Update register pressure for BB when hoisting an expression from
2950 instruction FROM, if live ranges of inputs are shrunk. Also
2951 maintain live_in information if the live range of a register referred
2952 to in FROM is shrunk.
2953
2954 Return 0 if register pressure doesn't change, otherwise return
2955 the number by which register pressure is decreased.
2956
2957 NOTE: Register pressure won't be increased in this function. */
2958
2959static int
2960update_bb_reg_pressure (basic_block bb, rtx from)
2961{
2962 rtx dreg, insn;
2963 basic_block succ_bb;
bfac633a 2964 df_ref use, op_ref;
4b8181c5
BC
2965 edge succ;
2966 edge_iterator ei;
2967 int decreased_pressure = 0;
2968 int nregs;
2969 enum reg_class pressure_class;
bfac633a
RS
2970
2971 FOR_EACH_INSN_USE (use, from)
4b8181c5 2972 {
bfac633a 2973 dreg = DF_REF_REAL_REG (use);
4b8181c5
BC
2974 /* The live range of the register is shrunk only if it isn't:
2975 1. referred to on any path from the end of this block to EXIT, or
2976 2. referred to by insns other than FROM in this block. */
2977 FOR_EACH_EDGE (succ, ei, bb->succs)
2978 {
2979 succ_bb = succ->dest;
fefa31b5 2980 if (succ_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4b8181c5
BC
2981 continue;
2982
2983 if (bitmap_bit_p (BB_DATA (succ_bb)->live_in, REGNO (dreg)))
2984 break;
2985 }
2986 if (succ != NULL)
2987 continue;
2988
2989 op_ref = DF_REG_USE_CHAIN (REGNO (dreg));
2990 for (; op_ref; op_ref = DF_REF_NEXT_REG (op_ref))
2991 {
2992 if (!DF_REF_INSN_INFO (op_ref))
2993 continue;
2994
2995 insn = DF_REF_INSN (op_ref);
2996 if (BLOCK_FOR_INSN (insn) == bb
2997 && NONDEBUG_INSN_P (insn) && insn != from)
2998 break;
2999 }
3000
3001 pressure_class = get_regno_pressure_class (REGNO (dreg), &nregs);
3002 /* Decrease register pressure and update live_in information for
3003 this block. */
3004 if (!op_ref && pressure_class != NO_REGS)
3005 {
3006 decreased_pressure += nregs;
3007 BB_DATA (bb)->max_reg_pressure[pressure_class] -= nregs;
3008 bitmap_clear_bit (BB_DATA (bb)->live_in, REGNO (dreg));
3009 }
3010 }
3011 return decreased_pressure;
3012}
3013
b11f0116
BC
3014/* Determine if the expression EXPR should be hoisted to EXPR_BB up in the
3015 flow graph, if it can reach BB unimpaired. Stop the search if the
3016 expression would need to be moved more than DISTANCE instructions.
3017
3018 DISTANCE is the number of instructions through which EXPR can be
3019 hoisted up in flow graph.
3020
3021 BB_SIZE points to an array which contains the number of instructions
3022 for each basic block.
3023
3024 PRESSURE_CLASS and NREGS are register class and number of hard registers
3025 for storing EXPR.
3026
3027 HOISTED_BBS points to a bitmap indicating basic blocks through which
3028 EXPR is hoisted.
bb457bd9 3029
4b8181c5
BC
3030 FROM is the instruction from which EXPR is hoisted.
3031
bb457bd9
JL
3032 It's unclear exactly what Muchnick meant by "unimpared". It seems
3033 to me that the expression must either be computed or transparent in
3034 *every* block in the path(s) from EXPR_BB to BB. Any other definition
3035 would allow the expression to be hoisted out of loops, even if
3036 the expression wasn't a loop invariant.
3037
3038 Contrast this to reachability for PRE where an expression is
3039 considered reachable if *any* path reaches instead of *all*
3040 paths. */
3041
3042static int
b11f0116
BC
3043should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
3044 basic_block bb, sbitmap visited, int distance,
3045 int *bb_size, enum reg_class pressure_class,
4b8181c5 3046 int *nregs, bitmap hoisted_bbs, rtx from)
bb457bd9 3047{
b11f0116 3048 unsigned int i;
bb457bd9 3049 edge pred;
628f6a4e 3050 edge_iterator ei;
b11f0116 3051 sbitmap_iterator sbi;
283a2545 3052 int visited_allocated_locally = 0;
4b8181c5 3053 int decreased_pressure = 0;
589005ff 3054
4b8181c5
BC
3055 if (flag_ira_hoist_pressure)
3056 {
3057 /* Record old information of basic block BB when it is visited
3058 at the first time. */
3059 if (!bitmap_bit_p (hoisted_bbs, bb->index))
3060 {
3061 struct bb_data *data = BB_DATA (bb);
3062 bitmap_copy (data->backup, data->live_in);
3063 data->old_pressure = data->max_reg_pressure[pressure_class];
3064 }
3065 decreased_pressure = update_bb_reg_pressure (bb, from);
3066 }
20160347
MK
3067 /* Terminate the search if distance, for which EXPR is allowed to move,
3068 is exhausted. */
3069 if (distance > 0)
3070 {
4b8181c5
BC
3071 if (flag_ira_hoist_pressure)
3072 {
3073 /* Prefer to hoist EXPR if register pressure is decreased. */
3074 if (decreased_pressure > *nregs)
3075 distance += bb_size[bb->index];
3076 /* Let EXPR be hoisted through basic block at no cost if one
3077 of following conditions is satisfied:
3078
3079 1. The basic block has low register pressure.
3080 2. Register pressure won't be increases after hoisting EXPR.
3081
3082 Constant expressions is handled conservatively, because
3083 hoisting constant expression aggressively results in worse
3084 code. This decision is made by the observation of CSiBE
3085 on ARM target, while it has no obvious effect on other
3086 targets like x86, x86_64, mips and powerpc. */
3087 else if (CONST_INT_P (expr->expr)
3088 || (BB_DATA (bb)->max_reg_pressure[pressure_class]
3089 >= ira_class_hard_regs_num[pressure_class]
3090 && decreased_pressure < *nregs))
3091 distance -= bb_size[bb->index];
3092 }
3093 else
b11f0116 3094 distance -= bb_size[bb->index];
20160347
MK
3095
3096 if (distance <= 0)
3097 return 0;
3098 }
3099 else
3100 gcc_assert (distance == 0);
bb457bd9
JL
3101
3102 if (visited == NULL)
3103 {
8e42ace1 3104 visited_allocated_locally = 1;
8b1c6fd7 3105 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 3106 bitmap_clear (visited);
bb457bd9
JL
3107 }
3108
628f6a4e 3109 FOR_EACH_EDGE (pred, ei, bb->preds)
bb457bd9 3110 {
e2d2ed72 3111 basic_block pred_bb = pred->src;
bb457bd9 3112
fefa31b5 3113 if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
bb457bd9 3114 break;
f305679f
JH
3115 else if (pred_bb == expr_bb)
3116 continue;
d7c028c0 3117 else if (bitmap_bit_p (visited, pred_bb->index))
bb457bd9 3118 continue;
d7c028c0 3119 else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
bb457bd9
JL
3120 break;
3121 /* Not killed. */
3122 else
3123 {
d7c028c0 3124 bitmap_set_bit (visited, pred_bb->index);
b11f0116
BC
3125 if (! should_hoist_expr_to_dom (expr_bb, expr, pred_bb,
3126 visited, distance, bb_size,
4b8181c5
BC
3127 pressure_class, nregs,
3128 hoisted_bbs, from))
bb457bd9
JL
3129 break;
3130 }
3131 }
589005ff 3132 if (visited_allocated_locally)
b11f0116
BC
3133 {
3134 /* If EXPR can be hoisted to expr_bb, record basic blocks through
4b8181c5 3135 which EXPR is hoisted in hoisted_bbs. */
b11f0116
BC
3136 if (flag_ira_hoist_pressure && !pred)
3137 {
4b8181c5
BC
3138 /* Record the basic block from which EXPR is hoisted. */
3139 bitmap_set_bit (visited, bb->index);
d4ac4ce2 3140 EXECUTE_IF_SET_IN_BITMAP (visited, 0, i, sbi)
4b8181c5 3141 bitmap_set_bit (hoisted_bbs, i);
b11f0116
BC
3142 }
3143 sbitmap_free (visited);
3144 }
c4c81601 3145
bb457bd9
JL
3146 return (pred == NULL);
3147}
3148\f
073a8998 3149/* Find occurrence in BB. */
43c8a043 3150
20160347
MK
3151static struct occr *
3152find_occr_in_bb (struct occr *occr, basic_block bb)
3153{
3154 /* Find the right occurrence of this expression. */
3155 while (occr && BLOCK_FOR_INSN (occr->insn) != bb)
3156 occr = occr->next;
3157
3158 return occr;
3159}
3160
b11f0116
BC
3161/* Actually perform code hoisting.
3162
3163 The code hoisting pass can hoist multiple computations of the same
3164 expression along a dominated path to a dominating basic block, like
3165 from b2/b3 to b1 as depicted below:
3166
3167 b1 ------
3168 /\ |
3169 / \ |
3170 bx by distance
3171 / \ |
3172 / \ |
3173 b2 b3 ------
3174
3175 Unfortunately code hoisting generally extends the live range of an
3176 output pseudo register, which increases register pressure and hurts
3177 register allocation. To address this issue, an attribute MAX_DISTANCE
3178 is computed and attached to each expression. The attribute is computed
3179 from the rtx cost of the corresponding expression and is used to control
3180 how far the expression can be hoisted up in the flow graph. As the
3181 expression is hoisted up in the flow graph, GCC decreases its DISTANCE
4b8181c5
BC
3182 and stops the hoist if DISTANCE reaches 0. Code hoisting can decrease
3183 register pressure if the live ranges of inputs are shrunk.
b11f0116
BC
3184
3185 Option "-fira-hoist-pressure" implements register-pressure-directed
3186 hoisting based on the above method. The rationale is:
3187 1. Calculate register pressure for each basic block by reusing the IRA
3188 facility.
3189 2. When an expression is hoisted through one basic block, GCC checks
4b8181c5
BC
3190 the change of live ranges for inputs/output. The basic block's
3191 register pressure will be increased because of the extended live
3192 range of the output. However, register pressure will be decreased
3193 if the live ranges of inputs are shrunk.
3194 3. After knowing how hoisting affects register pressure, GCC prefers
3195 to hoist the expression if it can decrease register pressure, by
3196 increasing the DISTANCE of the corresponding expression.
3197 4. If hoisting the expression increases register pressure, GCC checks
3198 the register pressure of the basic block and decreases DISTANCE only if
3199 the register pressure is high. In other words, the expression will be
3200 hoisted through at no cost if the basic block has low register
3201 pressure.
3202 5. Update register pressure information for basic blocks through
3203 which the expression is hoisted. */
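/* An illustrative sketch of the DISTANCE bookkeeping (hypothetical
   numbers; the within-block travel credit handled in hoist_code and
   should_hoist_expr_to_dom is ignored here): suppose EXPR has
   MAX_DISTANCE 10 and occurrences in b2 and b3 of the picture above,
   with bb_size[bx] == 8 and bb_size[by] == 4.  Hoisting the b3
   occurrence to b1 through by costs about 4, leaving a distance of 6;
   hoisting the b2 occurrence through bx costs about 8, leaving 2, so
   both are accepted, but with a MAX_DISTANCE of 7 the b2 hoist would
   be rejected.  With -fira-hoist-pressure, a non-constant EXPR passes
   through a block at no cost while that block's register pressure
   stays below ira_class_hard_regs_num, and even gains distance when
   the hoist shrinks the live ranges of its inputs by more than it
   extends the live range of its output.  */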
c4c81601 3204
5f39ad47 3205static int
1d088dee 3206hoist_code (void)
bb457bd9 3207{
e0082a72 3208 basic_block bb, dominated;
9771b263 3209 vec<basic_block> dom_tree_walk;
cad9aa15 3210 unsigned int dom_tree_walk_index;
9771b263 3211 vec<basic_block> domby;
b11f0116 3212 unsigned int i, j, k;
bb457bd9 3213 struct expr **index_map;
c4c81601 3214 struct expr *expr;
20160347
MK
3215 int *to_bb_head;
3216 int *bb_size;
5f39ad47 3217 int changed = 0;
b11f0116
BC
3218 struct bb_data *data;
3219 /* Basic blocks that have occurrences reachable from BB. */
3220 bitmap from_bbs;
3221 /* Basic blocks through which expr is hoisted. */
3222 bitmap hoisted_bbs = NULL;
3223 bitmap_iterator bi;
bb457bd9 3224
bb457bd9
JL
3225 /* Compute a mapping from expression number (`bitmap_index') to
3226 hash table entry. */
3227
5ed6ace5 3228 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
02280659 3229 for (i = 0; i < expr_hash_table.size; i++)
43c8a043 3230 for (expr = expr_hash_table.table[i]; expr; expr = expr->next_same_hash)
c4c81601 3231 index_map[expr->bitmap_index] = expr;
bb457bd9 3232
20160347
MK
3233 /* Calculate sizes of basic blocks and note how far
3234 each instruction is from the start of its block. We then use this
3235 data to restrict the distance an expression can travel. */
3236
3237 to_bb_head = XCNEWVEC (int, get_max_uid ());
8b1c6fd7 3238 bb_size = XCNEWVEC (int, last_basic_block_for_fn (cfun));
20160347 3239
11cd3bed 3240 FOR_EACH_BB_FN (bb, cfun)
20160347
MK
3241 {
3242 rtx insn;
20160347
MK
3243 int to_head;
3244
20160347 3245 to_head = 0;
05b5ea34 3246 FOR_BB_INSNS (bb, insn)
20160347
MK
3247 {
3248 /* Don't count debug instructions so that they do not affect
3249 hoisting decisions. */
3250 if (NONDEBUG_INSN_P (insn))
3251 to_bb_head[INSN_UID (insn)] = to_head++;
20160347
MK
3252 }
3253
3254 bb_size[bb->index] = to_head;
3255 }
3256
fefa31b5
DM
3257 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1
3258 && (EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0)->dest
3259 == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb));
cad9aa15 3260
b11f0116
BC
3261 from_bbs = BITMAP_ALLOC (NULL);
3262 if (flag_ira_hoist_pressure)
3263 hoisted_bbs = BITMAP_ALLOC (NULL);
3264
cad9aa15 3265 dom_tree_walk = get_all_dominated_blocks (CDI_DOMINATORS,
fefa31b5 3266 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb);
cad9aa15 3267
bb457bd9
JL
3268 /* Walk over each basic block looking for potentially hoistable
3269 expressions; nothing gets hoisted from the entry block. */
9771b263 3270 FOR_EACH_VEC_ELT (dom_tree_walk, dom_tree_walk_index, bb)
bb457bd9 3271 {
cad9aa15
MK
3272 domby = get_dominated_to_depth (CDI_DOMINATORS, bb, MAX_HOIST_DEPTH);
3273
9771b263 3274 if (domby.length () == 0)
cad9aa15 3275 continue;
bb457bd9
JL
3276
3277 /* Examine each expression that is very busy at the exit of this
3278 block. These are the potentially hoistable expressions. */
5829cc0f 3279 for (i = 0; i < SBITMAP_SIZE (hoist_vbeout[bb->index]); i++)
bb457bd9 3280 {
d7c028c0 3281 if (bitmap_bit_p (hoist_vbeout[bb->index], i))
bb457bd9 3282 {
b11f0116
BC
3283 int nregs = 0;
3284 enum reg_class pressure_class = NO_REGS;
cad9aa15
MK
3285 /* Current expression. */
3286 struct expr *expr = index_map[i];
073a8998 3287 /* Number of occurrences of EXPR that can be hoisted to BB. */
cad9aa15 3288 int hoistable = 0;
073a8998 3289 /* Occurrences reachable from BB. */
6e1aa848 3290 vec<occr_t> occrs_to_hoist = vNULL;
cad9aa15
MK
3291 /* We want to insert the expression into BB only once, so
3292 note when we've inserted it. */
3293 int insn_inserted_p;
3294 occr_t occr;
3295
ce4c0015 3296 /* If an expression is computed in BB and is available at end of
073a8998 3297 BB, hoist all occurrences dominated by BB to BB. */
d7c028c0 3298 if (bitmap_bit_p (comp[bb->index], i))
cad9aa15
MK
3299 {
3300 occr = find_occr_in_bb (expr->antic_occr, bb);
3301
3302 if (occr)
3303 {
073a8998 3304 /* An occurrence might already have been deleted
cad9aa15 3305 while processing a dominator of BB. */
2d36b47f 3306 if (!occr->deleted_p)
cad9aa15
MK
3307 {
3308 gcc_assert (NONDEBUG_INSN_P (occr->insn));
3309 hoistable++;
3310 }
3311 }
3312 else
3313 hoistable++;
3314 }
ce4c0015 3315
bb457bd9
JL
3316 /* We've found a potentially hoistable expression, now
3317 we look at every block BB dominates to see if it
3318 computes the expression. */
9771b263 3319 FOR_EACH_VEC_ELT (domby, j, dominated)
bb457bd9 3320 {
20160347
MK
3321 int max_distance;
3322
bb457bd9 3323 /* Ignore self dominance. */
c635a1ec 3324 if (bb == dominated)
bb457bd9 3325 continue;
bb457bd9
JL
3326 /* We've found a dominated block, now see if it computes
3327 the busy expression and whether or not moving that
3328 expression to the "beginning" of that block is safe. */
d7c028c0 3329 if (!bitmap_bit_p (antloc[dominated->index], i))
bb457bd9
JL
3330 continue;
3331
cad9aa15
MK
3332 occr = find_occr_in_bb (expr->antic_occr, dominated);
3333 gcc_assert (occr);
20160347 3334
073a8998 3335 /* An occurrence might already have been deleted
cad9aa15
MK
3336 while processing a dominator of BB. */
3337 if (occr->deleted_p)
2d36b47f 3338 continue;
cad9aa15
MK
3339 gcc_assert (NONDEBUG_INSN_P (occr->insn));
3340
3341 max_distance = expr->max_distance;
3342 if (max_distance > 0)
3343 /* Adjust MAX_DISTANCE to account for the fact that
3344 OCCR won't have to travel all of DOMINATED, but
3345 only part of it. */
3346 max_distance += (bb_size[dominated->index]
3347 - to_bb_head[INSN_UID (occr->insn)]);
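 /* A worked example with hypothetical numbers: if DOMINATED
 contains 20 non-debug insns and OCCR is the 5th of them
 (to_bb_head == 4), the adjustment above credits MAX_DISTANCE
 with 16, i.e. the insns from OCCR to the end of DOMINATED
 that the hoisted computation never has to cross; only the 4
 insns above OCCR count against the distance. */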
20160347 3348
b11f0116
BC
3349 pressure_class = get_pressure_class_and_nregs (occr->insn,
3350 &nregs);
3351
3352 /* Note whether the expression should be hoisted from the dominated
3353 block to BB if it can reach DOMINATED unimpaired.
bb457bd9
JL
3354
3355 Keep track of how many times this expression is hoistable
3356 from a dominated block into BB. */
b11f0116
BC
3357 if (should_hoist_expr_to_dom (bb, expr, dominated, NULL,
3358 max_distance, bb_size,
3359 pressure_class, &nregs,
4b8181c5 3360 hoisted_bbs, occr->insn))
cad9aa15
MK
3361 {
3362 hoistable++;
9771b263 3363 occrs_to_hoist.safe_push (occr);
cad9aa15
MK
3364 bitmap_set_bit (from_bbs, dominated->index);
3365 }
bb457bd9
JL
3366 }
3367
ff7cc307 3368 /* If we found more than one hoistable occurrence of this
cad9aa15 3369 expression, then note it in the vector of expressions to
bb457bd9
JL
3370 hoist. It makes no sense to hoist things which are computed
3371 in only one BB, and doing so tends to pessimize register
3372 allocation. One could increase this value to try harder
3373 to avoid any possible code expansion due to register
3374 allocation issues; however experiments have shown that
3375 the vast majority of hoistable expressions are only movable
e0bb17a8 3376 from two successors, so raising this threshold is likely
bb457bd9 3377 to nullify any benefit we get from code hoisting. */
62a3f636 3378 if (hoistable > 1 && dbg_cnt (hoist_insn))
bb457bd9 3379 {
9771b263 3380 /* If (hoistable != vec::length), then there is
073a8998 3381 an occurrence of EXPR in BB itself. Don't waste
cad9aa15 3382 time looking for LCA in this case. */
9771b263 3383 if ((unsigned) hoistable == occrs_to_hoist.length ())
cad9aa15
MK
3384 {
3385 basic_block lca;
3386
3387 lca = nearest_common_dominator_for_set (CDI_DOMINATORS,
3388 from_bbs);
3389 if (lca != bb)
073a8998 3390 /* Punt, it's better to hoist these occurrences to
cad9aa15 3391 LCA. */
9771b263 3392 occrs_to_hoist.release ();
cad9aa15 3393 }
bb457bd9 3394 }
cad9aa15 3395 else
688010ba 3396 /* Punt, no point hoisting a single occurrence. */
9771b263 3397 occrs_to_hoist.release ();
bb457bd9 3398
b11f0116 3399 if (flag_ira_hoist_pressure
9771b263 3400 && !occrs_to_hoist.is_empty ())
b11f0116 3401 {
4b8181c5
BC
3402 /* Increase register pressure of basic blocks to which
3403 expr is hoisted because of the extended live range of
3404 the output. */
b11f0116
BC
3405 data = BB_DATA (bb);
3406 data->max_reg_pressure[pressure_class] += nregs;
4b8181c5
BC
3407 EXECUTE_IF_SET_IN_BITMAP (hoisted_bbs, 0, k, bi)
3408 {
06e28de2 3409 data = BB_DATA (BASIC_BLOCK_FOR_FN (cfun, k));
4b8181c5
BC
3410 data->max_reg_pressure[pressure_class] += nregs;
3411 }
b11f0116
BC
3412 }
3413 else if (flag_ira_hoist_pressure)
3414 {
4b8181c5
BC
3415 /* Restore register pressure and live_in info for basic
3416 blocks recorded in hoisted_bbs when expr will not be
3417 hoisted. */
b11f0116
BC
3418 EXECUTE_IF_SET_IN_BITMAP (hoisted_bbs, 0, k, bi)
3419 {
06e28de2 3420 data = BB_DATA (BASIC_BLOCK_FOR_FN (cfun, k));
4b8181c5
BC
3421 bitmap_copy (data->live_in, data->backup);
3422 data->max_reg_pressure[pressure_class]
3423 = data->old_pressure;
b11f0116
BC
3424 }
3425 }
3426
3427 if (flag_ira_hoist_pressure)
3428 bitmap_clear (hoisted_bbs);
3429
cad9aa15 3430 insn_inserted_p = 0;
bb457bd9 3431
073a8998 3432 /* Walk through occurrences of the I'th expression we want
cad9aa15 3433 to hoist to BB and make the transformations. */
9771b263 3434 FOR_EACH_VEC_ELT (occrs_to_hoist, j, occr)
bb457bd9 3435 {
cad9aa15 3436 rtx insn;
b6808818 3437 const_rtx set;
cad9aa15
MK
3438
3439 gcc_assert (!occr->deleted_p);
3440
3441 insn = occr->insn;
b6808818 3442 set = single_set_gcse (insn);
cad9aa15
MK
3443
3444 /* Create a pseudo-reg to store the result of reaching
3445 expressions into. Get the mode for the new pseudo
3446 from the mode of the original destination pseudo.
3447
3448 It is important to use new pseudos whenever we
3449 emit a set. This will allow reload to use
3450 rematerialization for such registers. */
3451 if (!insn_inserted_p)
3452 expr->reaching_reg
3453 = gen_reg_rtx_and_attrs (SET_DEST (set));
3454
43c8a043 3455 gcse_emit_move_after (SET_DEST (set), expr->reaching_reg,
cad9aa15
MK
3456 insn);
3457 delete_insn (insn);
3458 occr->deleted_p = 1;
3459 changed = 1;
3460 gcse_subst_count++;
3461
3462 if (!insn_inserted_p)
bb457bd9 3463 {
cad9aa15
MK
3464 insert_insn_end_basic_block (expr, bb);
3465 insn_inserted_p = 1;
bb457bd9
JL
3466 }
3467 }
cad9aa15 3468
9771b263 3469 occrs_to_hoist.release ();
cad9aa15 3470 bitmap_clear (from_bbs);
bb457bd9
JL
3471 }
3472 }
9771b263 3473 domby.release ();
bb457bd9 3474 }
c4c81601 3475
9771b263 3476 dom_tree_walk.release ();
b11f0116
BC
3477 BITMAP_FREE (from_bbs);
3478 if (flag_ira_hoist_pressure)
3479 BITMAP_FREE (hoisted_bbs);
3480
20160347
MK
3481 free (bb_size);
3482 free (to_bb_head);
8e42ace1 3483 free (index_map);
5f39ad47
SB
3484
3485 return changed;
bb457bd9
JL
3486}
3487
b11f0116
BC
3488/* Return pressure class and number of needed hard registers (through
3489 *NREGS) of register REGNO. */
3490static enum reg_class
3491get_regno_pressure_class (int regno, int *nregs)
3492{
3493 if (regno >= FIRST_PSEUDO_REGISTER)
3494 {
3495 enum reg_class pressure_class;
3496
3497 pressure_class = reg_allocno_class (regno);
3498 pressure_class = ira_pressure_class_translate[pressure_class];
3499 *nregs
3500 = ira_reg_class_max_nregs[pressure_class][PSEUDO_REGNO_MODE (regno)];
3501 return pressure_class;
3502 }
3503 else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno)
3504 && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
3505 {
3506 *nregs = 1;
3507 return ira_pressure_class_translate[REGNO_REG_CLASS (regno)];
3508 }
3509 else
3510 {
3511 *nregs = 0;
3512 return NO_REGS;
3513 }
3514}
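/* For instance (a hypothetical 32-bit target): a pseudo holding a
   DImode value whose allocno class translates to GENERAL_REGS gives
   pressure_class == GENERAL_REGS and *nregs == 2, since the value
   occupies two word-sized hard registers; a hard register that is
   unallocatable or eliminable contributes nothing.  */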
3515
3516/* Return pressure class and number of hard registers (through *NREGS)
3517 for destination of INSN. */
3518static enum reg_class
3519get_pressure_class_and_nregs (rtx insn, int *nregs)
3520{
3521 rtx reg;
3522 enum reg_class pressure_class;
b6808818 3523 const_rtx set = single_set_gcse (insn);
b11f0116 3524
b11f0116
BC
3525 reg = SET_DEST (set);
3526 if (GET_CODE (reg) == SUBREG)
3527 reg = SUBREG_REG (reg);
3528 if (MEM_P (reg))
3529 {
3530 *nregs = 0;
3531 pressure_class = NO_REGS;
3532 }
3533 else
3534 {
3535 gcc_assert (REG_P (reg));
3536 pressure_class = reg_allocno_class (REGNO (reg));
3537 pressure_class = ira_pressure_class_translate[pressure_class];
3538 *nregs
3539 = ira_reg_class_max_nregs[pressure_class][GET_MODE (SET_SRC (set))];
3540 }
3541 return pressure_class;
3542}
3543
3544/* Increase (if INCR_P) or decrease current register pressure for
3545 register REGNO. */
3546static void
3547change_pressure (int regno, bool incr_p)
3548{
3549 int nregs;
3550 enum reg_class pressure_class;
3551
3552 pressure_class = get_regno_pressure_class (regno, &nregs);
3553 if (! incr_p)
3554 curr_reg_pressure[pressure_class] -= nregs;
3555 else
3556 {
3557 curr_reg_pressure[pressure_class] += nregs;
3558 if (BB_DATA (curr_bb)->max_reg_pressure[pressure_class]
3559 < curr_reg_pressure[pressure_class])
3560 BB_DATA (curr_bb)->max_reg_pressure[pressure_class]
3561 = curr_reg_pressure[pressure_class];
3562 }
3563}
3564
3565/* Calculate register pressure for each basic block by walking insns
3566 from last to first. */
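/* A minimal sketch (hypothetical pseudos): walking the block

      r100 = r101 + r102
      r103 = r100 * 2

   in reverse from a live-out set of {r103}, the def of r103 drops it
   from the live set, the use of r100 adds r100, the def of r100 then
   drops it again and the uses add r101 and r102.  change_pressure
   records the largest pressure seen along the way for each pressure
   class in BB_DATA (bb)->max_reg_pressure.  */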
3567static void
3568calculate_bb_reg_pressure (void)
3569{
3570 int i;
3571 unsigned int j;
3572 rtx insn;
3573 basic_block bb;
3574 bitmap curr_regs_live;
3575 bitmap_iterator bi;
3576
3577
8d49e7ef 3578 ira_setup_eliminable_regset ();
b11f0116 3579 curr_regs_live = BITMAP_ALLOC (&reg_obstack);
11cd3bed 3580 FOR_EACH_BB_FN (bb, cfun)
b11f0116
BC
3581 {
3582 curr_bb = bb;
4b8181c5
BC
3583 BB_DATA (bb)->live_in = BITMAP_ALLOC (NULL);
3584 BB_DATA (bb)->backup = BITMAP_ALLOC (NULL);
3585 bitmap_copy (BB_DATA (bb)->live_in, df_get_live_in (bb));
3586 bitmap_copy (curr_regs_live, df_get_live_out (bb));
b11f0116
BC
3587 for (i = 0; i < ira_pressure_classes_num; i++)
3588 curr_reg_pressure[ira_pressure_classes[i]] = 0;
3589 EXECUTE_IF_SET_IN_BITMAP (curr_regs_live, 0, j, bi)
3590 change_pressure (j, true);
3591
3592 FOR_BB_INSNS_REVERSE (bb, insn)
3593 {
3594 rtx dreg;
3595 int regno;
bfac633a 3596 df_ref def, use;
b11f0116
BC
3597
3598 if (! NONDEBUG_INSN_P (insn))
3599 continue;
3600
bfac633a 3601 FOR_EACH_INSN_DEF (def, insn)
b11f0116 3602 {
bfac633a 3603 dreg = DF_REF_REAL_REG (def);
b11f0116
BC
3604 gcc_assert (REG_P (dreg));
3605 regno = REGNO (dreg);
bfac633a 3606 if (!(DF_REF_FLAGS (def)
b11f0116
BC
3607 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3608 {
3609 if (bitmap_clear_bit (curr_regs_live, regno))
3610 change_pressure (regno, false);
3611 }
3612 }
3613
bfac633a 3614 FOR_EACH_INSN_USE (use, insn)
b11f0116 3615 {
bfac633a 3616 dreg = DF_REF_REAL_REG (use);
b11f0116
BC
3617 gcc_assert (REG_P (dreg));
3618 regno = REGNO (dreg);
3619 if (bitmap_set_bit (curr_regs_live, regno))
3620 change_pressure (regno, true);
3621 }
3622 }
3623 }
3624 BITMAP_FREE (curr_regs_live);
3625
3626 if (dump_file == NULL)
3627 return;
3628
3629 fprintf (dump_file, "\nRegister Pressure: \n");
11cd3bed 3630 FOR_EACH_BB_FN (bb, cfun)
b11f0116
BC
3631 {
3632 fprintf (dump_file, " Basic block %d: \n", bb->index);
3633 for (i = 0; (int) i < ira_pressure_classes_num; i++)
3634 {
3635 enum reg_class pressure_class;
3636
3637 pressure_class = ira_pressure_classes[i];
3638 if (BB_DATA (bb)->max_reg_pressure[pressure_class] == 0)
3639 continue;
3640
3641 fprintf (dump_file, " %s=%d\n", reg_class_names[pressure_class],
3642 BB_DATA (bb)->max_reg_pressure[pressure_class]);
3643 }
3644 }
3645 fprintf (dump_file, "\n");
3646}
3647
bb457bd9
JL
3648/* Top level routine to perform one code hoisting (aka unification) pass
3649
cc2902df 3650 Return nonzero if a change was made. */
bb457bd9
JL
3651
3652static int
1d088dee 3653one_code_hoisting_pass (void)
bb457bd9
JL
3654{
3655 int changed = 0;
3656
5f39ad47
SB
3657 gcse_subst_count = 0;
3658 gcse_create_count = 0;
3659
3660 /* Return if there's nothing to do, or it is too expensive. */
0cae8d31 3661 if (n_basic_blocks_for_fn (cfun) <= NUM_FIXED_BLOCKS + 1
5f39ad47
SB
3662 || is_too_expensive (_("GCSE disabled")))
3663 return 0;
3664
20160347
MK
3665 doing_code_hoisting_p = true;
3666
b11f0116
BC
3667 /* Calculate register pressure for each basic block. */
3668 if (flag_ira_hoist_pressure)
3669 {
3670 regstat_init_n_sets_and_refs ();
3671 ira_set_pseudo_classes (false, dump_file);
3672 alloc_aux_for_blocks (sizeof (struct bb_data));
3673 calculate_bb_reg_pressure ();
3674 regstat_free_n_sets_and_refs ();
3675 }
3676
5f39ad47
SB
3677 /* We need alias. */
3678 init_alias_analysis ();
3679
3680 bytes_used = 0;
3681 gcc_obstack_init (&gcse_obstack);
3682 alloc_gcse_mem ();
3683
e45425ec 3684 alloc_hash_table (&expr_hash_table);
02280659 3685 compute_hash_table (&expr_hash_table);
10d22567
ZD
3686 if (dump_file)
3687 dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 3688
02280659 3689 if (expr_hash_table.n_elems > 0)
bb457bd9 3690 {
8b1c6fd7
DM
3691 alloc_code_hoist_mem (last_basic_block_for_fn (cfun),
3692 expr_hash_table.n_elems);
bb457bd9 3693 compute_code_hoist_data ();
5f39ad47 3694 changed = hoist_code ();
bb457bd9
JL
3695 free_code_hoist_mem ();
3696 }
c4c81601 3697
b11f0116
BC
3698 if (flag_ira_hoist_pressure)
3699 {
3700 free_aux_for_blocks ();
3701 free_reg_info ();
3702 }
02280659 3703 free_hash_table (&expr_hash_table);
5f39ad47
SB
3704 free_gcse_mem ();
3705 obstack_free (&gcse_obstack, NULL);
3706
3707 /* We are finished with alias. */
3708 end_alias_analysis ();
3709
3710 if (dump_file)
3711 {
3712 fprintf (dump_file, "HOIST of %s, %d basic blocks, %d bytes needed, ",
0cae8d31
DM
3713 current_function_name (), n_basic_blocks_for_fn (cfun),
3714 bytes_used);
5f39ad47
SB
3715 fprintf (dump_file, "%d substs, %d insns created\n",
3716 gcse_subst_count, gcse_create_count);
3717 }
bb457bd9 3718
20160347
MK
3719 doing_code_hoisting_p = false;
3720
bb457bd9
JL
3721 return changed;
3722}
a13d4ebf 3723\f
43c8a043
EB
3724/* Here we provide the things required to do store motion towards the exit.
3725 In order for this to be effective, gcse also needed to be taught how to
3726 move a load when it is killed only by a store to itself.
a13d4ebf
AM
3727
3728 int i;
3729 float a[10];
3730
3731 void foo(float scale)
3732 {
3733 for (i=0; i<10; i++)
3734 a[i] *= scale;
3735 }
3736
3737 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
3738 the load out since it's live around the loop, and stored at the bottom
3739 of the loop.
a13d4ebf 3740
589005ff 3741 The 'Load Motion' referred to and implemented in this file is
43c8a043 3742 an enhancement to gcse which when using edge based LCM, recognizes
a13d4ebf
AM
3743 this situation and allows gcse to move the load out of the loop.
3744
3745 Once gcse has hoisted the load, store motion can then push this
3746 load towards the exit, and we end up with no loads or stores of 'i'
3747 in the loop. */
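/* A hedged sketch of the intended end result for the example above
   (illustrative source only, not what the compiler literally emits):

   void foo (float scale)
   {
     int i_tmp;
     for (i_tmp = 0; i_tmp < 10; i_tmp++)
       a[i_tmp] *= scale;
     i = i_tmp;
   }

   The loop-carried value of 'i' lives in a register and is stored
   back once after the loop, so the loop body touches only the
   temporary.  */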
3748
ff7cc307 3749/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
3750 doesn't find one, we create one and initialize it. */
3751
3752static struct ls_expr *
1d088dee 3753ldst_entry (rtx x)
a13d4ebf 3754{
b58b21d5 3755 int do_not_record_p = 0;
a13d4ebf 3756 struct ls_expr * ptr;
b58b21d5 3757 unsigned int hash;
4a8fb1a1 3758 ls_expr **slot;
9727e468 3759 struct ls_expr e;
a13d4ebf 3760
0516f6fe
SB
3761 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
3762 NULL, /*have_reg_qty=*/false);
a13d4ebf 3763
9727e468 3764 e.pattern = x;
c203e8a7 3765 slot = pre_ldst_table->find_slot_with_hash (&e, hash, INSERT);
9727e468 3766 if (*slot)
4a8fb1a1 3767 return *slot;
b58b21d5 3768
5ed6ace5 3769 ptr = XNEW (struct ls_expr);
b58b21d5
RS
3770
3771 ptr->next = pre_ldst_mems;
3772 ptr->expr = NULL;
3773 ptr->pattern = x;
3774 ptr->pattern_regs = NULL_RTX;
3775 ptr->loads = NULL_RTX;
3776 ptr->stores = NULL_RTX;
3777 ptr->reaching_reg = NULL_RTX;
3778 ptr->invalid = 0;
3779 ptr->index = 0;
3780 ptr->hash_index = hash;
3781 pre_ldst_mems = ptr;
9727e468 3782 *slot = ptr;
589005ff 3783
a13d4ebf
AM
3784 return ptr;
3785}
3786
3787/* Free up an individual ldst entry. */
3788
589005ff 3789static void
1d088dee 3790free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 3791{
aaa4ca30
AJ
3792 free_INSN_LIST_list (& ptr->loads);
3793 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
3794
3795 free (ptr);
3796}
3797
3798/* Free up all memory associated with the ldst list. */
3799
3800static void
43c8a043 3801free_ld_motion_mems (void)
a13d4ebf 3802{
c203e8a7
TS
3803 delete pre_ldst_table;
3804 pre_ldst_table = NULL;
9727e468 3805
589005ff 3806 while (pre_ldst_mems)
a13d4ebf
AM
3807 {
3808 struct ls_expr * tmp = pre_ldst_mems;
3809
3810 pre_ldst_mems = pre_ldst_mems->next;
3811
3812 free_ldst_entry (tmp);
3813 }
3814
3815 pre_ldst_mems = NULL;
3816}
3817
3818/* Dump debugging info about the ldst list. */
3819
3820static void
1d088dee 3821print_ldst_list (FILE * file)
a13d4ebf
AM
3822{
3823 struct ls_expr * ptr;
3824
3825 fprintf (file, "LDST list: \n");
3826
43c8a043 3827 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
a13d4ebf
AM
3828 {
3829 fprintf (file, " Pattern (%3d): ", ptr->index);
3830
3831 print_rtl (file, ptr->pattern);
3832
3833 fprintf (file, "\n Loads : ");
3834
3835 if (ptr->loads)
3836 print_rtl (file, ptr->loads);
3837 else
3838 fprintf (file, "(nil)");
3839
3840 fprintf (file, "\n Stores : ");
3841
3842 if (ptr->stores)
3843 print_rtl (file, ptr->stores);
3844 else
3845 fprintf (file, "(nil)");
3846
3847 fprintf (file, "\n\n");
3848 }
3849
3850 fprintf (file, "\n");
3851}
3852
3853/* Return the entry for X if X is in the list of ldst only expressions, else NULL. */
3854
3855static struct ls_expr *
1d088dee 3856find_rtx_in_ldst (rtx x)
a13d4ebf 3857{
9727e468 3858 struct ls_expr e;
4a8fb1a1 3859 ls_expr **slot;
c203e8a7 3860 if (!pre_ldst_table)
6375779a 3861 return NULL;
9727e468 3862 e.pattern = x;
c203e8a7 3863 slot = pre_ldst_table->find_slot (&e, NO_INSERT);
4a8fb1a1 3864 if (!slot || (*slot)->invalid)
9727e468 3865 return NULL;
4a8fb1a1 3866 return *slot;
a13d4ebf 3867}
a13d4ebf
AM
3868\f
3869/* Load Motion for loads which only kill themselves. */
3870
43c8a043
EB
3871/* Return true if x, a MEM, is a simple access with no side effects.
3872 These are the types of loads we consider for the ld_motion list;
3873 otherwise we let the usual aliasing take care of it. */
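/* For illustration (hypothetical RTL): (mem:SI (symbol_ref ("x"))) is
   a simple access, while a volatile MEM, a BLKmode MEM, or something
   like (mem:SI (plus (reg sp) (const_int 4))) is not -- the last one
   because it mentions the stack pointer and may name an argument slot
   on the stack.  */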
a13d4ebf 3874
589005ff 3875static int
ed7a4b4b 3876simple_mem (const_rtx x)
a13d4ebf 3877{
a13d4ebf
AM
3878 if (MEM_VOLATILE_P (x))
3879 return 0;
589005ff 3880
a13d4ebf
AM
3881 if (GET_MODE (x) == BLKmode)
3882 return 0;
aaa4ca30 3883
47a3dae1 3884 /* If we are handling exceptions, we must be careful with memory references
8f4f502f 3885 that may trap. If we are not, the behavior is undefined, so we may just
47a3dae1 3886 continue. */
8f4f502f 3887 if (cfun->can_throw_non_call_exceptions && may_trap_p (x))
98d3d336
RS
3888 return 0;
3889
47a3dae1
ZD
3890 if (side_effects_p (x))
3891 return 0;
589005ff 3892
47a3dae1
ZD
3893 /* Do not consider function arguments passed on stack. */
3894 if (reg_mentioned_p (stack_pointer_rtx, x))
3895 return 0;
3896
3897 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
3898 return 0;
3899
3900 return 1;
a13d4ebf
AM
3901}
3902
589005ff
KH
3903/* Make sure there isn't a buried reference in this pattern anywhere.
3904 If there is, invalidate the entry for it since we're not capable
3905 of fixing it up just yet. We have to be sure we know about ALL
a13d4ebf
AM
3906 loads since the aliasing code will allow all entries in the
3907 ld_motion list to not-alias itself. If we miss a load, we will get
589005ff 3908 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
3909 fix it up. */
3910
3911static void
1d088dee 3912invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
3913{
3914 const char * fmt;
8e42ace1 3915 int i, j;
a13d4ebf
AM
3916 struct ls_expr * ptr;
3917
3918 /* Invalidate it in the list. */
7b1b4aed 3919 if (MEM_P (x) && simple_mem (x))
a13d4ebf
AM
3920 {
3921 ptr = ldst_entry (x);
3922 ptr->invalid = 1;
3923 }
3924
3925 /* Recursively process the insn. */
3926 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 3927
a13d4ebf
AM
3928 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3929 {
3930 if (fmt[i] == 'e')
3931 invalidate_any_buried_refs (XEXP (x, i));
3932 else if (fmt[i] == 'E')
3933 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3934 invalidate_any_buried_refs (XVECEXP (x, i, j));
3935 }
3936}
3937
4d3eb89a
HPN
3938/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
3939 being defined as MEM loads and stores to symbols, with no side effects
3940 and no registers in the expression. For a MEM destination, we also
3941 check that the insn is still valid if we replace the destination with a
3942 REG, as is done in update_ld_motion_stores. If there are any uses/defs
3943 which don't match these criteria, they are invalidated and trimmed out
3944 later. */
a13d4ebf 3945
589005ff 3946static void
1d088dee 3947compute_ld_motion_mems (void)
a13d4ebf
AM
3948{
3949 struct ls_expr * ptr;
e0082a72 3950 basic_block bb;
a13d4ebf 3951 rtx insn;
589005ff 3952
a13d4ebf 3953 pre_ldst_mems = NULL;
c203e8a7 3954 pre_ldst_table = new hash_table<pre_ldst_expr_hasher> (13);
a13d4ebf 3955
11cd3bed 3956 FOR_EACH_BB_FN (bb, cfun)
a13d4ebf 3957 {
eb232f4e 3958 FOR_BB_INSNS (bb, insn)
a13d4ebf 3959 {
b5b8b0ac 3960 if (NONDEBUG_INSN_P (insn))
a13d4ebf
AM
3961 {
3962 if (GET_CODE (PATTERN (insn)) == SET)
3963 {
3964 rtx src = SET_SRC (PATTERN (insn));
3965 rtx dest = SET_DEST (PATTERN (insn));
2df013f3
JB
3966 rtx note = find_reg_equal_equiv_note (insn);
3967 rtx src_eq;
a13d4ebf
AM
3968
3969 /* Check for a simple LOAD... */
7b1b4aed 3970 if (MEM_P (src) && simple_mem (src))
a13d4ebf
AM
3971 {
3972 ptr = ldst_entry (src);
7b1b4aed 3973 if (REG_P (dest))
a13d4ebf
AM
3974 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
3975 else
3976 ptr->invalid = 1;
3977 }
3978 else
3979 {
3980 /* Make sure there isn't a buried load somewhere. */
3981 invalidate_any_buried_refs (src);
3982 }
589005ff 3983
2df013f3
JB
3984 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
3985 src_eq = XEXP (note, 0);
3986 else
3987 src_eq = NULL_RTX;
3988
3989 if (src_eq != NULL_RTX
3990 && !(MEM_P (src_eq) && simple_mem (src_eq)))
3991 invalidate_any_buried_refs (src_eq);
3992
a13d4ebf
AM
3993 /* Check for stores. Don't worry about aliased ones, they
3994 will block any movement we might do later. We only care
3995 about this exact pattern since this is the only
3996 circumstance in which we will ignore the aliasing info. */
7b1b4aed 3997 if (MEM_P (dest) && simple_mem (dest))
a13d4ebf
AM
3998 {
3999 ptr = ldst_entry (dest);
589005ff 4000
7b1b4aed 4001 if (! MEM_P (src)
4d3eb89a
HPN
4002 && GET_CODE (src) != ASM_OPERANDS
4003 /* Check for REG manually since want_to_gcse_p
4004 returns 0 for all REGs. */
df35c271 4005 && can_assign_to_reg_without_clobbers_p (src))
a13d4ebf
AM
4006 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
4007 else
4008 ptr->invalid = 1;
4009 }
4010 }
4011 else
4012 invalidate_any_buried_refs (PATTERN (insn));
4013 }
4014 }
4015 }
4016}
4017
589005ff 4018/* Remove any references that have either been invalidated or are not in the
a13d4ebf
AM
4019 expression list for pre gcse. */
4020
4021static void
1d088dee 4022trim_ld_motion_mems (void)
a13d4ebf 4023{
b58b21d5
RS
4024 struct ls_expr * * last = & pre_ldst_mems;
4025 struct ls_expr * ptr = pre_ldst_mems;
a13d4ebf
AM
4026
4027 while (ptr != NULL)
4028 {
b58b21d5 4029 struct expr * expr;
589005ff 4030
a13d4ebf 4031 /* Delete if entry has been made invalid. */
b58b21d5 4032 if (! ptr->invalid)
a13d4ebf 4033 {
a13d4ebf 4034 /* Delete if we cannot find this mem in the expression list. */
b58b21d5 4035 unsigned int hash = ptr->hash_index % expr_hash_table.size;
589005ff 4036
b58b21d5
RS
4037 for (expr = expr_hash_table.table[hash];
4038 expr != NULL;
4039 expr = expr->next_same_hash)
4040 if (expr_equiv_p (expr->expr, ptr->pattern))
4041 break;
a13d4ebf
AM
4042 }
4043 else
b58b21d5
RS
4044 expr = (struct expr *) 0;
4045
4046 if (expr)
a13d4ebf
AM
4047 {
4048 /* Set the expression field if we are keeping it. */
a13d4ebf 4049 ptr->expr = expr;
b58b21d5 4050 last = & ptr->next;
a13d4ebf
AM
4051 ptr = ptr->next;
4052 }
b58b21d5
RS
4053 else
4054 {
4055 *last = ptr->next;
c203e8a7 4056 pre_ldst_table->remove_elt_with_hash (ptr, ptr->hash_index);
b58b21d5
RS
4057 free_ldst_entry (ptr);
4058 ptr = * last;
4059 }
a13d4ebf
AM
4060 }
4061
4062 /* Show the world what we've found. */
10d22567
ZD
4063 if (dump_file && pre_ldst_mems != NULL)
4064 print_ldst_list (dump_file);
a13d4ebf
AM
4065}
4066
4067/* This routine will take an expression which we are replacing with
4068 a reaching register, and update any stores that are needed if
4069 that expression is in the ld_motion list. Stores are updated by
a98ebe2e 4070 copying their SRC to the reaching register, and then storing
a13d4ebf
AM
4071 the reaching register into the store location. These keeps the
4072 correct value in the reaching register for the loads. */
4073
4074static void
1d088dee 4075update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
4076{
4077 struct ls_expr * mem_ptr;
4078
4079 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
4080 {
589005ff
KH
4081 /* We can try to find just the REACHED stores, but it shouldn't
4082 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
4083 dead and should be eliminated later. */
4084
4d3eb89a
HPN
4085 /* We replace (set mem expr) with (set reg expr) (set mem reg)
4086 where reg is the reaching reg used in the load. We checked in
4087 compute_ld_motion_mems that we can replace (set mem expr) with
4088 (set reg expr) in that insn. */
a13d4ebf 4089 rtx list = mem_ptr->stores;
589005ff 4090
a13d4ebf
AM
4091 for ( ; list != NULL_RTX; list = XEXP (list, 1))
4092 {
4093 rtx insn = XEXP (list, 0);
4094 rtx pat = PATTERN (insn);
4095 rtx src = SET_SRC (pat);
4096 rtx reg = expr->reaching_reg;
038dc49a 4097 rtx copy;
a13d4ebf
AM
4098
4099 /* If we've already copied it, continue. */
4100 if (expr->reaching_reg == src)
4101 continue;
589005ff 4102
10d22567 4103 if (dump_file)
a13d4ebf 4104 {
10d22567 4105 fprintf (dump_file, "PRE: store updated with reaching reg ");
43c8a043 4106 print_rtl (dump_file, reg);
10d22567
ZD
4107 fprintf (dump_file, ":\n ");
4108 print_inline_rtx (dump_file, insn, 8);
4109 fprintf (dump_file, "\n");
a13d4ebf 4110 }
589005ff 4111
4a81774c 4112 copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
038dc49a 4113 emit_insn_before (copy, insn);
a13d4ebf 4114 SET_SRC (pat) = reg;
6fb5fa3c 4115 df_insn_rescan (insn);
a13d4ebf
AM
4116
4117 /* un-recognize this pattern since it's probably different now. */
4118 INSN_CODE (insn) = -1;
4119 gcse_create_count++;
4120 }
4121 }
4122}
4123\f
df35c271
SB
4124/* Return true if the graph is too expensive to optimize. PASS is the
4125 optimization about to be performed. */
47a3dae1 4126
df35c271
SB
4127static bool
4128is_too_expensive (const char *pass)
4129{
4130 /* Trying to perform global optimizations on flow graphs which have
4131 a high connectivity will take a long time and is unlikely to be
4132 particularly useful.
aaa4ca30 4133
df35c271
SB
4134 In normal circumstances a cfg should have about twice as many
4135 edges as blocks. But we do not want to punish small functions
4136 which have a couple of switch statements. Rather than simply
4137 thresholding the number of blocks, use something with a more
4138 graceful degradation. */
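 /* For example (hypothetical sizes): a function with 1000 basic
 blocks is only rejected here once it has more than 24000 edges
 (20000 + 1000 * 4), i.e. an average of 24 edges per block, far
 above the roughly two edges per block of an ordinary CFG. */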
dc936fb2 4139 if (n_edges_for_fn (cfun) > 20000 + n_basic_blocks_for_fn (cfun) * 4)
df35c271
SB
4140 {
4141 warning (OPT_Wdisabled_optimization,
4142 "%s: %d basic blocks and %d edges/basic block",
0cae8d31 4143 pass, n_basic_blocks_for_fn (cfun),
dc936fb2 4144 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun));
a13d4ebf 4145
df35c271
SB
4146 return true;
4147 }
a13d4ebf 4148
e45425ec 4149 /* If allocating memory for the dataflow bitmaps would take up too much
df35c271 4150 storage it's better just to disable the optimization. */
0cae8d31 4151 if ((n_basic_blocks_for_fn (cfun)
df35c271
SB
4152 * SBITMAP_SET_SIZE (max_reg_num ())
4153 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
4154 {
4155 warning (OPT_Wdisabled_optimization,
4156 "%s: %d basic blocks and %d registers",
0cae8d31 4157 pass, n_basic_blocks_for_fn (cfun), max_reg_num ());
a13d4ebf 4158
df35c271
SB
4159 return true;
4160 }
adfcce61 4161
df35c271 4162 return false;
01c43039 4163}
df35c271 4164\f
df35c271
SB
4165static unsigned int
4166execute_rtl_pre (void)
4167{
f2b01cfb 4168 int changed;
df35c271 4169 delete_unreachable_blocks ();
df35c271 4170 df_analyze ();
f2b01cfb
RG
4171 changed = one_pre_gcse_pass ();
4172 flag_rerun_cse_after_global_opts |= changed;
4173 if (changed)
4174 cleanup_cfg (0);
df35c271
SB
4175 return 0;
4176}
aaa4ca30 4177
df35c271
SB
4178static unsigned int
4179execute_rtl_hoist (void)
4180{
f2b01cfb 4181 int changed;
df35c271 4182 delete_unreachable_blocks ();
df35c271 4183 df_analyze ();
f2b01cfb
RG
4184 changed = one_code_hoisting_pass ();
4185 flag_rerun_cse_after_global_opts |= changed;
4186 if (changed)
4187 cleanup_cfg (0);
df35c271
SB
4188 return 0;
4189}
ef330312 4190
27a4cd48
DM
4191namespace {
4192
4193const pass_data pass_data_rtl_pre =
ef330312 4194{
27a4cd48
DM
4195 RTL_PASS, /* type */
4196 "rtl pre", /* name */
4197 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
4198 true, /* has_execute */
4199 TV_PRE, /* tv_id */
4200 PROP_cfglayout, /* properties_required */
4201 0, /* properties_provided */
4202 0, /* properties_destroyed */
4203 0, /* todo_flags_start */
3bea341f 4204 TODO_df_finish, /* todo_flags_finish */
5f39ad47 4205};
ef330312 4206
27a4cd48
DM
4207class pass_rtl_pre : public rtl_opt_pass
4208{
4209public:
c3284718
RS
4210 pass_rtl_pre (gcc::context *ctxt)
4211 : rtl_opt_pass (pass_data_rtl_pre, ctxt)
27a4cd48
DM
4212 {}
4213
4214 /* opt_pass methods: */
1a3d085c 4215 virtual bool gate (function *);
be55bfe6 4216 virtual unsigned int execute (function *) { return execute_rtl_pre (); }
27a4cd48
DM
4217
4218}; // class pass_rtl_pre
4219
1a3d085c
TS
4220/* We do not construct an accurate cfg in functions which call
4221 setjmp, so none of these passes runs if the function calls
4222 setjmp.
4223 FIXME: Should just handle setjmp via REG_SETJMP notes. */
4224
4225bool
4226pass_rtl_pre::gate (function *fun)
4227{
4228 return optimize > 0 && flag_gcse
4229 && !fun->calls_setjmp
4230 && optimize_function_for_speed_p (fun)
4231 && dbg_cnt (pre);
4232}
4233
27a4cd48
DM
4234} // anon namespace
4235
4236rtl_opt_pass *
4237make_pass_rtl_pre (gcc::context *ctxt)
4238{
4239 return new pass_rtl_pre (ctxt);
4240}
4241
4242namespace {
4243
4244const pass_data pass_data_rtl_hoist =
ef330312 4245{
27a4cd48
DM
4246 RTL_PASS, /* type */
4247 "hoist", /* name */
4248 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
4249 true, /* has_execute */
4250 TV_HOIST, /* tv_id */
4251 PROP_cfglayout, /* properties_required */
4252 0, /* properties_provided */
4253 0, /* properties_destroyed */
4254 0, /* todo_flags_start */
3bea341f 4255 TODO_df_finish, /* todo_flags_finish */
ef330312
PB
4256};
4257
27a4cd48
DM
4258class pass_rtl_hoist : public rtl_opt_pass
4259{
4260public:
c3284718
RS
4261 pass_rtl_hoist (gcc::context *ctxt)
4262 : rtl_opt_pass (pass_data_rtl_hoist, ctxt)
27a4cd48
DM
4263 {}
4264
4265 /* opt_pass methods: */
1a3d085c 4266 virtual bool gate (function *);
be55bfe6 4267 virtual unsigned int execute (function *) { return execute_rtl_hoist (); }
27a4cd48
DM
4268
4269}; // class pass_rtl_hoist
4270
1a3d085c
TS
4271bool
4272pass_rtl_hoist::gate (function *)
4273{
4274 return optimize > 0 && flag_gcse
4275 && !cfun->calls_setjmp
4276 /* It does not make sense to run code hoisting unless we are optimizing
4277 for code size -- it rarely makes programs faster, and can make them
4278 bigger if we did PRE (when optimizing for space, we don't run PRE). */
4279 && optimize_function_for_size_p (cfun)
4280 && dbg_cnt (hoist);
4281}
4282
27a4cd48
DM
4283} // anon namespace
4284
4285rtl_opt_pass *
4286make_pass_rtl_hoist (gcc::context *ctxt)
4287{
4288 return new pass_rtl_hoist (ctxt);
4289}
4290
e2500fed 4291#include "gt-gcse.h"