/* Generic partial redundancy elimination with lazy code motion support.
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* These routines are meant to be used by various optimization
   passes which can be modeled as lazy code motion problems.
   Including, but not limited to:

     * Traditional partial redundancy elimination.

     * Placement of caller/callee register save/restores.

     * Load/store motion.

     * Copy motion.

     * Conversion of flat register files to a stacked register
       model.

     * Dead load/store elimination.

   These routines accept as input:

     * Basic block information (number of blocks, lists of
       predecessors and successors).  Note the granularity
       does not need to be basic blocks; the items could be
       statements or functions.

     * Bitmaps of local properties (computed, transparent and
       anticipatable expressions).

   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"
#include "sbitmap.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

/* Edge based LCM routines.  */
static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
                              sbitmap *, sbitmap *, sbitmap *);
static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
                             sbitmap *, sbitmap *);
static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
                                   sbitmap *, sbitmap *, sbitmap *, sbitmap *);

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
                              sbitmap *, sbitmap *, sbitmap *);
static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
                               sbitmap *, sbitmap *);
static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
                                       sbitmap *, sbitmap *, sbitmap *,
                                       sbitmap *);

/* Edge based lcm routines.  */

/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.  */

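/* Editorial note, inferred from the implementation below rather than taken
   from the original comments: the fixed point computed here satisfies

       ANTOUT(B) = intersection of ANTIN(S) over all successors S of B
                   (the empty set for predecessors of the EXIT block)
       ANTIN(B)  = ANTLOC(B) | (TRANSP(B) & ANTOUT(B))

   which is the standard backward anticipatability problem.  */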
static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
                       sbitmap *antout)
{
  basic_block bb;
  edge e;
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  sbitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    e->src->aux = EXIT_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
        qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR)
        /* Do not clear the aux field for blocks which are predecessors of
           the EXIT block.  That way we never add them to the worklist
           again.  */
        sbitmap_zero (antout[bb->index]);
      else
        {
          /* Clear the aux field of this block so that it can be added to
             the worklist again if necessary.  */
          bb->aux = NULL;
          sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
        }

      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
                                   transp[bb->index], antout[bb->index]))
        /* If the in state of this block changed, then we need
           to add the predecessors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->preds)
          if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
            {
              *qin++ = e->src;
              e->src->aux = e;
              qlen++;
              if (qin >= qend)
                qin = worklist;
            }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}

/* Compute the earliest vector for edge based lcm.  */

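/* Editorial note, derived from the code below rather than from the original
   comments: for an edge E from block P to block S this computes

       EARLIEST(E) = (ANTIN(S) & ~AVOUT(P)) & (KILL(P) | ~ANTOUT(P))

   with the boundary cases EARLIEST(E) = ANTIN(S) when P is the ENTRY block
   and EARLIEST(E) = 0 when S is the EXIT block.  */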
static void
compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
                  sbitmap *antout, sbitmap *avout, sbitmap *kill,
                  sbitmap *earliest)
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (pred == ENTRY_BLOCK_PTR)
        sbitmap_copy (earliest[x], antin[succ->index]);
      else
        {
          if (succ == EXIT_BLOCK_PTR)
            sbitmap_zero (earliest[x]);
          else
            {
              sbitmap_difference (difference, antin[succ->index],
                                  avout[pred->index]);
              sbitmap_not (temp_bitmap, antout[pred->index]);
              sbitmap_a_and_b_or_c (earliest[x], difference,
                                    kill[pred->index], temp_bitmap);
            }
        }
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}

/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

     laterin(ENTRY) is defined as all 0's
     later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
     laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

     We prime the worklist with all the normal basic blocks.  The ENTRY
     block can never be added to the worklist since it is never the
     successor of any block.  We explicitly prevent the EXIT block from
     being added to the worklist.

     We optimistically initialize LATER.  That is the only time this
     routine will compute LATER for an edge out of the entry block since
     the entry block is never on the worklist.  Thus, LATERIN is neither
     used nor computed for the ENTRY block.

     Since the EXIT block is never added to the worklist, we will neither
     use nor compute LATERIN for the exit block.  Edges which reach the
     EXIT block are handled in the normal fashion inside the loop.
     However, the insertion/deletion computation needs LATERIN(EXIT),
     so we have to compute it.  */

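/* Editorial note, derived from the code below: the fixed point computed
   here satisfies, for an edge E from block P to block S,

       LATER(E)   = EARLIEST(E) | (LATERIN(P) & ~ANTLOC(P))
       LATERIN(B) = intersection of LATER(E) over all edges E entering B

   LATERIN for the EXIT block is accumulated in the extra element of the
   LATERIN array, as described above.  */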
static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
                 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks);

  /* Initialize a mapping from each edge to its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  sbitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
        qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      sbitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
        sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
                         later[(size_t) e->aux]);

      /* Calculate LATER for all outgoing edges.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
                                      earliest[(size_t) e->aux],
                                      laterin[e->src->index],
                                      antloc[e->src->index])
            /* If LATER for an outgoing edge was changed, then we need
               to add the target of the outgoing edge to the worklist.  */
            && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
          {
            *qin++ = e->dest;
            e->dest->aux = e;
            qlen++;
            if (qin >= qend)
              qin = worklist;
          }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  sbitmap_ones (laterin[last_basic_block]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    sbitmap_a_and_b (laterin[last_basic_block],
                     laterin[last_basic_block],
                     later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}

/* Compute the insertion and deletion points for edge based LCM.  */

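/* Editorial note, derived from the code below: for each block B and each
   edge E entering block S,

       DELETE(B) = ANTLOC(B) & ~LATERIN(B)
       INSERT(E) = LATER(E) & ~LATERIN(S)

   where LATERIN of the EXIT block lives in the extra array element.  */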
static void
compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
                       sbitmap *later, sbitmap *laterin, sbitmap *insert,
                       sbitmap *del)
{
  int x;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_difference (del[bb->index], antloc[bb->index],
                        laterin[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);

      if (b == EXIT_BLOCK_PTR)
        sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
      else
        sbitmap_difference (insert[x], later[x], laterin[b->index]);
    }
}

/* Given local properties TRANSP, ANTLOC, AVLOC, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edgelist which is used to
   map the insert vector to what edge an expression should be inserted on.  */

struct edge_list *
pre_edge_lcm (int n_exprs, sbitmap *transp,
              sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
              sbitmap **insert, sbitmap **del)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "antin", "", antin, last_basic_block);
      dump_sbitmap_vector (dump_file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_sbitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif

  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
      dump_sbitmap_vector (dump_file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
                           last_basic_block);
    }
#endif

  return edge_list;
}
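
/* Editorial illustration, not part of the original file: a minimal sketch of
   how an optimization pass might drive pre_edge_lcm above.  The helper name
   and the way the local property bitmaps are obtained are hypothetical; a
   real pass computes TRANSP, AVLOC, ANTLOC and KILL from its own analysis.
   The block is guarded with #if 0 so it is never compiled.  */
#if 0
static void
example_lcm_driver (int n_exprs, sbitmap *transp, sbitmap *avloc,
                    sbitmap *antloc, sbitmap *kill)
{
  sbitmap *insert_map, *delete_map;
  struct edge_list *edge_list;

  /* Solve the LCM dataflow problems.  The returned edge list maps row E of
     INSERT_MAP to the edge the corresponding expressions belong on.  */
  edge_list = pre_edge_lcm (n_exprs, transp, avloc, antloc, kill,
                            &insert_map, &delete_map);

  /* A real pass would now insert the expressions set in insert_map[e]
     on edge INDEX_EDGE (edge_list, e) for each of NUM_EDGES (edge_list)
     edges, and remove the computations set in delete_map[bb->index]
     from each block BB.  */

  sbitmap_vector_free (insert_map);
  sbitmap_vector_free (delete_map);
  free_edge_list (edge_list);
}
#endif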

/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   Iterates until a fixed point (maximal) solution is reached.  */

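/* Editorial note, derived from the implementation below: the fixed point
   computed here satisfies

       AVIN(B)  = intersection of AVOUT(P) over all predecessors P of B
                  (the empty set for successors of the ENTRY block)
       AVOUT(B) = AVLOC(B) | (AVIN(B) & ~KILL(B))

   i.e. the standard forward availability problem.  */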
void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
                   sbitmap *avin)
{
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist =
    XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  sbitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->dest->aux = ENTRY_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
        qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
         intersection of avouts is the null set.  We can identify such blocks
         by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR)
        /* Do not clear the aux field for blocks which are successors of the
           ENTRY block.  That way we never add them to the worklist again.  */
        sbitmap_zero (avin[bb->index]);
      else
        {
          /* Clear the aux field of this block so that it can be added to
             the worklist again if necessary.  */
          bb->aux = NULL;
          sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
        }

      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
                                    avin[bb->index], kill[bb->index]))
        /* If the out state of this block changed, then we need
           to add the successors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->succs)
          if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
            {
              *qin++ = e->dest;
              e->dest->aux = e;
              qlen++;

              if (qin >= qend)
                qin = worklist;
            }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}

/* Compute the farthest vector for edge based lcm.  */

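/* Editorial note, derived from the code below: this is the mirror image of
   compute_earliest on the reverse flowgraph.  For an edge E from block P to
   block S it computes

       FARTHEST(E) = (ST_AVOUT(P) & ~ST_ANTIN(S)) & (KILL(S) | ~ST_AVIN(S))

   with FARTHEST(E) = ST_AVOUT(P) when S is the EXIT block and
   FARTHEST(E) = 0 when P is the ENTRY block.  */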
static void
compute_farthest (struct edge_list *edge_list, int n_exprs,
                  sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
                  sbitmap *kill, sbitmap *farthest)
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (succ == EXIT_BLOCK_PTR)
        sbitmap_copy (farthest[x], st_avout[pred->index]);
      else
        {
          if (pred == ENTRY_BLOCK_PTR)
            sbitmap_zero (farthest[x]);
          else
            {
              sbitmap_difference (difference, st_avout[pred->index],
                                  st_antin[succ->index]);
              sbitmap_not (temp_bitmap, st_avin[succ->index]);
              sbitmap_a_and_b_or_c (farthest[x], difference,
                                    kill[succ->index], temp_bitmap);
            }
        }
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}

/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin; additional comments on the
   implementation can be found before compute_laterin.  */

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
                   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  sbitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    sbitmap_copy (nearer[(size_t) e->aux], farthest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB (bb)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      sbitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
        sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
                         nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
                                      farthest[(size_t) e->aux],
                                      nearerout[e->dest->index],
                                      st_avloc[e->dest->index])
            /* If NEARER for an incoming edge was changed, then we need
               to add the source of the incoming edge to the worklist.  */
            && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
          {
            *tos++ = e->src;
            e->src->aux = e;
          }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  sbitmap_ones (nearerout[last_basic_block]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    sbitmap_a_and_b (nearerout[last_basic_block],
                     nearerout[last_basic_block],
                     nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (tos);
}

/* Compute the insertion and deletion points for edge based reverse LCM.  */

static void
compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
                           sbitmap *nearer, sbitmap *nearerout,
                           sbitmap *insert, sbitmap *del)
{
  int x;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_difference (del[bb->index], st_avloc[bb->index],
                        nearerout[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
      if (b == ENTRY_BLOCK_PTR)
        sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
      else
        sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
    }
}

/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.  */

struct edge_list *
pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
                  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
                  sbitmap **insert, sbitmap **del)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  sbitmap_vector_zero (st_antin, last_basic_block);
  sbitmap_vector_zero (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
                    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_sbitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif

  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "nearerout", "", nearerout,
                           last_basic_block + 1);
      dump_sbitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
                             *insert, *del);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
                           last_basic_block);
    }
#endif
  return edge_list;
}