/* flow.c -- data flow analysis for the GNU compiler.
   (A git-blame table header that leaked into this extraction was removed
   here; it was not part of the source.)  */
d7429b6a | 1 | /* Data flow analysis for GNU compiler. |
081f5e7e | 2 | Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc. |
d7429b6a RK |
3 | |
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
a35311b0 RK |
18 | the Free Software Foundation, 59 Temple Place - Suite 330, |
19 | Boston, MA 02111-1307, USA. */ | |
d7429b6a RK |
20 | |
21 | ||
22 | /* This file contains the data flow analysis pass of the compiler. | |
23 | It computes data flow information | |
24 | which tells combine_instructions which insns to consider combining | |
25 | and controls register allocation. | |
26 | ||
27 | Additional data flow information that is too bulky to record | |
28 | is generated during the analysis, and is used at that time to | |
29 | create autoincrement and autodecrement addressing. | |
30 | ||
31 | The first step is dividing the function into basic blocks. | |
32 | find_basic_blocks does this. Then life_analysis determines | |
33 | where each register is live and where it is dead. | |
34 | ||
35 | ** find_basic_blocks ** | |
36 | ||
37 | find_basic_blocks divides the current function's rtl | |
38 | into basic blocks. It records the beginnings and ends of the | |
39 | basic blocks in the vectors basic_block_head and basic_block_end, | |
40 | and the number of blocks in n_basic_blocks. | |
41 | ||
42 | find_basic_blocks also finds any unreachable loops | |
43 | and deletes them. | |
44 | ||
45 | ** life_analysis ** | |
46 | ||
47 | life_analysis is called immediately after find_basic_blocks. | |
48 | It uses the basic block information to determine where each | |
49 | hard or pseudo register is live. | |
50 | ||
51 | ** live-register info ** | |
52 | ||
53 | The information about where each register is live is in two parts: | |
54 | the REG_NOTES of insns, and the vector basic_block_live_at_start. | |
55 | ||
56 | basic_block_live_at_start has an element for each basic block, | |
57 | and the element is a bit-vector with a bit for each hard or pseudo | |
58 | register. The bit is 1 if the register is live at the beginning | |
59 | of the basic block. | |
60 | ||
61 | Two types of elements can be added to an insn's REG_NOTES. | |
62 | A REG_DEAD note is added to an insn's REG_NOTES for any register | |
63 | that meets both of two conditions: The value in the register is not | |
64 | needed in subsequent insns and the insn does not replace the value in | |
65 | the register (in the case of multi-word hard registers, the value in | |
66 | each register must be replaced by the insn to avoid a REG_DEAD note). | |
67 | ||
68 | In the vast majority of cases, an object in a REG_DEAD note will be | |
69 | used somewhere in the insn. The (rare) exception to this is if an | |
70 | insn uses a multi-word hard register and only some of the registers are | |
71 | needed in subsequent insns. In that case, REG_DEAD notes will be | |
72 | provided for those hard registers that are not subsequently needed. | |
73 | Partial REG_DEAD notes of this type do not occur when an insn sets | |
74 | only some of the hard registers used in such a multi-word operand; | |
75 | omitting REG_DEAD notes for objects stored in an insn is optional and | |
76 | the desire to do so does not justify the complexity of the partial | |
77 | REG_DEAD notes. | |
78 | ||
79 | REG_UNUSED notes are added for each register that is set by the insn | |
80 | but is unused subsequently (if every register set by the insn is unused | |
81 | and the insn does not reference memory or have some other side-effect, | |
82 | the insn is deleted instead). If only part of a multi-word hard | |
83 | register is used in a subsequent insn, REG_UNUSED notes are made for | |
84 | the parts that will not be used. | |
85 | ||
86 | To determine which registers are live after any insn, one can | |
87 | start from the beginning of the basic block and scan insns, noting | |
88 | which registers are set by each insn and which die there. | |
89 | ||
90 | ** Other actions of life_analysis ** | |
91 | ||
92 | life_analysis sets up the LOG_LINKS fields of insns because the | |
93 | information needed to do so is readily available. | |
94 | ||
95 | life_analysis deletes insns whose only effect is to store a value | |
96 | that is never used. | |
97 | ||
98 | life_analysis notices cases where a reference to a register as | |
99 | a memory address can be combined with a preceding or following | |
100 | incrementation or decrementation of the register. The separate | |
101 | instruction to increment or decrement is deleted and the address | |
102 | is changed to a POST_INC or similar rtx. | |
103 | ||
104 | Each time an incrementing or decrementing address is created, | |
105 | a REG_INC element is added to the insn's REG_NOTES list. | |
106 | ||
107 | life_analysis fills in certain vectors containing information about | |
108 | register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length, | |
fdb8a883 JW |
109 | reg_n_calls_crosses and reg_basic_block. |
110 | ||
111 | life_analysis sets current_function_sp_is_unchanging if the function | |
112 | doesn't modify the stack pointer. */ | |
d7429b6a | 113 | \f |
d7429b6a | 114 | #include "config.h" |
670ee920 | 115 | #include "system.h" |
d7429b6a RK |
116 | #include "rtl.h" |
117 | #include "basic-block.h" | |
118 | #include "insn-config.h" | |
119 | #include "regs.h" | |
120 | #include "hard-reg-set.h" | |
121 | #include "flags.h" | |
122 | #include "output.h" | |
3d195391 | 123 | #include "except.h" |
2e107e9e | 124 | #include "toplev.h" |
d7429b6a RK |
125 | |
126 | #include "obstack.h" | |
127 | #define obstack_chunk_alloc xmalloc | |
128 | #define obstack_chunk_free free | |
129 | ||
7eb136d6 MM |
130 | /* The contents of the current function definition are allocated |
131 | in this obstack, and all are freed at the end of the function. | |
132 | For top-level functions, this is temporary_obstack. | |
133 | Separate obstacks are made for nested functions. */ | |
134 | ||
135 | extern struct obstack *function_obstack; | |
136 | ||
d7429b6a RK |
137 | /* List of labels that must never be deleted. */ |
138 | extern rtx forced_labels; | |
139 | ||
140 | /* Get the basic block number of an insn. | |
141 | This info should not be expected to remain available | |
142 | after the end of life_analysis. */ | |
143 | ||
144 | /* This is the limit of the allocated space in the following two arrays. */ | |
145 | ||
146 | static int max_uid_for_flow; | |
147 | ||
148 | #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)] | |
149 | ||
150 | /* This is where the BLOCK_NUM values are really stored. | |
151 | This is set up by find_basic_blocks and used there and in life_analysis, | |
152 | and then freed. */ | |
153 | ||
5ece9746 | 154 | int *uid_block_number; |
d7429b6a RK |
155 | |
156 | /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */ | |
157 | ||
158 | #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)] | |
159 | static char *uid_volatile; | |
160 | ||
161 | /* Number of basic blocks in the current function. */ | |
162 | ||
163 | int n_basic_blocks; | |
164 | ||
165 | /* Maximum register number used in this function, plus one. */ | |
166 | ||
167 | int max_regno; | |
168 | ||
0f41302f MS |
169 | /* Maximum number of SCRATCH rtx's used in any basic block of this |
170 | function. */ | |
d7429b6a RK |
171 | |
172 | int max_scratch; | |
173 | ||
174 | /* Number of SCRATCH rtx's in the current block. */ | |
175 | ||
176 | static int num_scratch; | |
177 | ||
b1f21e0a | 178 | /* Indexed by n, giving various register information */ |
d7429b6a | 179 | |
6feacd09 | 180 | varray_type reg_n_info; |
d7429b6a | 181 | |
a494747c MM |
182 | /* Size of the reg_n_info table. */ |
183 | ||
184 | unsigned int reg_n_max; | |
185 | ||
d7429b6a RK |
186 | /* Element N is the next insn that uses (hard or pseudo) register number N |
187 | within the current basic block; or zero, if there is no such insn. | |
188 | This is valid only during the final backward scan in propagate_block. */ | |
189 | ||
190 | static rtx *reg_next_use; | |
191 | ||
192 | /* Size of a regset for the current function, | |
193 | in (1) bytes and (2) elements. */ | |
194 | ||
195 | int regset_bytes; | |
196 | int regset_size; | |
197 | ||
198 | /* Element N is first insn in basic block N. | |
199 | This info lasts until we finish compiling the function. */ | |
200 | ||
201 | rtx *basic_block_head; | |
202 | ||
203 | /* Element N is last insn in basic block N. | |
204 | This info lasts until we finish compiling the function. */ | |
205 | ||
206 | rtx *basic_block_end; | |
207 | ||
4d1d8045 BS |
208 | /* Element N indicates whether basic block N can be reached through a |
209 | computed jump. */ | |
210 | ||
211 | char *basic_block_computed_jump_target; | |
212 | ||
d7429b6a RK |
213 | /* Element N is a regset describing the registers live |
214 | at the start of basic block N. | |
215 | This info lasts until we finish compiling the function. */ | |
216 | ||
217 | regset *basic_block_live_at_start; | |
218 | ||
219 | /* Regset of regs live when calls to `setjmp'-like functions happen. */ | |
220 | ||
221 | regset regs_live_at_setjmp; | |
222 | ||
223 | /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers | |
224 | that have to go in the same hard reg. | |
225 | The first two regs in the list are a pair, and the next two | |
226 | are another pair, etc. */ | |
227 | rtx regs_may_share; | |
228 | ||
229 | /* Element N is nonzero if control can drop into basic block N | |
230 | from the preceding basic block. Freed after life_analysis. */ | |
231 | ||
232 | static char *basic_block_drops_in; | |
233 | ||
234 | /* Element N is depth within loops of the last insn in basic block number N. | |
235 | Freed after life_analysis. */ | |
236 | ||
237 | static short *basic_block_loop_depth; | |
238 | ||
d7429b6a RK |
239 | /* Depth within loops of basic block being scanned for lifetime analysis, |
240 | plus one. This is the weight attached to references to registers. */ | |
241 | ||
242 | static int loop_depth; | |
243 | ||
244 | /* During propagate_block, this is non-zero if the value of CC0 is live. */ | |
245 | ||
246 | static int cc0_live; | |
247 | ||
248 | /* During propagate_block, this contains the last MEM stored into. It | |
249 | is used to eliminate consecutive stores to the same location. */ | |
250 | ||
251 | static rtx last_mem_set; | |
252 | ||
253 | /* Set of registers that may be eliminable. These are handled specially | |
254 | in updating regs_ever_live. */ | |
255 | ||
256 | static HARD_REG_SET elim_reg_set; | |
257 | ||
258 | /* Forward declarations */ | |
4a7bd8e2 | 259 | static void find_basic_blocks_1 PROTO((rtx, rtx)); |
dc2ede84 | 260 | static void make_edges PROTO((int)); |
e658434c | 261 | static void mark_label_ref PROTO((rtx, rtx, int)); |
dc2ede84 BS |
262 | static int delete_unreachable_blocks PROTO((void)); |
263 | static int delete_block PROTO((int)); | |
5ece9746 | 264 | static void life_analysis_1 PROTO((rtx, int)); |
e658434c RK |
265 | static void propagate_block PROTO((regset, rtx, rtx, int, |
266 | regset, int)); | |
8329b5ec | 267 | static rtx flow_delete_insn PROTO((rtx)); |
dc2ede84 BS |
268 | static int set_noop_p PROTO((rtx)); |
269 | static int noop_move_p PROTO((rtx)); | |
270 | static void record_volatile_insns PROTO((rtx)); | |
271 | static void mark_regs_live_at_end PROTO((regset)); | |
e658434c RK |
272 | static int insn_dead_p PROTO((rtx, regset, int)); |
273 | static int libcall_dead_p PROTO((rtx, regset, rtx, rtx)); | |
274 | static void mark_set_regs PROTO((regset, regset, rtx, | |
275 | rtx, regset)); | |
276 | static void mark_set_1 PROTO((regset, regset, rtx, | |
277 | rtx, regset)); | |
1d300e19 | 278 | #ifdef AUTO_INC_DEC |
e658434c | 279 | static void find_auto_inc PROTO((regset, rtx, rtx)); |
e658434c RK |
280 | static int try_pre_increment_1 PROTO((rtx)); |
281 | static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT)); | |
1d300e19 KG |
282 | #endif |
283 | static void mark_used_regs PROTO((regset, regset, rtx, int, rtx)); | |
e658434c | 284 | void dump_flow_info PROTO((FILE *)); |
5ece9746 JL |
285 | static void add_pred_succ PROTO ((int, int, int_list_ptr *, |
286 | int_list_ptr *, int *, int *)); | |
287 | static int_list_ptr alloc_int_list_node PROTO ((int_list_block **)); | |
288 | static int_list_ptr add_int_list_node PROTO ((int_list_block **, | |
289 | int_list **, int)); | |
04821e98 JL |
290 | static void init_regset_vector PROTO ((regset *, int, |
291 | struct obstack *)); | |
4c649323 JL |
292 | static void count_reg_sets_1 PROTO ((rtx)); |
293 | static void count_reg_sets PROTO ((rtx)); | |
294 | static void count_reg_references PROTO ((rtx)); | |
fdb8a883 | 295 | static void notice_stack_pointer_modification PROTO ((rtx, rtx)); |
d7429b6a | 296 | \f |
5ece9746 | 297 | /* Find basic blocks of the current function. |
d7429b6a | 298 | F is the first insn of the function and NREGS the number of register numbers |
5ece9746 JL |
299 | in use. |
300 | LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to | |
301 | be reachable. This turns on a kludge that causes the control flow | |
302 | information to be inaccurate and not suitable for passes like GCSE. */ | |
d7429b6a RK |
303 | |
304 | void | |
9265dacf | 305 | find_basic_blocks (f, nregs, file) |
d7429b6a RK |
306 | rtx f; |
307 | int nregs; | |
308 | FILE *file; | |
309 | { | |
310 | register rtx insn; | |
311 | register int i; | |
d7e4fe8b | 312 | rtx nonlocal_label_list = nonlocal_label_rtx_list (); |
2c3a56ad | 313 | int in_libcall_block = 0; |
5c35539b | 314 | int extra_uids_for_flow = 0; |
d7429b6a | 315 | |
d7429b6a RK |
316 | /* Count the basic blocks. Also find maximum insn uid value used. */ |
317 | ||
318 | { | |
319 | register RTX_CODE prev_code = JUMP_INSN; | |
320 | register RTX_CODE code; | |
74d7ab55 | 321 | int eh_region = 0; |
c2b7e122 | 322 | int call_had_abnormal_edge = 0; |
d7429b6a RK |
323 | |
324 | max_uid_for_flow = 0; | |
325 | ||
326 | for (insn = f, i = 0; insn; insn = NEXT_INSN (insn)) | |
327 | { | |
2c3a56ad JL |
328 | /* Track when we are inside in LIBCALL block. */ |
329 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
330 | && find_reg_note (insn, REG_LIBCALL, NULL_RTX)) | |
331 | in_libcall_block = 1; | |
332 | ||
d7429b6a RK |
333 | code = GET_CODE (insn); |
334 | if (INSN_UID (insn) > max_uid_for_flow) | |
335 | max_uid_for_flow = INSN_UID (insn); | |
5c35539b | 336 | if (code == CODE_LABEL) |
d7429b6a | 337 | i++; |
5c35539b RH |
338 | else if (GET_RTX_CLASS (code) == 'i') |
339 | { | |
340 | if (prev_code == JUMP_INSN || prev_code == BARRIER) | |
341 | i++; | |
342 | else if (prev_code == CALL_INSN) | |
343 | { | |
c2b7e122 | 344 | if (call_had_abnormal_edge) |
5c35539b RH |
345 | i++; |
346 | else | |
347 | { | |
348 | /* Else this call does not force a new block to be | |
349 | created. However, it may still be the end of a basic | |
350 | block if it is followed by a CODE_LABEL or a BARRIER. | |
351 | ||
352 | To disambiguate calls which force new blocks to be | |
353 | created from those which just happen to be at the end | |
354 | of a block we insert nops during find_basic_blocks_1 | |
355 | after calls which are the last insn in a block by | |
356 | chance. We must account for such insns in | |
357 | max_uid_for_flow. */ | |
358 | ||
359 | extra_uids_for_flow++; | |
360 | } | |
361 | } | |
362 | } | |
8cfe18d6 | 363 | |
d06c6389 JW |
364 | /* We change the code of the CALL_INSN, so that it won't start a |
365 | new block. */ | |
366 | if (code == CALL_INSN && in_libcall_block) | |
8cfe18d6 RK |
367 | code = INSN; |
368 | ||
c2b7e122 RH |
369 | /* Record whether this call created an edge. */ |
370 | if (code == CALL_INSN) | |
371 | call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_region); | |
372 | ||
6b67ec08 | 373 | if (code != NOTE) |
d7429b6a | 374 | prev_code = code; |
74d7ab55 JM |
375 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) |
376 | ++eh_region; | |
377 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END) | |
378 | --eh_region; | |
2c3a56ad JL |
379 | |
380 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
381 | && find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
382 | in_libcall_block = 0; | |
d7429b6a RK |
383 | } |
384 | } | |
385 | ||
5ece9746 JL |
386 | n_basic_blocks = i; |
387 | ||
d7429b6a | 388 | #ifdef AUTO_INC_DEC |
5ece9746 JL |
389 | /* Leave space for insns life_analysis makes in some cases for auto-inc. |
390 | These cases are rare, so we don't need too much space. */ | |
d7429b6a RK |
391 | max_uid_for_flow += max_uid_for_flow / 10; |
392 | #endif | |
5c35539b | 393 | max_uid_for_flow += extra_uids_for_flow; |
d7429b6a RK |
394 | |
395 | /* Allocate some tables that last till end of compiling this function | |
396 | and some needed only in find_basic_blocks and life_analysis. */ | |
397 | ||
5ece9746 JL |
398 | basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx)); |
399 | basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx)); | |
400 | basic_block_drops_in = (char *) xmalloc (n_basic_blocks); | |
4d1d8045 | 401 | basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks); |
5ece9746 | 402 | basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short)); |
d7429b6a | 403 | uid_block_number |
5ece9746 JL |
404 | = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int)); |
405 | uid_volatile = (char *) xmalloc (max_uid_for_flow + 1); | |
d7429b6a RK |
406 | bzero (uid_volatile, max_uid_for_flow + 1); |
407 | ||
9265dacf | 408 | find_basic_blocks_1 (f, nonlocal_label_list); |
d7429b6a | 409 | } |
5ece9746 | 410 | |
dc2ede84 BS |
411 | /* For communication between find_basic_blocks_1 and its subroutines. */ |
412 | ||
413 | /* An array of CODE_LABELs, indexed by UID for the start of the active | |
414 | EH handler for each insn in F. */ | |
415 | static int *active_eh_region; | |
416 | static int *nested_eh_region; | |
417 | ||
418 | /* Element N nonzero if basic block N can actually be reached. */ | |
419 | ||
420 | static char *block_live_static; | |
421 | ||
422 | /* List of label_refs to all labels whose addresses are taken | |
423 | and used as data. */ | |
424 | static rtx label_value_list; | |
425 | ||
426 | /* a list of non-local labels in the function. */ | |
427 | static rtx nonlocal_label_list; | |
428 | ||
d7429b6a RK |
429 | /* Find all basic blocks of the function whose first insn is F. |
430 | Store the correct data in the tables that describe the basic blocks, | |
431 | set up the chains of references for each CODE_LABEL, and | |
d7e4fe8b RS |
432 | delete any entire basic blocks that cannot be reached. |
433 | ||
dc2ede84 | 434 | NONLOCAL_LABELS is a list of non-local labels in the function. |
5ece9746 JL |
435 | Blocks that are otherwise unreachable may be reachable with a non-local |
436 | goto. | |
437 | LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to | |
438 | be reachable. This turns on a kludge that causes the control flow | |
439 | information to be inaccurate and not suitable for passes like GCSE. */ | |
d7429b6a RK |
440 | |
441 | static void | |
9265dacf | 442 | find_basic_blocks_1 (f, nonlocal_labels) |
dc2ede84 | 443 | rtx f, nonlocal_labels; |
d7429b6a RK |
444 | { |
445 | register rtx insn; | |
446 | register int i; | |
447 | register char *block_live = (char *) alloca (n_basic_blocks); | |
448 | register char *block_marked = (char *) alloca (n_basic_blocks); | |
dc2ede84 | 449 | rtx note, eh_note; |
e658434c | 450 | enum rtx_code prev_code, code; |
8329b5ec | 451 | int depth, pass; |
2c3a56ad | 452 | int in_libcall_block = 0; |
5c35539b | 453 | int call_had_abnormal_edge = 0; |
d7429b6a | 454 | |
8329b5ec | 455 | pass = 1; |
9a0d1e1b AM |
456 | active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int)); |
457 | nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int)); | |
dc2ede84 | 458 | nonlocal_label_list = nonlocal_labels; |
8329b5ec DE |
459 | restart: |
460 | ||
461 | label_value_list = 0; | |
d7429b6a RK |
462 | block_live_static = block_live; |
463 | bzero (block_live, n_basic_blocks); | |
464 | bzero (block_marked, n_basic_blocks); | |
4d1d8045 | 465 | bzero (basic_block_computed_jump_target, n_basic_blocks); |
9a0d1e1b AM |
466 | bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int)); |
467 | bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int)); | |
4d1d8045 | 468 | current_function_has_computed_jump = 0; |
d7429b6a RK |
469 | |
470 | /* Initialize with just block 0 reachable and no blocks marked. */ | |
471 | if (n_basic_blocks > 0) | |
472 | block_live[0] = 1; | |
473 | ||
e658434c RK |
474 | /* Initialize the ref chain of each label to 0. Record where all the |
475 | blocks start and end and their depth in loops. For each insn, record | |
476 | the block it is in. Also mark as reachable any blocks headed by labels | |
477 | that must not be deleted. */ | |
d7429b6a | 478 | |
2ec1535d | 479 | for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1; |
e658434c RK |
480 | insn; insn = NEXT_INSN (insn)) |
481 | { | |
2c3a56ad JL |
482 | |
483 | /* Track when we are inside in LIBCALL block. */ | |
484 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
485 | && find_reg_note (insn, REG_LIBCALL, NULL_RTX)) | |
486 | in_libcall_block = 1; | |
487 | ||
e658434c RK |
488 | code = GET_CODE (insn); |
489 | if (code == NOTE) | |
490 | { | |
491 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG) | |
492 | depth++; | |
493 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END) | |
494 | depth--; | |
495 | } | |
d7429b6a | 496 | |
e658434c RK |
497 | /* A basic block starts at label, or after something that can jump. */ |
498 | else if (code == CODE_LABEL | |
499 | || (GET_RTX_CLASS (code) == 'i' | |
500 | && (prev_code == JUMP_INSN | |
5c35539b | 501 | || (prev_code == CALL_INSN && call_had_abnormal_edge) |
e658434c RK |
502 | || prev_code == BARRIER))) |
503 | { | |
504 | basic_block_head[++i] = insn; | |
505 | basic_block_end[i] = insn; | |
506 | basic_block_loop_depth[i] = depth; | |
507 | ||
508 | if (code == CODE_LABEL) | |
509 | { | |
5c35539b RH |
510 | LABEL_REFS (insn) = insn; |
511 | /* Any label that cannot be deleted | |
512 | is considered to start a reachable block. */ | |
513 | if (LABEL_PRESERVE_P (insn)) | |
514 | block_live[i] = 1; | |
515 | } | |
516 | ||
517 | /* If the previous insn was a call that did not create an | |
518 | abnormal edge, we want to add a nop so that the CALL_INSN | |
519 | itself is not at basic_block_end. This allows us to easily | |
520 | distinguish between normal calls and those which create | |
521 | abnormal edges in the flow graph. */ | |
522 | ||
523 | if (i > 0 && !call_had_abnormal_edge | |
524 | && GET_CODE (basic_block_end[i-1]) == CALL_INSN) | |
525 | { | |
526 | rtx nop = gen_rtx_USE (VOIDmode, const0_rtx); | |
527 | nop = emit_insn_after (nop, basic_block_end[i-1]); | |
528 | basic_block_end[i-1] = nop; | |
529 | } | |
e658434c | 530 | } |
d7429b6a | 531 | |
e658434c RK |
532 | else if (GET_RTX_CLASS (code) == 'i') |
533 | { | |
534 | basic_block_end[i] = insn; | |
535 | basic_block_loop_depth[i] = depth; | |
42fa3cfb | 536 | } |
e658434c | 537 | |
42fa3cfb JW |
538 | if (GET_RTX_CLASS (code) == 'i') |
539 | { | |
e658434c RK |
540 | /* Make a list of all labels referred to other than by jumps. */ |
541 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
c708eef9 | 542 | if (REG_NOTE_KIND (note) == REG_LABEL |
71038426 | 543 | && XEXP (note, 0) != eh_return_stub_label) |
38a448ca RH |
544 | label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0), |
545 | label_value_list); | |
42fa3cfb | 546 | } |
d7429b6a | 547 | |
9a0d1e1b | 548 | /* Keep a lifo list of the currently active exception notes. */ |
2ec1535d JL |
549 | if (GET_CODE (insn) == NOTE) |
550 | { | |
551 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) | |
552 | { | |
9a0d1e1b AM |
553 | if (eh_note) |
554 | nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = | |
555 | NOTE_BLOCK_NUMBER (XEXP (eh_note, 0)); | |
556 | else | |
557 | nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0; | |
558 | eh_note = gen_rtx_EXPR_LIST (VOIDmode, | |
559 | insn, eh_note); | |
2ec1535d JL |
560 | } |
561 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END) | |
562 | eh_note = XEXP (eh_note, 1); | |
563 | } | |
564 | /* If we encounter a CALL_INSN, note which exception handler it | |
565 | might pass control to. | |
566 | ||
567 | If doing asynchronous exceptions, record the active EH handler | |
568 | for every insn, since most insns can throw. */ | |
569 | else if (eh_note | |
570 | && (asynchronous_exceptions | |
571 | || (GET_CODE (insn) == CALL_INSN | |
2c3a56ad | 572 | && ! in_libcall_block))) |
dc2ede84 | 573 | active_eh_region[INSN_UID (insn)] = |
9a0d1e1b | 574 | NOTE_BLOCK_NUMBER (XEXP (eh_note, 0)); |
e658434c | 575 | BLOCK_NUM (insn) = i; |
d7e4fe8b | 576 | |
d06c6389 JW |
577 | /* We change the code of the CALL_INSN, so that it won't start a |
578 | new block. */ | |
579 | if (code == CALL_INSN && in_libcall_block) | |
580 | code = INSN; | |
581 | ||
5c35539b RH |
582 | /* Record whether this call created an edge. */ |
583 | if (code == CALL_INSN) | |
584 | call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_note); | |
585 | ||
6b67ec08 | 586 | if (code != NOTE) |
e658434c | 587 | prev_code = code; |
2c3a56ad JL |
588 | |
589 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
590 | && find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
591 | in_libcall_block = 0; | |
e658434c RK |
592 | } |
593 | ||
2aec79e2 DE |
594 | /* During the second pass, `n_basic_blocks' is only an upper bound. |
595 | Only perform the sanity check for the first pass, and on the second | |
596 | pass ensure `n_basic_blocks' is set to the correct value. */ | |
597 | if (pass == 1 && i + 1 != n_basic_blocks) | |
e658434c | 598 | abort (); |
2aec79e2 | 599 | n_basic_blocks = i + 1; |
d7429b6a RK |
600 | |
601 | /* Record which basic blocks control can drop in to. */ | |
602 | ||
e658434c RK |
603 | for (i = 0; i < n_basic_blocks; i++) |
604 | { | |
605 | for (insn = PREV_INSN (basic_block_head[i]); | |
606 | insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn)) | |
607 | ; | |
608 | ||
609 | basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER; | |
610 | } | |
d7429b6a RK |
611 | |
612 | /* Now find which basic blocks can actually be reached | |
613 | and put all jump insns' LABEL_REFS onto the ref-chains | |
614 | of their target labels. */ | |
615 | ||
616 | if (n_basic_blocks > 0) | |
617 | { | |
618 | int something_marked = 1; | |
8329b5ec | 619 | int deleted; |
d7429b6a | 620 | |
d7429b6a RK |
621 | /* Pass over all blocks, marking each block that is reachable |
622 | and has not yet been marked. | |
623 | Keep doing this until, in one pass, no blocks have been marked. | |
624 | Then blocks_live and blocks_marked are identical and correct. | |
625 | In addition, all jumps actually reachable have been marked. */ | |
626 | ||
627 | while (something_marked) | |
628 | { | |
629 | something_marked = 0; | |
630 | for (i = 0; i < n_basic_blocks; i++) | |
631 | if (block_live[i] && !block_marked[i]) | |
632 | { | |
633 | block_marked[i] = 1; | |
634 | something_marked = 1; | |
2ec1535d | 635 | |
dc2ede84 | 636 | make_edges (i); |
d7429b6a RK |
637 | } |
638 | } | |
639 | ||
2ec1535d JL |
640 | /* This should never happen. If it does that means we've computed an |
641 | incorrect flow graph, which can lead to aborts/crashes later in the | |
642 | compiler or incorrect code generation. | |
af14ce9c | 643 | |
2ec1535d JL |
644 | We used to try and continue here, but that's just asking for trouble |
645 | later during the compile or at runtime. It's easier to debug the | |
646 | problem here than later! */ | |
af14ce9c RK |
647 | for (i = 1; i < n_basic_blocks; i++) |
648 | if (block_live[i] && ! basic_block_drops_in[i] | |
649 | && GET_CODE (basic_block_head[i]) == CODE_LABEL | |
650 | && LABEL_REFS (basic_block_head[i]) == basic_block_head[i]) | |
2ec1535d | 651 | abort (); |
af14ce9c | 652 | |
dc2ede84 | 653 | deleted = delete_unreachable_blocks (); |
8329b5ec | 654 | |
9faa82d8 | 655 | /* There are pathological cases where one function calling hundreds of |
8329b5ec DE |
656 | nested inline functions can generate lots and lots of unreachable |
657 | blocks that jump can't delete. Since we don't use sparse matrices | |
658 | a lot of memory will be needed to compile such functions. | |
659 | Implementing sparse matrices is a fair bit of work and it is not | |
660 | clear that they win more than they lose (we don't want to | |
661 | unnecessarily slow down compilation of normal code). By making | |
9faa82d8 | 662 | another pass for the pathological case, we can greatly speed up |
8329b5ec DE |
663 | their compilation without hurting normal code. This works because |
664 | all the insns in the unreachable blocks have either been deleted or | |
49b6c81e DE |
665 | turned into notes. |
666 | Note that we're talking about reducing memory usage by 10's of | |
667 | megabytes and reducing compilation time by several minutes. */ | |
8329b5ec DE |
668 | /* ??? The choice of when to make another pass is a bit arbitrary, |
669 | and was derived from empirical data. */ | |
670 | if (pass == 1 | |
49b6c81e | 671 | && deleted > 200) |
8329b5ec DE |
672 | { |
673 | pass++; | |
674 | n_basic_blocks -= deleted; | |
2aec79e2 DE |
675 | /* `n_basic_blocks' may not be correct at this point: two previously |
676 | separate blocks may now be merged. That's ok though as we | |
677 | recalculate it during the second pass. It certainly can't be | |
678 | any larger than the current value. */ | |
8329b5ec DE |
679 | goto restart; |
680 | } | |
d7429b6a RK |
681 | } |
682 | } | |
5ece9746 JL |
683 | |
684 | /* Record INSN's block number as BB. */ | |
685 | ||
686 | void | |
687 | set_block_num (insn, bb) | |
688 | rtx insn; | |
689 | int bb; | |
690 | { | |
691 | if (INSN_UID (insn) >= max_uid_for_flow) | |
692 | { | |
693 | /* Add one-eighth the size so we don't keep calling xrealloc. */ | |
694 | max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8; | |
695 | uid_block_number = (int *) | |
696 | xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int)); | |
697 | } | |
698 | BLOCK_NUM (insn) = bb; | |
699 | } | |
700 | ||
d7429b6a | 701 | \f |
8329b5ec DE |
702 | /* Subroutines of find_basic_blocks. */ |
703 | ||
dc2ede84 BS |
/* For basic block I, make edges and mark live all blocks which are reachable
   from it.

   Reachability is recorded in two ways: block_live_static[] is set for
   every block found reachable, and mark_label_ref chains each LABEL_REF
   onto its target label so the flow graph can be walked later.  Besides
   the ordinary jump/fallthrough edges, this also accounts for forced
   labels, computed jumps, exception handlers and nonlocal gotos.  */
static void
make_edges (i)
     int i;
{
  rtx insn, x;

  /* If control drops into the next block, that block is reachable.  */
  if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
    block_live_static[i + 1] = 1;
  insn = basic_block_end[i];
  if (GET_CODE (insn) == JUMP_INSN)
    mark_label_ref (PATTERN (insn), insn, 0);

  /* If we have any forced labels, mark them as potentially reachable from
     this block.  */
  for (x = forced_labels; x; x = XEXP (x, 1))
    if (! LABEL_REF_NONLOCAL_P (x))
      mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
		      insn, 0);

  /* Now scan the insns for this block, we may need to make edges for some of
     them to various non-obvious locations (exception handlers, nonlocal
     labels, etc).  */
  for (insn = basic_block_head[i];
       insn != NEXT_INSN (basic_block_end[i]);
       insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx note;
	  /* References to labels in non-jumping insns have REG_LABEL notes
	     attached to them.

	     This can happen for computed gotos; we don't care about them
	     here since the values are also on the label_value_list and will
	     be marked live if we find a live computed goto.

	     This can also happen when we take the address of a label to pass
	     as an argument to __throw.  Note throw only uses the value to
	     determine what handler should be called -- ie the label is not
	     used as a jump target, it just marks regions in the code.

	     In theory we should be able to ignore the REG_LABEL notes, but
	     we have to make sure that the label and associated insns aren't
	     marked dead, so we make the block in question live and create an
	     edge from this insn to the label.  This is not strictly correct,
	     but it is close enough for now.

	     See below for code that handles the eh_stub label specially.  */
	  for (note = REG_NOTES (insn);
	       note;
	       note = XEXP (note, 1))
	    {
	      if (REG_NOTE_KIND (note) == REG_LABEL
		  && XEXP (note, 0) != eh_return_stub_label)
		{
		  x = XEXP (note, 0);
		  block_live_static[BLOCK_NUM (x)] = 1;
		  mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
				  insn, 0);
		}
	    }

	  /* If this is a computed jump, then mark it as reaching everything
	     on the label_value_list and forced_labels list.  */
	  if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;
	      for (x = label_value_list; x; x = XEXP (x, 1))
		{
		  int b = BLOCK_NUM (XEXP (x, 0));
		  basic_block_computed_jump_target[b] = 1;
		  mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
				  insn, 0);
		}

	      for (x = forced_labels; x; x = XEXP (x, 1))
		{
		  int b = BLOCK_NUM (XEXP (x, 0));
		  basic_block_computed_jump_target[b] = 1;
		  mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
				  insn, 0);
		}
	    }

	  /* If this is a CALL_INSN, then mark it as reaching the active EH
	     handler for this CALL_INSN.  If we're handling asynchronous
	     exceptions mark every insn as reaching the active EH handler.

	     Also mark the CALL_INSN as reaching any nonlocal goto sites.  */
	  else if (asynchronous_exceptions
		   || (GET_CODE (insn) == CALL_INSN
		       && ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
	    {
	      if (active_eh_region[INSN_UID (insn)])
		{
		  int region;
		  handler_info *ptr;
		  region = active_eh_region[INSN_UID (insn)];
		  /* Walk outward through the nesting of EH regions; every
		     handler of every enclosing region is a possible
		     destination.  */
		  for ( ; region;
			region = nested_eh_region[region])
		    {
		      ptr = get_first_handler (region);
		      for ( ; ptr ; ptr = ptr->next)
			mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
							   ptr->handler_label),
					insn, 0);
		    }
		}
	      if (! asynchronous_exceptions)
		{
		  for (x = nonlocal_label_list; x; x = XEXP (x, 1))
		    mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
				    insn, 0);
		}
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	    }
	}
    }
  /* We know something about the structure of the function __throw in
     libgcc2.c.  It is the only function that ever contains eh_stub labels.
     It modifies its return address so that the last block returns to one of
     the eh_stub labels within it.  So we have to make additional edges in
     the flow graph.  */
  if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
    {
      mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, eh_return_stub_label),
		      basic_block_end[i], 0);
    }
}
841 | ||
d7429b6a RK |
842 | /* Check expression X for label references; |
843 | if one is found, add INSN to the label's chain of references. | |
844 | ||
845 | CHECKDUP means check for and avoid creating duplicate references | |
846 | from the same insn. Such duplicates do no serious harm but | |
847 | can slow life analysis. CHECKDUP is set only when duplicates | |
848 | are likely. */ | |
849 | ||
850 | static void | |
851 | mark_label_ref (x, insn, checkdup) | |
852 | rtx x, insn; | |
853 | int checkdup; | |
854 | { | |
855 | register RTX_CODE code; | |
856 | register int i; | |
857 | register char *fmt; | |
858 | ||
859 | /* We can be called with NULL when scanning label_value_list. */ | |
860 | if (x == 0) | |
861 | return; | |
862 | ||
863 | code = GET_CODE (x); | |
864 | if (code == LABEL_REF) | |
865 | { | |
866 | register rtx label = XEXP (x, 0); | |
867 | register rtx y; | |
868 | if (GET_CODE (label) != CODE_LABEL) | |
869 | abort (); | |
870 | /* If the label was never emitted, this insn is junk, | |
871 | but avoid a crash trying to refer to BLOCK_NUM (label). | |
872 | This can happen as a result of a syntax error | |
873 | and a diagnostic has already been printed. */ | |
874 | if (INSN_UID (label) == 0) | |
875 | return; | |
876 | CONTAINING_INSN (x) = insn; | |
877 | /* if CHECKDUP is set, check for duplicate ref from same insn | |
878 | and don't insert. */ | |
879 | if (checkdup) | |
880 | for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y)) | |
881 | if (CONTAINING_INSN (y) == insn) | |
882 | return; | |
883 | LABEL_NEXTREF (x) = LABEL_REFS (label); | |
884 | LABEL_REFS (label) = x; | |
885 | block_live_static[BLOCK_NUM (label)] = 1; | |
886 | return; | |
887 | } | |
888 | ||
889 | fmt = GET_RTX_FORMAT (code); | |
890 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
891 | { | |
892 | if (fmt[i] == 'e') | |
893 | mark_label_ref (XEXP (x, i), insn, 0); | |
894 | if (fmt[i] == 'E') | |
895 | { | |
896 | register int j; | |
897 | for (j = 0; j < XVECLEN (x, i); j++) | |
898 | mark_label_ref (XVECEXP (x, i, j), insn, 1); | |
899 | } | |
900 | } | |
901 | } | |
8329b5ec | 902 | |
dc2ede84 BS |
903 | /* Now delete the code for any basic blocks that can't be reached. |
904 | They can occur because jump_optimize does not recognize unreachable loops | |
905 | as unreachable. | |
906 | Return the number of deleted blocks. */ | |
907 | static int | |
908 | delete_unreachable_blocks () | |
909 | { | |
910 | int deleted_handler = 0; | |
911 | int deleted = 0; | |
912 | int i; | |
913 | rtx insn; | |
914 | ||
915 | for (i = 0; i < n_basic_blocks; i++) | |
916 | if (! block_live_static[i]) | |
917 | { | |
918 | deleted++; | |
919 | ||
920 | deleted_handler |= delete_block (i); | |
921 | } | |
922 | ||
923 | /* If we deleted an exception handler, we may have EH region | |
924 | begin/end blocks to remove as well. */ | |
925 | if (deleted_handler) | |
926 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
927 | if (GET_CODE (insn) == NOTE) | |
928 | { | |
929 | if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) || | |
930 | (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)) | |
931 | { | |
932 | int num = CODE_LABEL_NUMBER (insn); | |
933 | /* A NULL handler indicates a region is no longer needed */ | |
934 | if (get_first_handler (num) == NULL) | |
935 | { | |
936 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
937 | NOTE_SOURCE_FILE (insn) = 0; | |
938 | } | |
939 | } | |
940 | } | |
941 | return deleted; | |
942 | } | |
943 | ||
944 | /* Delete the insns in a (non-live) block. We physically delete every | |
945 | non-note insn except the start and end (so basic_block_head/end needn't | |
946 | be updated), we turn the latter into NOTE_INSN_DELETED notes. | |
947 | ||
948 | We use to "delete" the insns by turning them into notes, but we may be | |
949 | deleting lots of insns that subsequent passes would otherwise have to | |
950 | process. Secondly, lots of deleted blocks in a row can really slow down | |
951 | propagate_block since it will otherwise process insn-turned-notes multiple | |
952 | times when it looks for loop begin/end notes. | |
953 | ||
954 | Return nonzero if we deleted an exception handler. */ | |
955 | static int | |
956 | delete_block (i) | |
957 | int i; | |
958 | { | |
959 | int deleted_handler = 0; | |
960 | rtx insn; | |
961 | ||
962 | if (basic_block_head[i] != basic_block_end[i]) | |
963 | { | |
964 | /* It would be quicker to delete all of these with a single | |
965 | unchaining, rather than one at a time, but we need to keep | |
966 | the NOTE's. */ | |
967 | insn = NEXT_INSN (basic_block_head[i]); | |
968 | while (insn != basic_block_end[i]) | |
969 | { | |
970 | if (GET_CODE (insn) == BARRIER) | |
971 | abort (); | |
972 | else if (GET_CODE (insn) != NOTE) | |
973 | insn = flow_delete_insn (insn); | |
974 | else | |
975 | insn = NEXT_INSN (insn); | |
976 | } | |
977 | } | |
978 | ||
979 | insn = basic_block_head[i]; | |
980 | if (GET_CODE (insn) != NOTE) | |
981 | { | |
982 | /* Turn the head into a deleted insn note. */ | |
983 | if (GET_CODE (insn) == BARRIER) | |
984 | abort (); | |
985 | ||
986 | /* If the head of this block is a CODE_LABEL, then it might | |
987 | be the label for an exception handler which can't be | |
988 | reached. | |
989 | ||
990 | We need to remove the label from the exception_handler_label | |
991 | list and remove the associated NOTE_EH_REGION_BEG and | |
992 | NOTE_EH_REGION_END notes. */ | |
993 | if (GET_CODE (insn) == CODE_LABEL) | |
994 | { | |
995 | rtx x, *prev = &exception_handler_labels; | |
996 | ||
997 | for (x = exception_handler_labels; x; x = XEXP (x, 1)) | |
998 | { | |
999 | if (XEXP (x, 0) == insn) | |
1000 | { | |
1001 | /* Found a match, splice this label out of the | |
1002 | EH label list. */ | |
1003 | *prev = XEXP (x, 1); | |
1004 | XEXP (x, 1) = NULL_RTX; | |
1005 | XEXP (x, 0) = NULL_RTX; | |
1006 | ||
1007 | /* Remove the handler from all regions */ | |
1008 | remove_handler (insn); | |
1009 | deleted_handler = 1; | |
1010 | break; | |
1011 | } | |
1012 | prev = &XEXP (x, 1); | |
1013 | } | |
1014 | } | |
1015 | ||
1016 | PUT_CODE (insn, NOTE); | |
1017 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1018 | NOTE_SOURCE_FILE (insn) = 0; | |
1019 | } | |
1020 | insn = basic_block_end[i]; | |
1021 | if (GET_CODE (insn) != NOTE) | |
1022 | { | |
1023 | /* Turn the tail into a deleted insn note. */ | |
1024 | if (GET_CODE (insn) == BARRIER) | |
1025 | abort (); | |
1026 | PUT_CODE (insn, NOTE); | |
1027 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1028 | NOTE_SOURCE_FILE (insn) = 0; | |
1029 | } | |
1030 | /* BARRIERs are between basic blocks, not part of one. | |
1031 | Delete a BARRIER if the preceding jump is deleted. | |
1032 | We cannot alter a BARRIER into a NOTE | |
1033 | because it is too short; but we can really delete | |
1034 | it because it is not part of a basic block. */ | |
1035 | if (NEXT_INSN (insn) != 0 | |
1036 | && GET_CODE (NEXT_INSN (insn)) == BARRIER) | |
1037 | delete_insn (NEXT_INSN (insn)); | |
1038 | ||
1039 | /* Each time we delete some basic blocks, | |
1040 | see if there is a jump around them that is | |
1041 | being turned into a no-op. If so, delete it. */ | |
1042 | ||
1043 | if (block_live_static[i - 1]) | |
1044 | { | |
1045 | register int j; | |
1046 | for (j = i + 1; j < n_basic_blocks; j++) | |
1047 | if (block_live_static[j]) | |
1048 | { | |
1049 | rtx label; | |
1050 | insn = basic_block_end[i - 1]; | |
1051 | if (GET_CODE (insn) == JUMP_INSN | |
1052 | /* An unconditional jump is the only possibility | |
1053 | we must check for, since a conditional one | |
1054 | would make these blocks live. */ | |
1055 | && simplejump_p (insn) | |
1056 | && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1) | |
1057 | && INSN_UID (label) != 0 | |
1058 | && BLOCK_NUM (label) == j) | |
1059 | { | |
1060 | int k; | |
1061 | ||
1062 | /* The deleted blocks still show up in the cfg, | |
1063 | so we must set basic_block_drops_in for blocks | |
1064 | I to J inclusive to keep the cfg accurate. */ | |
1065 | for (k = i; k <= j; k++) | |
1066 | basic_block_drops_in[k] = 1; | |
1067 | ||
1068 | PUT_CODE (insn, NOTE); | |
1069 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1070 | NOTE_SOURCE_FILE (insn) = 0; | |
1071 | if (GET_CODE (NEXT_INSN (insn)) != BARRIER) | |
1072 | abort (); | |
1073 | delete_insn (NEXT_INSN (insn)); | |
1074 | } | |
1075 | break; | |
1076 | } | |
1077 | } | |
1078 | ||
1079 | return deleted_handler; | |
1080 | } | |
1081 | ||
8329b5ec DE |
1082 | /* Delete INSN by patching it out. |
1083 | Return the next insn. */ | |
1084 | ||
1085 | static rtx | |
1086 | flow_delete_insn (insn) | |
1087 | rtx insn; | |
1088 | { | |
1089 | /* ??? For the moment we assume we don't have to watch for NULLs here | |
1090 | since the start/end of basic blocks aren't deleted like this. */ | |
1091 | NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn); | |
1092 | PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn); | |
1093 | return NEXT_INSN (insn); | |
1094 | } | |
d7429b6a | 1095 | \f |
5ece9746 JL |
/* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register numbers
   in use.
   If FILE is nonzero, the computed flow information is dumped to it.
   Frees the basic-block vectors (except head/end) when done.  */

void
life_analysis (f, nregs, file)
     rtx f;
     int nregs;
     FILE *file;
{
#ifdef ELIMINABLE_REGS
  register size_t i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  /* The target describes its eliminable register pairs; only the
     FROM side matters for liveness.  */
  for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS, the frame pointer is the only register
     subject to elimination.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  life_analysis_1 (f, nregs);
  if (file)
    dump_flow_info (file);

  /* Keep basic_block_head/end (argument 1); free everything else.  */
  free_basic_block_vars (1);
}
1129 | ||
/* Free the variables allocated by find_basic_blocks.

   KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
   are not to be freed.

   Each pointer is zeroed after freeing so later consumers (and a second
   call to this function) can tell the data is gone.  */

void
free_basic_block_vars (keep_head_end_p)
     int keep_head_end_p;
{
  if (basic_block_drops_in)
    {
      free (basic_block_drops_in);
      /* Tell dump_flow_info this isn't available anymore.  */
      basic_block_drops_in = 0;
    }
  if (basic_block_loop_depth)
    {
      free (basic_block_loop_depth);
      basic_block_loop_depth = 0;
    }
  if (uid_block_number)
    {
      free (uid_block_number);
      uid_block_number = 0;
    }
  if (uid_volatile)
    {
      free (uid_volatile);
      uid_volatile = 0;
    }

  /* basic_block_head and basic_block_end are allocated together, so
     they are freed together too.  */
  if (! keep_head_end_p && basic_block_head)
    {
      free (basic_block_head);
      basic_block_head = 0;
      free (basic_block_end);
      basic_block_end = 0;
    }
}
1169 | ||
dc2ede84 BS |
1170 | /* Return nonzero if the destination of SET equals the source. */ |
1171 | static int | |
1172 | set_noop_p (set) | |
1173 | rtx set; | |
1174 | { | |
1175 | rtx src = SET_SRC (set); | |
1176 | rtx dst = SET_DEST (set); | |
1177 | if (GET_CODE (src) == REG && GET_CODE (dst) == REG | |
1178 | && REGNO (src) == REGNO (dst)) | |
1179 | return 1; | |
1180 | if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG | |
1181 | || SUBREG_WORD (src) != SUBREG_WORD (dst)) | |
1182 | return 0; | |
1183 | src = SUBREG_REG (src); | |
1184 | dst = SUBREG_REG (dst); | |
1185 | if (GET_CODE (src) == REG && GET_CODE (dst) == REG | |
1186 | && REGNO (src) == REGNO (dst)) | |
1187 | return 1; | |
1188 | return 0; | |
1189 | } | |
1190 | ||
1191 | /* Return nonzero if an insn consists only of SETs, each of which only sets a | |
1192 | value to itself. */ | |
1193 | static int | |
1194 | noop_move_p (insn) | |
1195 | rtx insn; | |
1196 | { | |
1197 | rtx pat = PATTERN (insn); | |
1198 | ||
1199 | /* Insns carrying these notes are useful later on. */ | |
1200 | if (find_reg_note (insn, REG_EQUAL, NULL_RTX)) | |
1201 | return 0; | |
1202 | ||
1203 | if (GET_CODE (pat) == SET && set_noop_p (pat)) | |
1204 | return 1; | |
1205 | ||
1206 | if (GET_CODE (pat) == PARALLEL) | |
1207 | { | |
1208 | int i; | |
1209 | /* If nothing but SETs of registers to themselves, | |
1210 | this insn can also be deleted. */ | |
1211 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
1212 | { | |
1213 | rtx tem = XVECEXP (pat, 0, i); | |
1214 | ||
1215 | if (GET_CODE (tem) == USE | |
1216 | || GET_CODE (tem) == CLOBBER) | |
1217 | continue; | |
1218 | ||
1219 | if (GET_CODE (tem) != SET || ! set_noop_p (tem)) | |
1220 | return 0; | |
1221 | } | |
1222 | ||
1223 | return 1; | |
1224 | } | |
1225 | return 0; | |
1226 | } | |
1227 | ||
fdb8a883 JW |
1228 | static void |
1229 | notice_stack_pointer_modification (x, pat) | |
1230 | rtx x; | |
1231 | rtx pat ATTRIBUTE_UNUSED; | |
1232 | { | |
1233 | if (x == stack_pointer_rtx | |
1234 | /* The stack pointer is only modified indirectly as the result | |
1235 | of a push until later in flow. See the comments in rtl.texi | |
1236 | regarding Embedded Side-Effects on Addresses. */ | |
1237 | || (GET_CODE (x) == MEM | |
1238 | && (GET_CODE (XEXP (x, 0)) == PRE_DEC | |
1239 | || GET_CODE (XEXP (x, 0)) == PRE_INC | |
1240 | || GET_CODE (XEXP (x, 0)) == POST_DEC | |
1241 | || GET_CODE (XEXP (x, 0)) == POST_INC) | |
1242 | && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx)) | |
1243 | current_function_sp_is_unchanging = 0; | |
1244 | } | |
1245 | ||
dc2ede84 BS |
/* Record which insns refer to any volatile memory
   or for any reason can't be deleted just because they are dead stores.
   Also, delete any insns that copy a register to itself.
   And see if the stack pointer is modified.

   Walks every insn from F to the end of the chain, setting
   INSN_VOLATILE where appropriate and turning no-op moves into
   NOTE_INSN_DELETED notes.  */
static void
record_volatile_insns (f)
     rtx f;
{
  rtx insn;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);

      /* Calls are always volatile: they have side effects we cannot see.  */
      if (code1 == CALL_INSN)
	INSN_VOLATILE (insn) = 1;
      else if (code1 == INSN || code1 == JUMP_INSN)
	{
	  if (GET_CODE (PATTERN (insn)) != USE
	      && volatile_refs_p (PATTERN (insn)))
	    INSN_VOLATILE (insn) = 1;

	  /* A SET that makes space on the stack cannot be dead.
	     (Such SETs occur only for allocating variable-size data,
	     so they will always have a PLUS or MINUS according to the
	     direction of stack growth.)
	     Even if this function never uses this stack pointer value,
	     signal handlers do!  */
	  else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
#ifdef STACK_GROWS_DOWNWARD
		   && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
#else
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
#endif
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
	    INSN_VOLATILE (insn) = 1;

	  /* Delete (in effect) any obvious no-op moves.  */
	  else if (noop_move_p (insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }
	}

      /* Check if insn modifies the stack pointer.  Once one modification
	 has been seen the flag stays clear, so further checks are
	 skipped.  */
      if ( current_function_sp_is_unchanging
	   && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), notice_stack_pointer_modification);
    }
}
1297 | ||
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.

   SET is the regset for the final block; bits are only added, never
   cleared.  Covers the stack pointer (unless the exit provably ignores
   it), frame pointer(s), global registers, and any register the
   target's epilogue uses.  */
static void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
#endif
    /* If exiting needs the right stack value,
       consider the stack pointer live at the end of the function.  */
    SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);

  /* Mark the frame pointer is needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* If they are different, also mark the hard frame pointer as live */
  SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif


  /* Mark all global registers and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_REGNO_REG_SET (set, i);
}
1339 | ||
d7429b6a RK |
1340 | /* Determine which registers are live at the start of each |
1341 | basic block of the function whose first insn is F. | |
1342 | NREGS is the number of registers used in F. | |
1343 | We allocate the vector basic_block_live_at_start | |
1344 | and the regsets that it points to, and fill them with the data. | |
1345 | regset_size and regset_bytes are also set here. */ | |
1346 | ||
1347 | static void | |
5ece9746 | 1348 | life_analysis_1 (f, nregs) |
d7429b6a RK |
1349 | rtx f; |
1350 | int nregs; | |
1351 | { | |
d7429b6a RK |
1352 | int first_pass; |
1353 | int changed; | |
1354 | /* For each basic block, a bitmask of regs | |
1355 | live on exit from the block. */ | |
1356 | regset *basic_block_live_at_end; | |
1357 | /* For each basic block, a bitmask of regs | |
1358 | live on entry to a successor-block of this block. | |
1359 | If this does not match basic_block_live_at_end, | |
1360 | that must be updated, and the block must be rescanned. */ | |
1361 | regset *basic_block_new_live_at_end; | |
1362 | /* For each basic block, a bitmask of regs | |
1363 | whose liveness at the end of the basic block | |
1364 | can make a difference in which regs are live on entry to the block. | |
1365 | These are the regs that are set within the basic block, | |
1366 | possibly excluding those that are used after they are set. */ | |
1367 | regset *basic_block_significant; | |
1368 | register int i; | |
1369 | rtx insn; | |
1370 | ||
1371 | struct obstack flow_obstack; | |
1372 | ||
1373 | gcc_obstack_init (&flow_obstack); | |
1374 | ||
1375 | max_regno = nregs; | |
1376 | ||
1377 | bzero (regs_ever_live, sizeof regs_ever_live); | |
1378 | ||
1379 | /* Allocate and zero out many data structures | |
1380 | that will record the data from lifetime analysis. */ | |
1381 | ||
1382 | allocate_for_life_analysis (); | |
1383 | ||
1384 | reg_next_use = (rtx *) alloca (nregs * sizeof (rtx)); | |
4c9a05bc | 1385 | bzero ((char *) reg_next_use, nregs * sizeof (rtx)); |
d7429b6a RK |
1386 | |
1387 | /* Set up several regset-vectors used internally within this function. | |
1388 | Their meanings are documented above, with their declarations. */ | |
1389 | ||
4c9a05bc RK |
1390 | basic_block_live_at_end |
1391 | = (regset *) alloca (n_basic_blocks * sizeof (regset)); | |
1392 | ||
d7429b6a RK |
1393 | /* Don't use alloca since that leads to a crash rather than an error message |
1394 | if there isn't enough space. | |
1395 | Don't use oballoc since we may need to allocate other things during | |
1396 | this function on the temporary obstack. */ | |
67f0e213 | 1397 | init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack); |
d7429b6a | 1398 | |
4c9a05bc RK |
1399 | basic_block_new_live_at_end |
1400 | = (regset *) alloca (n_basic_blocks * sizeof (regset)); | |
67f0e213 | 1401 | init_regset_vector (basic_block_new_live_at_end, n_basic_blocks, |
7eb136d6 | 1402 | &flow_obstack); |
d7429b6a | 1403 | |
4c9a05bc RK |
1404 | basic_block_significant |
1405 | = (regset *) alloca (n_basic_blocks * sizeof (regset)); | |
67f0e213 | 1406 | init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack); |
d7429b6a | 1407 | |
fdb8a883 JW |
1408 | /* Assume that the stack pointer is unchanging if alloca hasn't been used. |
1409 | This will be cleared by record_volatile_insns if it encounters an insn | |
1410 | which modifies the stack pointer. */ | |
1411 | current_function_sp_is_unchanging = !current_function_calls_alloca; | |
1412 | ||
dc2ede84 | 1413 | record_volatile_insns (f); |
fe0f9c4b RK |
1414 | |
1415 | if (n_basic_blocks > 0) | |
1416 | { | |
dc2ede84 BS |
1417 | mark_regs_live_at_end (basic_block_live_at_end[n_basic_blocks - 1]); |
1418 | COPY_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1], | |
1419 | basic_block_live_at_end[n_basic_blocks - 1]); | |
1420 | } | |
d7429b6a RK |
1421 | |
1422 | /* Propagate life info through the basic blocks | |
1423 | around the graph of basic blocks. | |
1424 | ||
1425 | This is a relaxation process: each time a new register | |
1426 | is live at the end of the basic block, we must scan the block | |
1427 | to determine which registers are, as a consequence, live at the beginning | |
1428 | of that block. These registers must then be marked live at the ends | |
1429 | of all the blocks that can transfer control to that block. | |
1430 | The process continues until it reaches a fixed point. */ | |
1431 | ||
1432 | first_pass = 1; | |
1433 | changed = 1; | |
1434 | while (changed) | |
1435 | { | |
1436 | changed = 0; | |
1437 | for (i = n_basic_blocks - 1; i >= 0; i--) | |
1438 | { | |
1439 | int consider = first_pass; | |
1440 | int must_rescan = first_pass; | |
1441 | register int j; | |
1442 | ||
1443 | if (!first_pass) | |
1444 | { | |
1445 | /* Set CONSIDER if this block needs thinking about at all | |
1446 | (that is, if the regs live now at the end of it | |
1447 | are not the same as were live at the end of it when | |
1448 | we last thought about it). | |
1449 | Set must_rescan if it needs to be thought about | |
1450 | instruction by instruction (that is, if any additional | |
1451 | reg that is live at the end now but was not live there before | |
1452 | is one of the significant regs of this basic block). */ | |
1453 | ||
b5835272 RK |
1454 | EXECUTE_IF_AND_COMPL_IN_REG_SET |
1455 | (basic_block_new_live_at_end[i], | |
1456 | basic_block_live_at_end[i], 0, j, | |
1457 | { | |
1458 | consider = 1; | |
1459 | if (REGNO_REG_SET_P (basic_block_significant[i], j)) | |
1460 | { | |
1461 | must_rescan = 1; | |
1462 | goto done; | |
1463 | } | |
1464 | }); | |
916b1701 | 1465 | done: |
d7429b6a RK |
1466 | if (! consider) |
1467 | continue; | |
1468 | } | |
1469 | ||
1470 | /* The live_at_start of this block may be changing, | |
1471 | so another pass will be required after this one. */ | |
1472 | changed = 1; | |
1473 | ||
1474 | if (! must_rescan) | |
1475 | { | |
1476 | /* No complete rescan needed; | |
1477 | just record those variables newly known live at end | |
1478 | as live at start as well. */ | |
916b1701 MM |
1479 | IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i], |
1480 | basic_block_new_live_at_end[i], | |
1481 | basic_block_live_at_end[i]); | |
1482 | ||
1483 | IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i], | |
1484 | basic_block_new_live_at_end[i], | |
1485 | basic_block_live_at_end[i]); | |
d7429b6a RK |
1486 | } |
1487 | else | |
1488 | { | |
1489 | /* Update the basic_block_live_at_start | |
1490 | by propagation backwards through the block. */ | |
916b1701 MM |
1491 | COPY_REG_SET (basic_block_live_at_end[i], |
1492 | basic_block_new_live_at_end[i]); | |
1493 | COPY_REG_SET (basic_block_live_at_start[i], | |
1494 | basic_block_live_at_end[i]); | |
d7429b6a RK |
1495 | propagate_block (basic_block_live_at_start[i], |
1496 | basic_block_head[i], basic_block_end[i], 0, | |
5f4f0e22 CH |
1497 | first_pass ? basic_block_significant[i] |
1498 | : (regset) 0, | |
d7429b6a RK |
1499 | i); |
1500 | } | |
1501 | ||
1502 | { | |
1503 | register rtx jump, head; | |
af14ce9c | 1504 | |
d7429b6a RK |
1505 | /* Update the basic_block_new_live_at_end's of the block |
1506 | that falls through into this one (if any). */ | |
1507 | head = basic_block_head[i]; | |
d7429b6a | 1508 | if (basic_block_drops_in[i]) |
916b1701 MM |
1509 | IOR_REG_SET (basic_block_new_live_at_end[i-1], |
1510 | basic_block_live_at_start[i]); | |
af14ce9c | 1511 | |
d7429b6a RK |
1512 | /* Update the basic_block_new_live_at_end's of |
1513 | all the blocks that jump to this one. */ | |
1514 | if (GET_CODE (head) == CODE_LABEL) | |
1515 | for (jump = LABEL_REFS (head); | |
1516 | jump != head; | |
1517 | jump = LABEL_NEXTREF (jump)) | |
1518 | { | |
1519 | register int from_block = BLOCK_NUM (CONTAINING_INSN (jump)); | |
916b1701 MM |
1520 | IOR_REG_SET (basic_block_new_live_at_end[from_block], |
1521 | basic_block_live_at_start[i]); | |
d7429b6a RK |
1522 | } |
1523 | } | |
1524 | #ifdef USE_C_ALLOCA | |
1525 | alloca (0); | |
1526 | #endif | |
1527 | } | |
1528 | first_pass = 0; | |
1529 | } | |
1530 | ||
1531 | /* The only pseudos that are live at the beginning of the function are | |
1532 | those that were not set anywhere in the function. local-alloc doesn't | |
1533 | know how to handle these correctly, so mark them as not local to any | |
1534 | one basic block. */ | |
1535 | ||
1536 | if (n_basic_blocks > 0) | |
916b1701 MM |
1537 | EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0], |
1538 | FIRST_PSEUDO_REGISTER, i, | |
1539 | { | |
1540 | REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; | |
1541 | }); | |
d7429b6a RK |
1542 | |
1543 | /* Now the life information is accurate. | |
1544 | Make one more pass over each basic block | |
1545 | to delete dead stores, create autoincrement addressing | |
1546 | and record how many times each register is used, is set, or dies. | |
1547 | ||
1548 | To save time, we operate directly in basic_block_live_at_end[i], | |
1549 | thus destroying it (in fact, converting it into a copy of | |
1550 | basic_block_live_at_start[i]). This is ok now because | |
1551 | basic_block_live_at_end[i] is no longer used past this point. */ | |
1552 | ||
1553 | max_scratch = 0; | |
1554 | ||
1555 | for (i = 0; i < n_basic_blocks; i++) | |
1556 | { | |
1557 | propagate_block (basic_block_live_at_end[i], | |
5f4f0e22 CH |
1558 | basic_block_head[i], basic_block_end[i], 1, |
1559 | (regset) 0, i); | |
d7429b6a RK |
1560 | #ifdef USE_C_ALLOCA |
1561 | alloca (0); | |
1562 | #endif | |
1563 | } | |
1564 | ||
1565 | #if 0 | |
1566 | /* Something live during a setjmp should not be put in a register | |
1567 | on certain machines which restore regs from stack frames | |
1568 | rather than from the jmpbuf. | |
1569 | But we don't need to do this for the user's variables, since | |
1570 | ANSI says only volatile variables need this. */ | |
1571 | #ifdef LONGJMP_RESTORE_FROM_STACK | |
916b1701 MM |
1572 | EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp, |
1573 | FIRST_PSEUDO_REGISTER, i, | |
1574 | { | |
1575 | if (regno_reg_rtx[i] != 0 | |
1576 | && ! REG_USERVAR_P (regno_reg_rtx[i])) | |
1577 | { | |
1578 | REG_LIVE_LENGTH (i) = -1; | |
1579 | REG_BASIC_BLOCK (i) = -1; | |
1580 | } | |
1581 | }); | |
d7429b6a RK |
1582 | #endif |
1583 | #endif | |
1584 | ||
1585 | /* We have a problem with any pseudoreg that | |
1586 | lives across the setjmp. ANSI says that if a | |
1587 | user variable does not change in value | |
1588 | between the setjmp and the longjmp, then the longjmp preserves it. | |
1589 | This includes longjmp from a place where the pseudo appears dead. | |
1590 | (In principle, the value still exists if it is in scope.) | |
1591 | If the pseudo goes in a hard reg, some other value may occupy | |
1592 | that hard reg where this pseudo is dead, thus clobbering the pseudo. | |
1593 | Conclusion: such a pseudo must not go in a hard reg. */ | |
916b1701 MM |
1594 | EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp, |
1595 | FIRST_PSEUDO_REGISTER, i, | |
1596 | { | |
1597 | if (regno_reg_rtx[i] != 0) | |
1598 | { | |
1599 | REG_LIVE_LENGTH (i) = -1; | |
1600 | REG_BASIC_BLOCK (i) = -1; | |
1601 | } | |
1602 | }); | |
d7429b6a | 1603 | |
67f0e213 RK |
1604 | |
1605 | free_regset_vector (basic_block_live_at_end, n_basic_blocks); | |
1606 | free_regset_vector (basic_block_new_live_at_end, n_basic_blocks); | |
1607 | free_regset_vector (basic_block_significant, n_basic_blocks); | |
1608 | basic_block_live_at_end = (regset *)0; | |
1609 | basic_block_new_live_at_end = (regset *)0; | |
1610 | basic_block_significant = (regset *)0; | |
1611 | ||
5f4f0e22 | 1612 | obstack_free (&flow_obstack, NULL_PTR); |
d7429b6a RK |
1613 | } |
1614 | \f | |
1615 | /* Subroutines of life analysis. */ | |
1616 | ||
1617 | /* Allocate the permanent data structures that represent the results | |
1618 | of life analysis. Not static since used also for stupid life analysis. */ | |
1619 | ||
void
allocate_for_life_analysis ()
{
  register int i;

  /* Recalculate the register space, in case it has grown.  Old style
     vector oriented regsets would set regset_{size,bytes} here also.  */
  allocate_reg_info (max_regno, FALSE, FALSE);

  /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
     information, explicitly reset it here.  The allocation should have
     already happened on the previous reg_scan pass.  Make sure in case
     some more registers were allocated.  */
  for (i = 0; i < max_regno; i++)
    REG_N_SETS (i) = 0;

  /* One live-at-start regset per basic block; the vector itself comes
     from oballoc and the individual regsets from FUNCTION_OBSTACK via
     init_regset_vector.  */
  basic_block_live_at_start
    = (regset *) oballoc (n_basic_blocks * sizeof (regset));
  init_regset_vector (basic_block_live_at_start, n_basic_blocks,
		      function_obstack);

  /* Start with no registers recorded as live across a setjmp; the
     propagate_block pass ORs live sets into this at NOTE_INSN_SETJMP.  */
  regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
  CLEAR_REG_SET (regs_live_at_setjmp);
}
1644 | ||
67f0e213 RK |
1645 | /* Make each element of VECTOR point at a regset. The vector has |
1646 | NELTS elements, and space is allocated from the ALLOC_OBSTACK | |
1647 | obstack. */ | |
d7429b6a | 1648 | |
04821e98 | 1649 | static void |
67f0e213 | 1650 | init_regset_vector (vector, nelts, alloc_obstack) |
d7429b6a | 1651 | regset *vector; |
d7429b6a | 1652 | int nelts; |
7eb136d6 | 1653 | struct obstack *alloc_obstack; |
d7429b6a RK |
1654 | { |
1655 | register int i; | |
d7429b6a RK |
1656 | |
1657 | for (i = 0; i < nelts; i++) | |
1658 | { | |
7eb136d6 MM |
1659 | vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack); |
1660 | CLEAR_REG_SET (vector[i]); | |
d7429b6a RK |
1661 | } |
1662 | } | |
e658434c | 1663 | |
67f0e213 RK |
1664 | /* Release any additional space allocated for each element of VECTOR point |
1665 | other than the regset header itself. The vector has NELTS elements. */ | |
1666 | ||
1667 | void | |
1668 | free_regset_vector (vector, nelts) | |
1669 | regset *vector; | |
1670 | int nelts; | |
1671 | { | |
1672 | register int i; | |
1673 | ||
1674 | for (i = 0; i < nelts; i++) | |
1675 | FREE_REG_SET (vector[i]); | |
1676 | } | |
1677 | ||
d7429b6a RK |
1678 | /* Compute the registers live at the beginning of a basic block |
1679 | from those live at the end. | |
1680 | ||
1681 | When called, OLD contains those live at the end. | |
1682 | On return, it contains those live at the beginning. | |
1683 | FIRST and LAST are the first and last insns of the basic block. | |
1684 | ||
1685 | FINAL is nonzero if we are doing the final pass which is not | |
1686 | for computing the life info (since that has already been done) | |
1687 | but for acting on it. On this pass, we delete dead stores, | |
1688 | set up the logical links and dead-variables lists of instructions, | |
1689 | and merge instructions for autoincrement and autodecrement addresses. | |
1690 | ||
1691 | SIGNIFICANT is nonzero only the first time for each basic block. | |
1692 | If it is nonzero, it points to a regset in which we store | |
1693 | a 1 for each register that is set within the block. | |
1694 | ||
1695 | BNUM is the number of the basic block. */ | |
1696 | ||
static void
propagate_block (old, first, last, final, significant, bnum)
     register regset old;
     rtx first;
     rtx last;
     int final;
     regset significant;
     int bnum;
{
  register rtx insn;
  rtx prev;
  regset live;
  regset dead;

  /* The following variables are used only if FINAL is nonzero.  */
  /* This vector gets one element for each reg that has been live
     at any point in the basic block that has been scanned so far.
     SOMETIMES_MAX says how many elements are in use so far.  */
  register int *regs_sometimes_live;
  int sometimes_max = 0;
  /* This regset has 1 for each reg that we have seen live so far.
     It and REGS_SOMETIMES_LIVE are updated together.  */
  regset maxlive;

  /* The loop depth may change in the middle of a basic block.  Since we
     scan from end to beginning, we start with the depth at the end of the
     current basic block, and adjust as we pass ends and starts of loops.  */
  loop_depth = basic_block_loop_depth[bnum];

  dead = ALLOCA_REG_SET ();
  live = ALLOCA_REG_SET ();

  /* Reset per-block scan state: CC0 liveness and the address of the
     last memory store seen (used for dead-store detection).  */
  cc0_live = 0;
  last_mem_set = 0;

  /* Include any notes at the end of the block in the scan.
     This is in case the block ends with a call to setjmp.  */

  while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
    {
      /* Look for loop boundaries, we are going forward here.  */
      last = NEXT_INSN (last);
      if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
	loop_depth++;
      else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
	loop_depth--;
    }

  if (final)
    {
      register int i;

      num_scratch = 0;
      maxlive = ALLOCA_REG_SET ();
      COPY_REG_SET (maxlive, old);
      regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));

      /* Process the regs live at the end of the block.
	 Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
	 Also mark them as not local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
				 {
				   REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
				   regs_sometimes_live[sometimes_max] = i;
				   sometimes_max++;
				 });
    }

  /* Scan the block an insn at a time from end to beginning.  */

  for (insn = last; ; insn = prev)
    {
      prev = PREV_INSN (insn);

      if (GET_CODE (insn) == NOTE)
	{
	  /* Look for loop boundaries, remembering that we are going
	     backwards.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	    loop_depth++;
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	    loop_depth--;

	  /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
	     Abort now rather than setting register status incorrectly.  */
	  if (loop_depth == 0)
	    abort ();

	  /* If this is a call to `setjmp' et al,
	     warn if any non-volatile datum is live.  */

	  if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	    IOR_REG_SET (regs_live_at_setjmp, old);
	}

      /* Update the life-status of regs for this insn.
	 First DEAD gets which regs are set in this insn
	 then LIVE gets which regs are used in this insn.
	 Then the regs live before the insn
	 are those live after, with DEAD regs turned off,
	 and then LIVE regs turned on.  */

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  register int i;
	  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	  int insn_is_dead
	    = (insn_dead_p (PATTERN (insn), old, 0)
	       /* Don't delete something that refers to volatile storage!  */
	       && ! INSN_VOLATILE (insn));
	  int libcall_is_dead
	    = (insn_is_dead && note != 0
	       && libcall_dead_p (PATTERN (insn), old, note, insn));

	  /* If an instruction consists of just dead store(s) on final pass,
	     "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
	     We could really delete it with delete_insn, but that
	     can cause trouble for first or last insn in a basic block.  */
	  if (final && insn_is_dead)
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;

	      /* CC0 is now known to be dead.  Either this insn used it,
		 in which case it doesn't anymore, or clobbered it,
		 so the next insn can't use it.  */
	      cc0_live = 0;

	      /* If this insn is copying the return value from a library call,
		 delete the entire library call.  XEXP (note, 0) is the first
		 insn of the libcall sequence (the REG_RETVAL note's datum);
		 walk backwards from INSN turning each insn into a NOTE.  */
	      if (libcall_is_dead)
		{
		  rtx first = XEXP (note, 0);
		  rtx p = insn;
		  while (INSN_DELETED_P (first))
		    first = NEXT_INSN (first);
		  while (p != first)
		    {
		      p = PREV_INSN (p);
		      PUT_CODE (p, NOTE);
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		      NOTE_SOURCE_FILE (p) = 0;
		    }
		}
	      goto flushed;
	    }

	  CLEAR_REG_SET (dead);
	  CLEAR_REG_SET (live);

	  /* See if this is an increment or decrement that can be
	     merged into a following memory address.  */
#ifdef AUTO_INC_DEC
	  {
	    register rtx x = single_set (insn);

	    /* Does this instruction increment or decrement a register?  */
	    if (final && x != 0
		&& GET_CODE (SET_DEST (x)) == REG
		&& (GET_CODE (SET_SRC (x)) == PLUS
		    || GET_CODE (SET_SRC (x)) == MINUS)
		&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
		&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
		/* Ok, look for a following memory ref we can combine with.
		   If one is found, change the memory ref to a PRE_INC
		   or PRE_DEC, cancel this insn, and return 1.
		   Return 0 if nothing has been done.  */
		&& try_pre_increment_1 (insn))
	      goto flushed;
	  }
#endif /* AUTO_INC_DEC */

	  /* If this is not the final pass, and this insn is copying the
	     value of a library call and it's dead, don't scan the
	     insns that perform the library call, so that the call's
	     arguments are not marked live.  */
	  if (libcall_is_dead)
	    {
	      /* Mark the dest reg as `significant'.  */
	      mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);

	      /* Skip the whole libcall body: resume scanning before its
		 first insn.  */
	      insn = XEXP (note, 0);
	      prev = PREV_INSN (insn);
	    }
	  else if (GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
		   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
		   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
		   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
	    /* We have an insn to pop a constant amount off the stack.
	       (Such insns use PLUS regardless of the direction of the stack,
	       and any insn to adjust the stack by a constant is always a pop.)
	       These insns, if not dead stores, have no effect on life.  */
	    ;
	  else
	    {
	      /* LIVE gets the regs used in INSN;
		 DEAD gets those set by it.  Dead insns don't make anything
		 live.  */

	      mark_set_regs (old, dead, PATTERN (insn),
			     final ? insn : NULL_RTX, significant);

	      /* If an insn doesn't use CC0, it becomes dead since we
		 assume that every insn clobbers it.  So show it dead here;
		 mark_used_regs will set it live if it is referenced.  */
	      cc0_live = 0;

	      if (! insn_is_dead)
		mark_used_regs (old, live, PATTERN (insn), final, insn);

	      /* Sometimes we may have inserted something before INSN (such as
		 a move) when we make an auto-inc.  So ensure we will scan
		 those insns.  */
#ifdef AUTO_INC_DEC
	      prev = PREV_INSN (insn);
#endif

	      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
		{
		  register int i;

		  rtx note;

		  /* Registers mentioned as USEs in CALL_INSN_FUNCTION_USAGE
		     (argument registers etc.) are used by the call.  */
		  for (note = CALL_INSN_FUNCTION_USAGE (insn);
		       note;
		       note = XEXP (note, 1))
		    if (GET_CODE (XEXP (note, 0)) == USE)
		      mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
				      final, insn);

		  /* Each call clobbers all call-clobbered regs that are not
		     global or fixed.  Note that the function-value reg is a
		     call-clobbered reg, and mark_set_regs has already had
		     a chance to handle it.  */

		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! global_regs[i]
			&& ! fixed_regs[i])
		      SET_REGNO_REG_SET (dead, i);

		  /* The stack ptr is used (honorarily) by a CALL insn.  */
		  SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);

		  /* Calls may also reference any of the global registers,
		     so they are made live.  */
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (global_regs[i])
		      mark_used_regs (old, live,
				      gen_rtx_REG (reg_raw_mode[i], i),
				      final, insn);

		  /* Calls also clobber memory.  */
		  last_mem_set = 0;
		}

	      /* Update OLD for the registers used or set.  */
	      AND_COMPL_REG_SET (old, dead);
	      IOR_REG_SET (old, live);

	      if (GET_CODE (insn) == CALL_INSN && final)
		{
		  /* Any regs live at the time of a call instruction
		     must not go in a register clobbered by calls.
		     Find all regs now live and record this for them.  */

		  register int *p = regs_sometimes_live;

		  for (i = 0; i < sometimes_max; i++, p++)
		    if (REGNO_REG_SET_P (old, *p))
		      REG_N_CALLS_CROSSED (*p)++;
		}
	    }

	  /* On final pass, add any additional sometimes-live regs
	     into MAXLIVE and REGS_SOMETIMES_LIVE.
	     Also update counts of how many insns each reg is live at.  */

	  if (final)
	    {
	      register int regno;
	      register int *p;

	      /* Regs in LIVE but not yet in MAXLIVE are newly seen.  */
	      EXECUTE_IF_AND_COMPL_IN_REG_SET
		(live, maxlive, 0, regno,
		 {
		   regs_sometimes_live[sometimes_max++] = regno;
		   SET_REGNO_REG_SET (maxlive, regno);
		 });

	      /* Credit one insn of live length to every sometimes-live reg
		 that is live before this insn.  */
	      p = regs_sometimes_live;
	      for (i = 0; i < sometimes_max; i++)
		{
		  regno = *p++;
		  if (REGNO_REG_SET_P (old, regno))
		    REG_LIVE_LENGTH (regno)++;
		}
	    }
	}
      /* Come here when INSN was deleted or merged away, so its life
	 effects must not be recorded.  */
    flushed: ;
      if (insn == first)
	break;
    }

  FREE_REG_SET (dead);
  FREE_REG_SET (live);
  if (final)
    FREE_REG_SET (maxlive);

  /* num_scratch is presumably accumulated by the marking routines while
     scanning this block -- TODO confirm; record the per-block maximum.  */
  if (num_scratch > max_scratch)
    max_scratch = num_scratch;
}
2010 | \f | |
2011 | /* Return 1 if X (the body of an insn, or part of it) is just dead stores | |
2012 | (SET expressions whose destinations are registers dead after the insn). | |
2013 | NEEDED is the regset that says which regs are alive after the insn. | |
2014 | ||
2015 | Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */ | |
2016 | ||
static int
insn_dead_p (x, needed, call_ok)
     rtx x;
     regset needed;
     int call_ok;
{
  enum rtx_code code = GET_CODE (x);

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

      /* A SET that is a subroutine call cannot be dead.  */
      if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
	return 0;

#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
	return ! cc0_live;
#endif

      /* A non-volatile store to the same address as the last store seen
	 (scanning backwards, so a LATER store to that address) is dead.  */
      if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
	  && rtx_equal_p (r, last_mem_set))
	return 1;

      /* Strip SUBREG/STRICT_LOW_PART/ZERO_EXTRACT wrappers to reach the
	 underlying register (operand 0 in each case -- SUBREG_REG is
	 presumably XEXP (r, 0), so it works for all three codes).  */
      while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
	     || GET_CODE (r) == ZERO_EXTRACT)
	r = SUBREG_REG (r);

      if (GET_CODE (r) == REG)
	{
	  int regno = REGNO (r);

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      /* Make sure insns to set frame pointer aren't deleted.  */
	      || regno == FRAME_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE for
		 it, so we can treat it normally).  */
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || REGNO_REG_SET_P (needed, regno))
	    return 0;

	  /* If this is a hard register, verify that subsequent words are
	     not needed.  */
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

	      while (--n > 0)
		if (REGNO_REG_SET_P (needed, regno+n))
		  return 0;
	    }

	  return 1;
	}
    }

  /* If performing several activities,
     insn is dead if each activity is individually dead.
     Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
     that's inside a PARALLEL doesn't make the insn worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
2112 | ||
2113 | /* If X is the pattern of the last insn in a libcall, and assuming X is dead, | |
2114 | return 1 if the entire library call is dead. | |
2115 | This is true if X copies a register (hard or pseudo) | |
2116 | and if the hard return reg of the call insn is dead. | |
2117 | (The caller should have tested the destination of X already for death.) | |
2118 | ||
2119 | If this insn doesn't just copy a register, then we don't | |
2120 | have an ordinary libcall. In that case, cse could not have | |
2121 | managed to substitute the source for the dest later on, | |
2122 | so we can assume the libcall is dead. | |
2123 | ||
2124 | NEEDED is the bit vector of pseudoregs live before this insn. | |
2125 | NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */ | |
2126 | ||
static int
libcall_dead_p (x, needed, note, insn)
     rtx x;
     regset needed;
     rtx note;
     rtx insn;
{
  register RTX_CODE code = GET_CODE (x);

  if (code == SET)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
	{
	  /* XEXP (note, 0) is the first insn of the libcall sequence;
	     scan forward from it for the CALL_INSN itself.  */
	  rtx call = XEXP (note, 0);
	  register int i;

	  /* Find the call insn.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call = PATTERN (call);
	  if (GET_CODE (call) == PARALLEL)
	    {
	      for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call = XVECEXP (call, 0, i);
	    }

	  /* CALL_OK is 1: the (set (reg) (call ...)) itself is allowed.  */
	  return insn_dead_p (call, needed, 1);
	}
    }
  /* Not a simple register copy: per the contract above, assume the
     libcall is dead.  */
  return 1;
}
2177 | ||
bd80fbde RH |
2178 | /* Return 1 if register REGNO was used before it was set, i.e. if it is |
2179 | live at function entry. Don't count global register variables, variables | |
2180 | in registers that can be used for function arg passing, or variables in | |
2181 | fixed hard registers. */ | |
d7429b6a RK |
2182 | |
2183 | int | |
2184 | regno_uninitialized (regno) | |
2185 | int regno; | |
2186 | { | |
b0b7b46a | 2187 | if (n_basic_blocks == 0 |
6a45254e | 2188 | || (regno < FIRST_PSEUDO_REGISTER |
bd80fbde RH |
2189 | && (global_regs[regno] |
2190 | || fixed_regs[regno] | |
2191 | || FUNCTION_ARG_REGNO_P (regno)))) | |
d7429b6a RK |
2192 | return 0; |
2193 | ||
916b1701 | 2194 | return REGNO_REG_SET_P (basic_block_live_at_start[0], regno); |
d7429b6a RK |
2195 | } |
2196 | ||
2197 | /* 1 if register REGNO was alive at a place where `setjmp' was called | |
2198 | and was set more than once or is an argument. | |
2199 | Such regs may be clobbered by `longjmp'. */ | |
2200 | ||
2201 | int | |
2202 | regno_clobbered_at_setjmp (regno) | |
2203 | int regno; | |
2204 | { | |
2205 | if (n_basic_blocks == 0) | |
2206 | return 0; | |
2207 | ||
b1f21e0a | 2208 | return ((REG_N_SETS (regno) > 1 |
916b1701 MM |
2209 | || REGNO_REG_SET_P (basic_block_live_at_start[0], regno)) |
2210 | && REGNO_REG_SET_P (regs_live_at_setjmp, regno)); | |
d7429b6a RK |
2211 | } |
2212 | \f | |
2213 | /* Process the registers that are set within X. | |
2214 | Their bits are set to 1 in the regset DEAD, | |
2215 | because they are dead prior to this insn. | |
2216 | ||
2217 | If INSN is nonzero, it is the insn being processed | |
2218 | and the fact that it is nonzero implies this is the FINAL pass | |
2219 | in propagate_block. In this case, various info about register | |
2220 | usage is stored, LOG_LINKS fields of insns are set up. */ | |
2221 | ||
d7429b6a RK |
2222 | static void |
2223 | mark_set_regs (needed, dead, x, insn, significant) | |
2224 | regset needed; | |
2225 | regset dead; | |
2226 | rtx x; | |
2227 | rtx insn; | |
2228 | regset significant; | |
2229 | { | |
2230 | register RTX_CODE code = GET_CODE (x); | |
2231 | ||
2232 | if (code == SET || code == CLOBBER) | |
2233 | mark_set_1 (needed, dead, x, insn, significant); | |
2234 | else if (code == PARALLEL) | |
2235 | { | |
2236 | register int i; | |
2237 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
2238 | { | |
2239 | code = GET_CODE (XVECEXP (x, 0, i)); | |
2240 | if (code == SET || code == CLOBBER) | |
2241 | mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant); | |
2242 | } | |
2243 | } | |
2244 | } | |
2245 | ||
2246 | /* Process a single SET rtx, X. */ | |
2247 | ||
2248 | static void | |
2249 | mark_set_1 (needed, dead, x, insn, significant) | |
2250 | regset needed; | |
2251 | regset dead; | |
2252 | rtx x; | |
2253 | rtx insn; | |
2254 | regset significant; | |
2255 | { | |
2256 | register int regno; | |
2257 | register rtx reg = SET_DEST (x); | |
2258 | ||
86465af7 DM |
2259 | /* Some targets place small structures in registers for |
2260 | return values of functions. We have to detect this | |
2261 | case specially here to get correct flow information. */ | |
2262 | if (GET_CODE (reg) == PARALLEL | |
2263 | && GET_MODE (reg) == BLKmode) | |
2264 | { | |
2265 | register int i; | |
2266 | ||
2267 | for (i = XVECLEN (reg, 0) - 1; i >= 0; i--) | |
2268 | mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant); | |
2269 | return; | |
2270 | } | |
2271 | ||
d7429b6a RK |
2272 | /* Modifying just one hardware register of a multi-reg value |
2273 | or just a byte field of a register | |
2274 | does not mean the value from before this insn is now dead. | |
2275 | But it does mean liveness of that register at the end of the block | |
2276 | is significant. | |
2277 | ||
2278 | Within mark_set_1, however, we treat it as if the register is | |
2279 | indeed modified. mark_used_regs will, however, also treat this | |
2280 | register as being used. Thus, we treat these insns as setting a | |
2281 | new value for the register as a function of its old value. This | |
2282 | causes LOG_LINKS to be made appropriately and this will help combine. */ | |
2283 | ||
2284 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
2285 | || GET_CODE (reg) == SIGN_EXTRACT | |
2286 | || GET_CODE (reg) == STRICT_LOW_PART) | |
2287 | reg = XEXP (reg, 0); | |
2288 | ||
2289 | /* If we are writing into memory or into a register mentioned in the | |
2290 | address of the last thing stored into memory, show we don't know | |
2291 | what the last store was. If we are writing memory, save the address | |
2292 | unless it is volatile. */ | |
2293 | if (GET_CODE (reg) == MEM | |
2294 | || (GET_CODE (reg) == REG | |
2295 | && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set))) | |
2296 | last_mem_set = 0; | |
2297 | ||
2298 | if (GET_CODE (reg) == MEM && ! side_effects_p (reg) | |
2299 | /* There are no REG_INC notes for SP, so we can't assume we'll see | |
2300 | everything that invalidates it. To be safe, don't eliminate any | |
2301 | stores through SP; none of them should be redundant anyway. */ | |
2302 | && ! reg_mentioned_p (stack_pointer_rtx, reg)) | |
2303 | last_mem_set = reg; | |
2304 | ||
2305 | if (GET_CODE (reg) == REG | |
2306 | && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM) | |
73a187c1 DE |
2307 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM |
2308 | && regno != HARD_FRAME_POINTER_REGNUM | |
2309 | #endif | |
d7e4fe8b RS |
2310 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
2311 | && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
2312 | #endif | |
d7429b6a RK |
2313 | && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])) |
2314 | /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */ | |
2315 | { | |
916b1701 MM |
2316 | int some_needed = REGNO_REG_SET_P (needed, regno); |
2317 | int some_not_needed = ! some_needed; | |
d7429b6a RK |
2318 | |
2319 | /* Mark it as a significant register for this basic block. */ | |
2320 | if (significant) | |
916b1701 | 2321 | SET_REGNO_REG_SET (significant, regno); |
d7429b6a | 2322 | |
38e01259 | 2323 | /* Mark it as dead before this insn. */ |
916b1701 | 2324 | SET_REGNO_REG_SET (dead, regno); |
d7429b6a RK |
2325 | |
2326 | /* A hard reg in a wide mode may really be multiple registers. | |
2327 | If so, mark all of them just like the first. */ | |
2328 | if (regno < FIRST_PSEUDO_REGISTER) | |
2329 | { | |
2330 | int n; | |
2331 | ||
2332 | /* Nothing below is needed for the stack pointer; get out asap. | |
2333 | Eg, log links aren't needed, since combine won't use them. */ | |
2334 | if (regno == STACK_POINTER_REGNUM) | |
2335 | return; | |
2336 | ||
2337 | n = HARD_REGNO_NREGS (regno, GET_MODE (reg)); | |
2338 | while (--n > 0) | |
2339 | { | |
916b1701 MM |
2340 | int regno_n = regno + n; |
2341 | int needed_regno = REGNO_REG_SET_P (needed, regno_n); | |
d7429b6a | 2342 | if (significant) |
916b1701 | 2343 | SET_REGNO_REG_SET (significant, regno_n); |
cb9e8ad1 | 2344 | |
916b1701 MM |
2345 | SET_REGNO_REG_SET (dead, regno_n); |
2346 | some_needed |= needed_regno; | |
2347 | some_not_needed |= ! needed_regno; | |
d7429b6a RK |
2348 | } |
2349 | } | |
2350 | /* Additional data to record if this is the final pass. */ | |
2351 | if (insn) | |
2352 | { | |
2353 | register rtx y = reg_next_use[regno]; | |
2354 | register int blocknum = BLOCK_NUM (insn); | |
2355 | ||
2356 | /* If this is a hard reg, record this function uses the reg. */ | |
2357 | ||
2358 | if (regno < FIRST_PSEUDO_REGISTER) | |
2359 | { | |
2360 | register int i; | |
2361 | int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)); | |
2362 | ||
2363 | for (i = regno; i < endregno; i++) | |
2364 | { | |
93514916 JW |
2365 | /* The next use is no longer "next", since a store |
2366 | intervenes. */ | |
2367 | reg_next_use[i] = 0; | |
2368 | ||
d7429b6a | 2369 | regs_ever_live[i] = 1; |
b1f21e0a | 2370 | REG_N_SETS (i)++; |
d7429b6a RK |
2371 | } |
2372 | } | |
2373 | else | |
2374 | { | |
93514916 JW |
2375 | /* The next use is no longer "next", since a store |
2376 | intervenes. */ | |
2377 | reg_next_use[regno] = 0; | |
2378 | ||
d7429b6a RK |
2379 | /* Keep track of which basic blocks each reg appears in. */ |
2380 | ||
b1f21e0a MM |
2381 | if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN) |
2382 | REG_BASIC_BLOCK (regno) = blocknum; | |
2383 | else if (REG_BASIC_BLOCK (regno) != blocknum) | |
2384 | REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL; | |
d7429b6a RK |
2385 | |
2386 | /* Count (weighted) references, stores, etc. This counts a | |
2387 | register twice if it is modified, but that is correct. */ | |
b1f21e0a | 2388 | REG_N_SETS (regno)++; |
d7429b6a | 2389 | |
b1f21e0a | 2390 | REG_N_REFS (regno) += loop_depth; |
d7429b6a RK |
2391 | |
2392 | /* The insns where a reg is live are normally counted | |
2393 | elsewhere, but we want the count to include the insn | |
2394 | where the reg is set, and the normal counting mechanism | |
2395 | would not count it. */ | |
b1f21e0a | 2396 | REG_LIVE_LENGTH (regno)++; |
d7429b6a RK |
2397 | } |
2398 | ||
cb9e8ad1 | 2399 | if (! some_not_needed) |
d7429b6a RK |
2400 | { |
2401 | /* Make a logical link from the next following insn | |
2402 | that uses this register, back to this insn. | |
2403 | The following insns have already been processed. | |
2404 | ||
2405 | We don't build a LOG_LINK for hard registers containing | |
2406 | in ASM_OPERANDs. If these registers get replaced, | |
2407 | we might wind up changing the semantics of the insn, | |
2408 | even if reload can make what appear to be valid assignments | |
2409 | later. */ | |
2410 | if (y && (BLOCK_NUM (y) == blocknum) | |
2411 | && (regno >= FIRST_PSEUDO_REGISTER | |
2412 | || asm_noperands (PATTERN (y)) < 0)) | |
2413 | LOG_LINKS (y) | |
38a448ca | 2414 | = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y)); |
d7429b6a RK |
2415 | } |
2416 | else if (! some_needed) | |
2417 | { | |
2418 | /* Note that dead stores have already been deleted when possible | |
2419 | If we get here, we have found a dead store that cannot | |
2420 | be eliminated (because the same insn does something useful). | |
2421 | Indicate this by marking the reg being set as dying here. */ | |
2422 | REG_NOTES (insn) | |
38a448ca | 2423 | = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn)); |
b1f21e0a | 2424 | REG_N_DEATHS (REGNO (reg))++; |
d7429b6a RK |
2425 | } |
2426 | else | |
2427 | { | |
2428 | /* This is a case where we have a multi-word hard register | |
2429 | and some, but not all, of the words of the register are | |
2430 | needed in subsequent insns. Write REG_UNUSED notes | |
2431 | for those parts that were not needed. This case should | |
2432 | be rare. */ | |
2433 | ||
2434 | int i; | |
2435 | ||
2436 | for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; | |
2437 | i >= 0; i--) | |
916b1701 | 2438 | if (!REGNO_REG_SET_P (needed, regno + i)) |
d7429b6a | 2439 | REG_NOTES (insn) |
38a448ca RH |
2440 | = gen_rtx_EXPR_LIST (REG_UNUSED, |
2441 | gen_rtx_REG (reg_raw_mode[regno + i], | |
2442 | regno + i), | |
2443 | REG_NOTES (insn)); | |
d7429b6a RK |
2444 | } |
2445 | } | |
2446 | } | |
8244fc4f RS |
2447 | else if (GET_CODE (reg) == REG) |
2448 | reg_next_use[regno] = 0; | |
d7429b6a RK |
2449 | |
2450 | /* If this is the last pass and this is a SCRATCH, show it will be dying | |
2451 | here and count it. */ | |
2452 | else if (GET_CODE (reg) == SCRATCH && insn != 0) | |
2453 | { | |
2454 | REG_NOTES (insn) | |
38a448ca | 2455 | = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn)); |
d7429b6a RK |
2456 | num_scratch++; |
2457 | } | |
2458 | } | |
2459 | \f | |
2460 | #ifdef AUTO_INC_DEC | |
2461 | ||
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference, i.e. rewrite the address to use PRE_INC/POST_INC/PRE_DEC/
   POST_DEC and delete (or rewrite) the separate add instruction.  */

static void
find_auto_inc (needed, x, insn)
     regset needed;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  /* Strip a constant displacement so ADDR is the bare base register and
     OFFSET is the displacement; OFFSET distinguishes pre- from post-.  */
  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) == REG)
    {
      register rtx y;
      register int size = GET_MODE_SIZE (GET_MODE (x));
      rtx use;
      rtx incr;
      int regno = REGNO (addr);

      /* Is the next use an increment that might make auto-increment? */
      if ((incr = reg_next_use[regno]) != 0
	  && (set = single_set (incr)) != 0
	  && GET_CODE (set) == SET
	  && BLOCK_NUM (incr) == BLOCK_NUM (insn)
	  /* Can't add side effects to jumps; if reg is spilled and
	     reloaded, there's no way to store back the altered value.  */
	  && GET_CODE (insn) != JUMP_INSN
	  && (y = SET_SRC (set), GET_CODE (y) == PLUS)
	  && XEXP (y, 0) == addr
	  && GET_CODE (XEXP (y, 1)) == CONST_INT
	  /* The increment amount and displacement must match one of the
	     addressing modes this target actually supports.  */
	  && (0
#ifdef HAVE_POST_INCREMENT
	      || (INTVAL (XEXP (y, 1)) == size && offset == 0)
#endif
#ifdef HAVE_POST_DECREMENT
	      || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
#endif
#ifdef HAVE_PRE_INCREMENT
	      || (INTVAL (XEXP (y, 1)) == size && offset == size)
#endif
#ifdef HAVE_PRE_DECREMENT
	      || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
#endif
	      )
	  /* Make sure this reg appears only once in this insn.  */
	  && (use = find_use_as_address (PATTERN (insn), addr, offset),
	      use != 0 && use != (rtx) 1))
	{
	  rtx q = SET_DEST (set);
	  enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
				    ? (offset ? PRE_INC : POST_INC)
				    : (offset ? PRE_DEC : POST_DEC));

	  if (dead_or_set_p (incr, addr))
	    {
	      /* This is the simple case.  Try to make the auto-inc.  If
		 we can't, we are done.  Otherwise, we will do any
		 needed updates below.  */
	      if (! validate_change (insn, &XEXP (x, 0),
				     gen_rtx_fmt_e (inc_code, Pmode, addr),
				     0))
		return;
	    }
	  else if (GET_CODE (q) == REG
		   /* PREV_INSN used here to check the semi-open interval
		      [insn,incr).  */
		   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
		   /* We must also check for sets of q as q may be
		      a call clobbered hard register and there may
		      be a call between PREV_INSN (insn) and incr.  */
		   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
	    {
	      /* We have *p followed sometime later by q = p+size.
		 Both p and q must be live afterward,
		 and q is not used between INSN and its assignment.
		 Change it to q = p, ...*q..., q = q+size.
		 Then fall into the usual case.  */
	      rtx insns, temp;

	      start_sequence ();
	      emit_move_insn (q, addr);
	      insns = get_insns ();
	      end_sequence ();

	      /* If anything in INSNS have UID's that don't fit within the
		 extra space we allocate earlier, we can't make this auto-inc.
		 This should never happen.  */
	      for (temp = insns; temp; temp = NEXT_INSN (temp))
		{
		  if (INSN_UID (temp) > max_uid_for_flow)
		    return;
		  BLOCK_NUM (temp) = BLOCK_NUM (insn);
		}

	      /* If we can't make the auto-inc, or can't make the
		 replacement into Y, exit.  There's no point in making
		 the change below if we can't do the auto-inc and doing
		 so is not correct in the pre-inc case.  */

	      /* Both changes are queued (last arg 1) and committed
		 atomically by apply_change_group.  */
	      validate_change (insn, &XEXP (x, 0),
			       gen_rtx_fmt_e (inc_code, Pmode, q),
			       1);
	      validate_change (incr, &XEXP (y, 0), q, 1);
	      if (! apply_change_group ())
		return;

	      /* We now know we'll be doing this change, so emit the
		 new insn(s) and do the updates.  */
	      emit_insns_before (insns, insn);

	      /* Keep the basic-block head pointer accurate if INSN was
		 the first insn of its block.  */
	      if (basic_block_head[BLOCK_NUM (insn)] == insn)
		basic_block_head[BLOCK_NUM (insn)] = insns;

	      /* INCR will become a NOTE and INSN won't contain a
		 use of ADDR.  If a use of ADDR was just placed in
		 the insn before INSN, make that the next use.
		 Otherwise, invalidate it.  */
	      if (GET_CODE (PREV_INSN (insn)) == INSN
		  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
		  && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
		reg_next_use[regno] = PREV_INSN (insn);
	      else
		reg_next_use[regno] = 0;

	      /* From here on, treat Q as the register being incremented.  */
	      addr = q;
	      regno = REGNO (q);

	      /* REGNO is now used in INCR which is below INSN, but
		 it previously wasn't live here.  If we don't mark
		 it as needed, we'll put a REG_DEAD note for it
		 on this insn, which is incorrect.  */
	      SET_REGNO_REG_SET (needed, regno);

	      /* If there are any calls between INSN and INCR, show
		 that REGNO now crosses them.  */
	      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
		if (GET_CODE (temp) == CALL_INSN)
		  REG_N_CALLS_CROSSED (regno)++;
	    }
	  else
	    return;

	  /* If we haven't returned, it means we were able to make the
	     auto-inc, so update the status.  First, record that this insn
	     has an implicit side effect.  */

	  REG_NOTES (insn)
	    = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));

	  /* Modify the old increment-insn to simply copy
	     the already-incremented value of our register.  */
	  if (! validate_change (incr, &SET_SRC (set), addr, 0))
	    abort ();

	  /* If that makes it a no-op (copying the register into itself) delete
	     it so it won't appear to be a "use" and a "set" of this
	     register.  */
	  if (SET_DEST (set) == addr)
	    {
	      PUT_CODE (incr, NOTE);
	      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (incr) = 0;
	    }

	  if (regno >= FIRST_PSEUDO_REGISTER)
	    {
	      /* Count an extra reference to the reg.  When a reg is
		 incremented, spilling it is worse, so we want to make
		 that less likely.  */
	      REG_N_REFS (regno) += loop_depth;

	      /* Count the increment as a setting of the register,
		 even though it isn't a SET in rtl.  */
	      REG_N_SETS (regno)++;
	    }
	}
    }
}
2648 | #endif /* AUTO_INC_DEC */ | |
2649 | \f | |
/* Scan expression X and store a 1-bit in LIVE for each reg it uses.
   This is done assuming the registers needed from X
   are those that have 1-bits in NEEDED.

   On the final pass, FINAL is 1.  This means try for autoincrement
   and count the uses and deaths of each pseudo-reg.

   INSN is the containing instruction.  If INSN is dead, this function is not
   called.  */

static void
mark_used_regs (needed, live, x, final, insn)
     regset needed;
     regset live;
     rtx x;
     int final;
     rtx insn;
{
  register RTX_CODE code;
  register int regno;
  int i;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and labels contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

#ifdef HAVE_cc0
    case CC0:
      cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
      return;

    case MEM:
      /* Invalidate the data for the last MEM stored, but only if MEM is
	 something that can be stored into.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	; /* needn't clear last_mem_set */
      else
	last_mem_set = 0;

#ifdef AUTO_INC_DEC
      if (final)
	find_auto_inc (needed, x, insn);
#endif
      break;

    case SUBREG:
      /* A paradoxical or narrowing SUBREG of a pseudo means the pseudo is
	 accessed in a different size; record that for the register
	 allocator.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
	  && (GET_MODE_SIZE (GET_MODE (x))
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize */
      if (GET_CODE (x) != REG)
	{
	  mark_used_regs (needed, live, x, final, insn);
	  return;
	}

      /* ... fall through ... */

    case REG:
      /* See a register other than being set
	 => mark it as needed.  */

      regno = REGNO (x);
      {
	int some_needed = REGNO_REG_SET_P (needed, regno);
	int some_not_needed = ! some_needed;

	SET_REGNO_REG_SET (live, regno);

	/* A hard reg in a wide mode may really be multiple registers.
	   If so, mark all of them just like the first.  */
	if (regno < FIRST_PSEUDO_REGISTER)
	  {
	    int n;

	    /* For stack ptr or fixed arg pointer,
	       nothing below can be necessary, so waste no more time.  */
	    if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		|| regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		|| regno == FRAME_POINTER_REGNUM)
	      {
		/* If this is a register we are going to try to eliminate,
		   don't mark it live here.  If we are successful in
		   eliminating it, it need not be live unless it is used for
		   pseudos, in which case it will have been set live when
		   it was allocated to the pseudos.  If the register will not
		   be eliminated, reload will set it live at that point.  */

		if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
		  regs_ever_live[regno] = 1;
		return;
	      }
	    /* No death notes for global register variables;
	       their values are live after this function exits.  */
	    if (global_regs[regno])
	      {
		if (final)
		  reg_next_use[regno] = insn;
		return;
	      }

	    /* Mark every word of a multi-word hard register and fold its
	       neededness into SOME_NEEDED/SOME_NOT_NEEDED.  */
	    n = HARD_REGNO_NREGS (regno, GET_MODE (x));
	    while (--n > 0)
	      {
		int regno_n = regno + n;
		int needed_regno = REGNO_REG_SET_P (needed, regno_n);

		SET_REGNO_REG_SET (live, regno_n);
		some_needed |= needed_regno;
		some_not_needed |= ! needed_regno;
	      }
	  }
	if (final)
	  {
	    /* Record where each reg is used, so when the reg
	       is set we know the next insn that uses it.  */

	    reg_next_use[regno] = insn;

	    if (regno < FIRST_PSEUDO_REGISTER)
	      {
		/* If a hard reg is being used,
		   record that this function does use it.  */

		i = HARD_REGNO_NREGS (regno, GET_MODE (x));
		if (i == 0)
		  i = 1;
		do
		  regs_ever_live[regno + --i] = 1;
		while (i > 0);
	      }
	    else
	      {
		/* Keep track of which basic block each reg appears in.  */

		register int blocknum = BLOCK_NUM (insn);

		if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
		  REG_BASIC_BLOCK (regno) = blocknum;
		else if (REG_BASIC_BLOCK (regno) != blocknum)
		  REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

		/* Count (weighted) number of uses of each reg.  */

		REG_N_REFS (regno) += loop_depth;
	      }

	    /* Record and count the insns in which a reg dies.
	       If it is used in this insn and was dead below the insn
	       then it dies in this insn.  If it was set in this insn,
	       we do not make a REG_DEAD note; likewise if we already
	       made such a note.  */

	    if (some_not_needed
		&& ! dead_or_set_p (insn, x)
#if 0
		&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
#endif
		)
	      {
		/* Check for the case where the register dying partially
		   overlaps the register set by this insn.  */
		if (regno < FIRST_PSEUDO_REGISTER
		    && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
		  {
		    int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
		    while (--n >= 0)
		      some_needed |= dead_or_set_regno_p (insn, regno + n);
		  }

		/* If none of the words in X is needed, make a REG_DEAD
		   note.  Otherwise, we must make partial REG_DEAD notes.  */
		if (! some_needed)
		  {
		    REG_NOTES (insn)
		      = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
		    REG_N_DEATHS (regno)++;
		  }
		else
		  {
		    int i;

		    /* Don't make a REG_DEAD note for a part of a register
		       that is set in the insn.  */

		    for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
			 i >= 0; i--)
		      if (!REGNO_REG_SET_P (needed, regno + i)
			  && ! dead_or_set_regno_p (insn, regno + i))
			REG_NOTES (insn)
			  = gen_rtx_EXPR_LIST (REG_DEAD,
					       gen_rtx_REG (reg_raw_mode[regno + i],
							    regno + i),
					       REG_NOTES (insn));
		  }
	      }
	  }
      }
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
#ifdef AUTO_INC_DEC
	    if (final)
	      find_auto_inc (needed, testreg, insn);
#endif
	    mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    /* As in the SUBREG case above, note size-changing accesses
	       to pseudos.  */
	    if (GET_CODE (testreg) == SUBREG
		&& GET_CODE (SUBREG_REG (testreg)) == REG
		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
		&& (GET_MODE_SIZE (GET_MODE (testreg))
		    != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
	      REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (GET_CODE (testreg) == REG
		&& (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& regno != HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  /* We used to exclude global_regs here, but that seems wrong.
	     Storing in them is like storing in mem.  */
	  {
	    mark_used_regs (needed, live, SET_SRC (x), final, insn);
	    if (mark_dest)
	      mark_used_regs (needed, live, SET_DEST (x), final, insn);
	    return;
	  }
      }
      break;

    case RETURN:
      /* If exiting needs the right stack value, consider this insn as
	 using the stack pointer.  In any event, consider it as using
	 all global registers and all registers used by return.  */

#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
	  || (! FRAME_POINTER_REQUIRED
	      && ! current_function_calls_alloca
	      && flag_omit_frame_pointer))
#endif
	SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (global_regs[i]
#ifdef EPILOGUE_USES
	    || EPILOGUE_USES (i)
#endif
	    )
	  SET_REGNO_REG_SET (live, i);
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (needed, live, XEXP (x, i), final, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
	  }
      }
  }
}
3008 | \f | |
3009 | #ifdef AUTO_INC_DEC | |
3010 | ||
3011 | static int | |
3012 | try_pre_increment_1 (insn) | |
3013 | rtx insn; | |
3014 | { | |
3015 | /* Find the next use of this reg. If in same basic block, | |
3016 | make it do pre-increment or pre-decrement if appropriate. */ | |
956d6950 | 3017 | rtx x = single_set (insn); |
5f4f0e22 | 3018 | HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1) |
d7429b6a RK |
3019 | * INTVAL (XEXP (SET_SRC (x), 1))); |
3020 | int regno = REGNO (SET_DEST (x)); | |
3021 | rtx y = reg_next_use[regno]; | |
3022 | if (y != 0 | |
3023 | && BLOCK_NUM (y) == BLOCK_NUM (insn) | |
89861c38 | 3024 | /* Don't do this if the reg dies, or gets set in y; a standard addressing |
0f41302f | 3025 | mode would be better. */ |
89861c38 | 3026 | && ! dead_or_set_p (y, SET_DEST (x)) |
956d6950 | 3027 | && try_pre_increment (y, SET_DEST (x), amount)) |
d7429b6a RK |
3028 | { |
3029 | /* We have found a suitable auto-increment | |
3030 | and already changed insn Y to do it. | |
3031 | So flush this increment-instruction. */ | |
3032 | PUT_CODE (insn, NOTE); | |
3033 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
3034 | NOTE_SOURCE_FILE (insn) = 0; | |
3035 | /* Count a reference to this reg for the increment | |
3036 | insn we are deleting. When a reg is incremented. | |
3037 | spilling it is worse, so we want to make that | |
3038 | less likely. */ | |
3039 | if (regno >= FIRST_PSEUDO_REGISTER) | |
3040 | { | |
b1f21e0a MM |
3041 | REG_N_REFS (regno) += loop_depth; |
3042 | REG_N_SETS (regno)++; | |
d7429b6a RK |
3043 | } |
3044 | return 1; | |
3045 | } | |
3046 | return 0; | |
3047 | } | |
3048 | ||
3049 | /* Try to change INSN so that it does pre-increment or pre-decrement | |
3050 | addressing on register REG in order to add AMOUNT to REG. | |
3051 | AMOUNT is negative for pre-decrement. | |
3052 | Returns 1 if the change could be made. | |
3053 | This checks all about the validity of the result of modifying INSN. */ | |
3054 | ||
3055 | static int | |
3056 | try_pre_increment (insn, reg, amount) | |
3057 | rtx insn, reg; | |
5f4f0e22 | 3058 | HOST_WIDE_INT amount; |
d7429b6a RK |
3059 | { |
3060 | register rtx use; | |
3061 | ||
3062 | /* Nonzero if we can try to make a pre-increment or pre-decrement. | |
3063 | For example, addl $4,r1; movl (r1),... can become movl +(r1),... */ | |
3064 | int pre_ok = 0; | |
3065 | /* Nonzero if we can try to make a post-increment or post-decrement. | |
3066 | For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,... | |
3067 | It is possible for both PRE_OK and POST_OK to be nonzero if the machine | |
3068 | supports both pre-inc and post-inc, or both pre-dec and post-dec. */ | |
3069 | int post_ok = 0; | |
3070 | ||
3071 | /* Nonzero if the opportunity actually requires post-inc or post-dec. */ | |
3072 | int do_post = 0; | |
3073 | ||
3074 | /* From the sign of increment, see which possibilities are conceivable | |
3075 | on this target machine. */ | |
3076 | #ifdef HAVE_PRE_INCREMENT | |
3077 | if (amount > 0) | |
3078 | pre_ok = 1; | |
3079 | #endif | |
3080 | #ifdef HAVE_POST_INCREMENT | |
3081 | if (amount > 0) | |
3082 | post_ok = 1; | |
3083 | #endif | |
3084 | ||
3085 | #ifdef HAVE_PRE_DECREMENT | |
3086 | if (amount < 0) | |
3087 | pre_ok = 1; | |
3088 | #endif | |
3089 | #ifdef HAVE_POST_DECREMENT | |
3090 | if (amount < 0) | |
3091 | post_ok = 1; | |
3092 | #endif | |
3093 | ||
3094 | if (! (pre_ok || post_ok)) | |
3095 | return 0; | |
3096 | ||
3097 | /* It is not safe to add a side effect to a jump insn | |
3098 | because if the incremented register is spilled and must be reloaded | |
3099 | there would be no way to store the incremented value back in memory. */ | |
3100 | ||
3101 | if (GET_CODE (insn) == JUMP_INSN) | |
3102 | return 0; | |
3103 | ||
3104 | use = 0; | |
3105 | if (pre_ok) | |
3106 | use = find_use_as_address (PATTERN (insn), reg, 0); | |
3107 | if (post_ok && (use == 0 || use == (rtx) 1)) | |
3108 | { | |
3109 | use = find_use_as_address (PATTERN (insn), reg, -amount); | |
3110 | do_post = 1; | |
3111 | } | |
3112 | ||
3113 | if (use == 0 || use == (rtx) 1) | |
3114 | return 0; | |
3115 | ||
3116 | if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount)) | |
3117 | return 0; | |
3118 | ||
a0fbc3a9 SC |
3119 | /* See if this combination of instruction and addressing mode exists. */ |
3120 | if (! validate_change (insn, &XEXP (use, 0), | |
38a448ca RH |
3121 | gen_rtx_fmt_e (amount > 0 |
3122 | ? (do_post ? POST_INC : PRE_INC) | |
3123 | : (do_post ? POST_DEC : PRE_DEC), | |
3124 | Pmode, reg), 0)) | |
a0fbc3a9 | 3125 | return 0; |
d7429b6a RK |
3126 | |
3127 | /* Record that this insn now has an implicit side effect on X. */ | |
38a448ca | 3128 | REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn)); |
d7429b6a RK |
3129 | return 1; |
3130 | } | |
3131 | ||
3132 | #endif /* AUTO_INC_DEC */ | |
3133 | \f | |
3134 | /* Find the place in the rtx X where REG is used as a memory address. | |
3135 | Return the MEM rtx that so uses it. | |
3136 | If PLUSCONST is nonzero, search instead for a memory address equivalent to | |
3137 | (plus REG (const_int PLUSCONST)). | |
3138 | ||
3139 | If such an address does not appear, return 0. | |
3140 | If REG appears more than once, or is used other than in such an address, | |
3141 | return (rtx)1. */ | |
3142 | ||
8c660648 | 3143 | rtx |
d7429b6a RK |
3144 | find_use_as_address (x, reg, plusconst) |
3145 | register rtx x; | |
3146 | rtx reg; | |
e658434c | 3147 | HOST_WIDE_INT plusconst; |
d7429b6a RK |
3148 | { |
3149 | enum rtx_code code = GET_CODE (x); | |
3150 | char *fmt = GET_RTX_FORMAT (code); | |
3151 | register int i; | |
3152 | register rtx value = 0; | |
3153 | register rtx tem; | |
3154 | ||
3155 | if (code == MEM && XEXP (x, 0) == reg && plusconst == 0) | |
3156 | return x; | |
3157 | ||
3158 | if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS | |
3159 | && XEXP (XEXP (x, 0), 0) == reg | |
3160 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3161 | && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst) | |
3162 | return x; | |
3163 | ||
3164 | if (code == SIGN_EXTRACT || code == ZERO_EXTRACT) | |
3165 | { | |
3166 | /* If REG occurs inside a MEM used in a bit-field reference, | |
3167 | that is unacceptable. */ | |
3168 | if (find_use_as_address (XEXP (x, 0), reg, 0) != 0) | |
6fa5c106 | 3169 | return (rtx) (HOST_WIDE_INT) 1; |
d7429b6a RK |
3170 | } |
3171 | ||
3172 | if (x == reg) | |
6fa5c106 | 3173 | return (rtx) (HOST_WIDE_INT) 1; |
d7429b6a RK |
3174 | |
3175 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3176 | { | |
3177 | if (fmt[i] == 'e') | |
3178 | { | |
3179 | tem = find_use_as_address (XEXP (x, i), reg, plusconst); | |
3180 | if (value == 0) | |
3181 | value = tem; | |
3182 | else if (tem != 0) | |
6fa5c106 | 3183 | return (rtx) (HOST_WIDE_INT) 1; |
d7429b6a RK |
3184 | } |
3185 | if (fmt[i] == 'E') | |
3186 | { | |
3187 | register int j; | |
3188 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3189 | { | |
3190 | tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst); | |
3191 | if (value == 0) | |
3192 | value = tem; | |
3193 | else if (tem != 0) | |
6fa5c106 | 3194 | return (rtx) (HOST_WIDE_INT) 1; |
d7429b6a RK |
3195 | } |
3196 | } | |
3197 | } | |
3198 | ||
3199 | return value; | |
3200 | } | |
3201 | \f | |
3202 | /* Write information about registers and basic blocks into FILE. | |
3203 | This is part of making a debugging dump. */ | |
3204 | ||
3205 | void | |
3206 | dump_flow_info (file) | |
3207 | FILE *file; | |
3208 | { | |
3209 | register int i; | |
3210 | static char *reg_class_names[] = REG_CLASS_NAMES; | |
3211 | ||
3212 | fprintf (file, "%d registers.\n", max_regno); | |
3213 | ||
3214 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) | |
b1f21e0a | 3215 | if (REG_N_REFS (i)) |
d7429b6a | 3216 | { |
e4600702 | 3217 | enum reg_class class, altclass; |
d7429b6a | 3218 | fprintf (file, "\nRegister %d used %d times across %d insns", |
b1f21e0a MM |
3219 | i, REG_N_REFS (i), REG_LIVE_LENGTH (i)); |
3220 | if (REG_BASIC_BLOCK (i) >= 0) | |
3221 | fprintf (file, " in block %d", REG_BASIC_BLOCK (i)); | |
6fc4610b MM |
3222 | if (REG_N_SETS (i)) |
3223 | fprintf (file, "; set %d time%s", REG_N_SETS (i), | |
3224 | (REG_N_SETS (i) == 1) ? "" : "s"); | |
3225 | if (REG_USERVAR_P (regno_reg_rtx[i])) | |
3226 | fprintf (file, "; user var"); | |
b1f21e0a MM |
3227 | if (REG_N_DEATHS (i) != 1) |
3228 | fprintf (file, "; dies in %d places", REG_N_DEATHS (i)); | |
3229 | if (REG_N_CALLS_CROSSED (i) == 1) | |
d7429b6a | 3230 | fprintf (file, "; crosses 1 call"); |
b1f21e0a MM |
3231 | else if (REG_N_CALLS_CROSSED (i)) |
3232 | fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i)); | |
d7429b6a RK |
3233 | if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD) |
3234 | fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i)); | |
3235 | class = reg_preferred_class (i); | |
e4600702 RK |
3236 | altclass = reg_alternate_class (i); |
3237 | if (class != GENERAL_REGS || altclass != ALL_REGS) | |
d7429b6a | 3238 | { |
e4600702 RK |
3239 | if (altclass == ALL_REGS || class == ALL_REGS) |
3240 | fprintf (file, "; pref %s", reg_class_names[(int) class]); | |
3241 | else if (altclass == NO_REGS) | |
d7429b6a RK |
3242 | fprintf (file, "; %s or none", reg_class_names[(int) class]); |
3243 | else | |
e4600702 RK |
3244 | fprintf (file, "; pref %s, else %s", |
3245 | reg_class_names[(int) class], | |
3246 | reg_class_names[(int) altclass]); | |
d7429b6a RK |
3247 | } |
3248 | if (REGNO_POINTER_FLAG (i)) | |
3249 | fprintf (file, "; pointer"); | |
3250 | fprintf (file, ".\n"); | |
3251 | } | |
3252 | fprintf (file, "\n%d basic blocks.\n", n_basic_blocks); | |
3253 | for (i = 0; i < n_basic_blocks; i++) | |
3254 | { | |
3255 | register rtx head, jump; | |
3256 | register int regno; | |
3257 | fprintf (file, "\nBasic block %d: first insn %d, last %d.\n", | |
3258 | i, | |
3259 | INSN_UID (basic_block_head[i]), | |
3260 | INSN_UID (basic_block_end[i])); | |
3261 | /* The control flow graph's storage is freed | |
3262 | now when flow_analysis returns. | |
3263 | Don't try to print it if it is gone. */ | |
3264 | if (basic_block_drops_in) | |
3265 | { | |
3266 | fprintf (file, "Reached from blocks: "); | |
3267 | head = basic_block_head[i]; | |
3268 | if (GET_CODE (head) == CODE_LABEL) | |
3269 | for (jump = LABEL_REFS (head); | |
3270 | jump != head; | |
3271 | jump = LABEL_NEXTREF (jump)) | |
3272 | { | |
3273 | register int from_block = BLOCK_NUM (CONTAINING_INSN (jump)); | |
3274 | fprintf (file, " %d", from_block); | |
3275 | } | |
3276 | if (basic_block_drops_in[i]) | |
3277 | fprintf (file, " previous"); | |
3278 | } | |
3279 | fprintf (file, "\nRegisters live at start:"); | |
3280 | for (regno = 0; regno < max_regno; regno++) | |
916b1701 MM |
3281 | if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno)) |
3282 | fprintf (file, " %d", regno); | |
d7429b6a RK |
3283 | fprintf (file, "\n"); |
3284 | } | |
3285 | fprintf (file, "\n"); | |
3286 | } | |
3e28fe44 MM |
3287 | |
3288 | \f | |
3289 | /* Like print_rtl, but also print out live information for the start of each | |
3290 | basic block. */ | |
3291 | ||
3292 | void | |
3293 | print_rtl_with_bb (outf, rtx_first) | |
3294 | FILE *outf; | |
3295 | rtx rtx_first; | |
3296 | { | |
3297 | register rtx tmp_rtx; | |
3298 | ||
3299 | if (rtx_first == 0) | |
3300 | fprintf (outf, "(nil)\n"); | |
3301 | ||
3302 | else | |
3303 | { | |
3304 | int i, bb; | |
3305 | enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB }; | |
3306 | int max_uid = get_max_uid (); | |
adfc539e PDM |
3307 | int *start = (int *) alloca (max_uid * sizeof (int)); |
3308 | int *end = (int *) alloca (max_uid * sizeof (int)); | |
2a92c071 GS |
3309 | enum bb_state *in_bb_p = (enum bb_state *) |
3310 | alloca (max_uid * sizeof (enum bb_state)); | |
3e28fe44 MM |
3311 | |
3312 | for (i = 0; i < max_uid; i++) | |
3313 | { | |
3314 | start[i] = end[i] = -1; | |
3315 | in_bb_p[i] = NOT_IN_BB; | |
3316 | } | |
3317 | ||
3318 | for (i = n_basic_blocks-1; i >= 0; i--) | |
3319 | { | |
3320 | rtx x; | |
3321 | start[INSN_UID (basic_block_head[i])] = i; | |
3322 | end[INSN_UID (basic_block_end[i])] = i; | |
3323 | for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x)) | |
3324 | { | |
1791f8e2 MM |
3325 | in_bb_p[ INSN_UID(x)] |
3326 | = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB) | |
3b33f637 | 3327 | ? IN_ONE_BB : IN_MULTIPLE_BB; |
3e28fe44 MM |
3328 | if (x == basic_block_end[i]) |
3329 | break; | |
3330 | } | |
3331 | } | |
3332 | ||
3333 | for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx)) | |
3334 | { | |
b707b450 R |
3335 | int did_output; |
3336 | ||
3e28fe44 MM |
3337 | if ((bb = start[INSN_UID (tmp_rtx)]) >= 0) |
3338 | { | |
3339 | fprintf (outf, ";; Start of basic block %d, registers live:", | |
3340 | bb); | |
3341 | ||
3342 | EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i, | |
3343 | { | |
3344 | fprintf (outf, " %d", i); | |
3345 | if (i < FIRST_PSEUDO_REGISTER) | |
3346 | fprintf (outf, " [%s]", | |
3347 | reg_names[i]); | |
3348 | }); | |
3349 | putc ('\n', outf); | |
3350 | } | |
3351 | ||
3352 | if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB | |
3353 | && GET_CODE (tmp_rtx) != NOTE | |
3354 | && GET_CODE (tmp_rtx) != BARRIER) | |
3355 | fprintf (outf, ";; Insn is not within a basic block\n"); | |
3356 | else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB) | |
3357 | fprintf (outf, ";; Insn is in multiple basic blocks\n"); | |
3358 | ||
b707b450 | 3359 | did_output = print_rtl_single (outf, tmp_rtx); |
3e28fe44 MM |
3360 | |
3361 | if ((bb = end[INSN_UID (tmp_rtx)]) >= 0) | |
3362 | fprintf (outf, ";; End of basic block %d\n", bb); | |
3363 | ||
b707b450 | 3364 | if (did_output) |
9ec36da5 | 3365 | putc ('\n', outf); |
3e28fe44 MM |
3366 | } |
3367 | } | |
3368 | } | |
5ece9746 JL |
3369 | |
3370 | \f | |
3371 | /* Integer list support. */ | |
3372 | ||
3373 | /* Allocate a node from list *HEAD_PTR. */ | |
3374 | ||
3375 | static int_list_ptr | |
3376 | alloc_int_list_node (head_ptr) | |
3377 | int_list_block **head_ptr; | |
3378 | { | |
3379 | struct int_list_block *first_blk = *head_ptr; | |
3380 | ||
3381 | if (first_blk == NULL || first_blk->nodes_left <= 0) | |
3382 | { | |
3383 | first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block)); | |
3384 | first_blk->nodes_left = INT_LIST_NODES_IN_BLK; | |
3385 | first_blk->next = *head_ptr; | |
3386 | *head_ptr = first_blk; | |
3387 | } | |
3388 | ||
3389 | first_blk->nodes_left--; | |
3390 | return &first_blk->nodes[first_blk->nodes_left]; | |
3391 | } | |
3392 | ||
3393 | /* Pointer to head of predecessor/successor block list. */ | |
3394 | static int_list_block *pred_int_list_blocks; | |
3395 | ||
3396 | /* Add a new node to integer list LIST with value VAL. | |
3397 | LIST is a pointer to a list object to allow for different implementations. | |
3398 | If *LIST is initially NULL, the list is empty. | |
3399 | The caller must not care whether the element is added to the front or | |
3400 | to the end of the list (to allow for different implementations). */ | |
3401 | ||
3402 | static int_list_ptr | |
3403 | add_int_list_node (blk_list, list, val) | |
3404 | int_list_block **blk_list; | |
3405 | int_list **list; | |
3406 | int val; | |
3407 | { | |
3408 | int_list_ptr p = alloc_int_list_node (blk_list); | |
3409 | ||
3410 | p->val = val; | |
3411 | p->next = *list; | |
3412 | *list = p; | |
3413 | return p; | |
3414 | } | |
3415 | ||
3416 | /* Free the blocks of lists at BLK_LIST. */ | |
3417 | ||
3418 | void | |
3419 | free_int_list (blk_list) | |
3420 | int_list_block **blk_list; | |
3421 | { | |
3422 | int_list_block *p, *next; | |
3423 | ||
3424 | for (p = *blk_list; p != NULL; p = next) | |
3425 | { | |
3426 | next = p->next; | |
3427 | free (p); | |
3428 | } | |
3429 | ||
3430 | /* Mark list as empty for the next function we compile. */ | |
3431 | *blk_list = NULL; | |
3432 | } | |
3433 | \f | |
3434 | /* Predecessor/successor computation. */ | |
3435 | ||
3436 | /* Mark PRED_BB a precessor of SUCC_BB, | |
3437 | and conversely SUCC_BB a successor of PRED_BB. */ | |
3438 | ||
3439 | static void | |
3440 | add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs) | |
3441 | int pred_bb; | |
3442 | int succ_bb; | |
3443 | int_list_ptr *s_preds; | |
3444 | int_list_ptr *s_succs; | |
3445 | int *num_preds; | |
3446 | int *num_succs; | |
3447 | { | |
3448 | if (succ_bb != EXIT_BLOCK) | |
3449 | { | |
3450 | add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb); | |
3451 | num_preds[succ_bb]++; | |
3452 | } | |
3453 | if (pred_bb != ENTRY_BLOCK) | |
3454 | { | |
3455 | add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb); | |
3456 | num_succs[pred_bb]++; | |
3457 | } | |
3458 | } | |
3459 | ||
3460 | /* Compute the predecessors and successors for each block. */ | |
743bb12d | 3461 | void |
5ece9746 JL |
3462 | compute_preds_succs (s_preds, s_succs, num_preds, num_succs) |
3463 | int_list_ptr *s_preds; | |
3464 | int_list_ptr *s_succs; | |
3465 | int *num_preds; | |
3466 | int *num_succs; | |
3467 | { | |
3468 | int bb, clear_local_bb_vars = 0; | |
3469 | ||
3470 | bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr)); | |
3471 | bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr)); | |
3472 | bzero ((char *) num_preds, n_basic_blocks * sizeof (int)); | |
3473 | bzero ((char *) num_succs, n_basic_blocks * sizeof (int)); | |
3474 | ||
3475 | /* This routine can be called after life analysis; in that case | |
3476 | basic_block_drops_in and uid_block_number will not be available | |
3477 | and we must recompute their values. */ | |
3478 | if (basic_block_drops_in == NULL || uid_block_number == NULL) | |
3479 | { | |
3480 | clear_local_bb_vars = 1; | |
3481 | basic_block_drops_in = (char *) alloca (n_basic_blocks); | |
3482 | uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int)); | |
3483 | ||
3484 | bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char)); | |
3485 | bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int)); | |
3486 | ||
3487 | /* Scan each basic block setting basic_block_drops_in and | |
3488 | uid_block_number as needed. */ | |
3489 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3490 | { | |
168cbdf9 | 3491 | rtx insn, stop_insn; |
5ece9746 | 3492 | |
168cbdf9 JL |
3493 | if (bb == 0) |
3494 | stop_insn = NULL_RTX; | |
3495 | else | |
3496 | stop_insn = basic_block_end[bb-1]; | |
3497 | ||
3498 | /* Look backwards from the start of this block. Stop if we | |
3499 | hit the start of the function or the end of a previous | |
3500 | block. Don't walk backwards through blocks that are just | |
3501 | deleted insns! */ | |
5ece9746 | 3502 | for (insn = PREV_INSN (basic_block_head[bb]); |
168cbdf9 JL |
3503 | insn && insn != stop_insn && GET_CODE (insn) == NOTE; |
3504 | insn = PREV_INSN (insn)) | |
5ece9746 JL |
3505 | ; |
3506 | ||
168cbdf9 JL |
3507 | /* Never set basic_block_drops_in for the first block. It is |
3508 | implicit. | |
3509 | ||
3510 | If we stopped on anything other than a BARRIER, then this | |
3511 | block drops in. */ | |
3512 | if (bb != 0) | |
3513 | basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1); | |
5ece9746 JL |
3514 | |
3515 | insn = basic_block_head[bb]; | |
3516 | while (insn) | |
3517 | { | |
3518 | BLOCK_NUM (insn) = bb; | |
3519 | if (insn == basic_block_end[bb]) | |
3520 | break; | |
3521 | insn = NEXT_INSN (insn); | |
3522 | } | |
3523 | } | |
5ece9746 | 3524 | } |
168cbdf9 | 3525 | |
5ece9746 JL |
3526 | for (bb = 0; bb < n_basic_blocks; bb++) |
3527 | { | |
3528 | rtx head; | |
3529 | rtx jump; | |
3530 | ||
3531 | head = BLOCK_HEAD (bb); | |
3532 | ||
3533 | if (GET_CODE (head) == CODE_LABEL) | |
3534 | for (jump = LABEL_REFS (head); | |
3535 | jump != head; | |
3536 | jump = LABEL_NEXTREF (jump)) | |
168cbdf9 JL |
3537 | { |
3538 | if (! INSN_DELETED_P (CONTAINING_INSN (jump)) | |
3539 | && (GET_CODE (CONTAINING_INSN (jump)) != NOTE | |
3540 | || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump)) | |
3541 | != NOTE_INSN_DELETED))) | |
3542 | add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb, | |
3543 | s_preds, s_succs, num_preds, num_succs); | |
3544 | } | |
5ece9746 JL |
3545 | |
3546 | jump = BLOCK_END (bb); | |
3547 | /* If this is a RETURN insn or a conditional jump in the last | |
3548 | basic block, or a non-jump insn in the last basic block, then | |
3549 | this block reaches the exit block. */ | |
3550 | if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN) | |
3551 | || (((GET_CODE (jump) == JUMP_INSN | |
3552 | && condjump_p (jump) && !simplejump_p (jump)) | |
3553 | || GET_CODE (jump) != JUMP_INSN) | |
3554 | && (bb == n_basic_blocks - 1))) | |
3555 | add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs); | |
3556 | ||
3557 | if (basic_block_drops_in[bb]) | |
3558 | add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs); | |
3559 | } | |
3560 | ||
3561 | add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs); | |
3562 | ||
3563 | ||
3564 | /* If we allocated any variables in temporary storage, clear out the | |
3565 | pointer to the local storage to avoid dangling pointers. */ | |
3566 | if (clear_local_bb_vars) | |
3567 | { | |
3568 | basic_block_drops_in = NULL; | |
3569 | uid_block_number = NULL; | |
3570 | ||
3571 | } | |
3572 | } | |
3573 | ||
3574 | void | |
3575 | dump_bb_data (file, preds, succs) | |
3576 | FILE *file; | |
3577 | int_list_ptr *preds; | |
3578 | int_list_ptr *succs; | |
3579 | { | |
3580 | int bb; | |
3581 | int_list_ptr p; | |
3582 | ||
3583 | fprintf (file, "BB data\n\n"); | |
3584 | for (bb = 0; bb < n_basic_blocks; bb++) | |
3585 | { | |
3586 | fprintf (file, "BB %d, start %d, end %d\n", bb, | |
3587 | INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb))); | |
3588 | fprintf (file, " preds:"); | |
3589 | for (p = preds[bb]; p != NULL; p = p->next) | |
3590 | { | |
3591 | int pred_bb = INT_LIST_VAL (p); | |
3592 | if (pred_bb == ENTRY_BLOCK) | |
3593 | fprintf (file, " entry"); | |
3594 | else | |
3595 | fprintf (file, " %d", pred_bb); | |
3596 | } | |
3597 | fprintf (file, "\n"); | |
3598 | fprintf (file, " succs:"); | |
3599 | for (p = succs[bb]; p != NULL; p = p->next) | |
3600 | { | |
3601 | int succ_bb = INT_LIST_VAL (p); | |
3602 | if (succ_bb == EXIT_BLOCK) | |
3603 | fprintf (file, " exit"); | |
3604 | else | |
3605 | fprintf (file, " %d", succ_bb); | |
3606 | } | |
3607 | fprintf (file, "\n"); | |
3608 | } | |
3609 | fprintf (file, "\n"); | |
3610 | } | |
3611 | ||
3522e0f2 JL |
3612 | void |
3613 | dump_sbitmap (file, bmap) | |
3614 | FILE *file; | |
3615 | sbitmap bmap; | |
3616 | { | |
3617 | int i,j,n; | |
3618 | int set_size = bmap->size; | |
3619 | int total_bits = bmap->n_bits; | |
3620 | ||
3621 | fprintf (file, " "); | |
3622 | for (i = n = 0; i < set_size && n < total_bits; i++) | |
3623 | { | |
3624 | for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++) | |
3625 | { | |
3626 | if (n != 0 && n % 10 == 0) | |
3627 | fprintf (file, " "); | |
3628 | fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0); | |
3629 | } | |
3630 | } | |
3631 | fprintf (file, "\n"); | |
3632 | } | |
3633 | ||
3634 | void | |
3635 | dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps) | |
3636 | FILE *file; | |
3637 | char *title, *subtitle; | |
3638 | sbitmap *bmaps; | |
3639 | int n_maps; | |
3640 | { | |
3641 | int bb; | |
3642 | ||
3643 | fprintf (file, "%s\n", title); | |
3644 | for (bb = 0; bb < n_maps; bb++) | |
3645 | { | |
3646 | fprintf (file, "%s %d\n", subtitle, bb); | |
3647 | dump_sbitmap (file, bmaps[bb]); | |
3648 | } | |
3649 | fprintf (file, "\n"); | |
3650 | } | |
3651 | ||
5ece9746 JL |
3652 | /* Free basic block data storage. */ |
3653 | ||
3654 | void | |
3655 | free_bb_mem () | |
3656 | { | |
3657 | free_int_list (&pred_int_list_blocks); | |
3658 | } | |
3659 | \f | |
3660 | /* Bitmap manipulation routines. */ | |
3661 | ||
3662 | /* Allocate a simple bitmap of N_ELMS bits. */ | |
3663 | ||
3664 | sbitmap | |
3665 | sbitmap_alloc (n_elms) | |
3666 | int n_elms; | |
3667 | { | |
3668 | int bytes, size, amt; | |
3669 | sbitmap bmap; | |
3670 | ||
3671 | size = SBITMAP_SET_SIZE (n_elms); | |
3672 | bytes = size * sizeof (SBITMAP_ELT_TYPE); | |
3673 | amt = (sizeof (struct simple_bitmap_def) | |
3674 | + bytes - sizeof (SBITMAP_ELT_TYPE)); | |
3675 | bmap = (sbitmap) xmalloc (amt); | |
3676 | bmap->n_bits = n_elms; | |
3677 | bmap->size = size; | |
3678 | bmap->bytes = bytes; | |
3679 | return bmap; | |
3680 | } | |
3681 | ||
3682 | /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */ | |
3683 | ||
3684 | sbitmap * | |
3685 | sbitmap_vector_alloc (n_vecs, n_elms) | |
3686 | int n_vecs, n_elms; | |
3687 | { | |
a9a05945 | 3688 | int i, bytes, offset, elm_bytes, size, amt, vector_bytes; |
5ece9746 JL |
3689 | sbitmap *bitmap_vector; |
3690 | ||
3691 | size = SBITMAP_SET_SIZE (n_elms); | |
3692 | bytes = size * sizeof (SBITMAP_ELT_TYPE); | |
3693 | elm_bytes = (sizeof (struct simple_bitmap_def) | |
3694 | + bytes - sizeof (SBITMAP_ELT_TYPE)); | |
a9a05945 | 3695 | vector_bytes = n_vecs * sizeof (sbitmap *); |
5ece9746 | 3696 | |
a9a05945 DE |
3697 | /* Round up `vector_bytes' to account for the alignment requirements |
3698 | of an sbitmap. One could allocate the vector-table and set of sbitmaps | |
3699 | separately, but that requires maintaining two pointers or creating | |
3700 | a cover struct to hold both pointers (so our result is still just | |
3701 | one pointer). Neither is a bad idea, but this is simpler for now. */ | |
3702 | { | |
3703 | /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */ | |
3704 | struct { char x; SBITMAP_ELT_TYPE y; } align; | |
3705 | int alignment = (char *) & align.y - & align.x; | |
3706 | vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1); | |
3707 | } | |
3708 | ||
3709 | amt = vector_bytes + (n_vecs * elm_bytes); | |
3710 | bitmap_vector = (sbitmap *) xmalloc (amt); | |
5ece9746 | 3711 | |
a9a05945 | 3712 | for (i = 0, offset = vector_bytes; |
5ece9746 JL |
3713 | i < n_vecs; |
3714 | i++, offset += elm_bytes) | |
3715 | { | |
3716 | sbitmap b = (sbitmap) ((char *) bitmap_vector + offset); | |
3717 | bitmap_vector[i] = b; | |
3718 | b->n_bits = n_elms; | |
3719 | b->size = size; | |
3720 | b->bytes = bytes; | |
3721 | } | |
3722 | ||
3723 | return bitmap_vector; | |
3724 | } | |
3725 | ||
3726 | /* Copy sbitmap SRC to DST. */ | |
3727 | ||
3728 | void | |
3729 | sbitmap_copy (dst, src) | |
3730 | sbitmap dst, src; | |
3731 | { | |
913f68c1 | 3732 | bcopy (src->elms, dst->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size); |
5ece9746 JL |
3733 | } |
3734 | ||
3735 | /* Zero all elements in a bitmap. */ | |
3736 | ||
3737 | void | |
3738 | sbitmap_zero (bmap) | |
3739 | sbitmap bmap; | |
3740 | { | |
3741 | bzero ((char *) bmap->elms, bmap->bytes); | |
3742 | } | |
3743 | ||
3744 | /* Set to ones all elements in a bitmap. */ | |
3745 | ||
3746 | void | |
3747 | sbitmap_ones (bmap) | |
3748 | sbitmap bmap; | |
3749 | { | |
3750 | memset (bmap->elms, -1, bmap->bytes); | |
3751 | } | |
3752 | ||
3753 | /* Zero a vector of N_VECS bitmaps. */ | |
3754 | ||
3755 | void | |
3756 | sbitmap_vector_zero (bmap, n_vecs) | |
3757 | sbitmap *bmap; | |
3758 | int n_vecs; | |
3759 | { | |
3760 | int i; | |
3761 | ||
3762 | for (i = 0; i < n_vecs; i++) | |
3763 | sbitmap_zero (bmap[i]); | |
3764 | } | |
3765 | ||
3766 | /* Set to ones a vector of N_VECS bitmaps. */ | |
3767 | ||
3768 | void | |
3769 | sbitmap_vector_ones (bmap, n_vecs) | |
3770 | sbitmap *bmap; | |
3771 | int n_vecs; | |
3772 | { | |
3773 | int i; | |
3774 | ||
3775 | for (i = 0; i < n_vecs; i++) | |
3776 | sbitmap_ones (bmap[i]); | |
3777 | } | |
3778 | ||
3779 | /* Set DST to be A union (B - C). | |
3780 | DST = A | (B & ~C). | |
3781 | Return non-zero if any change is made. */ | |
3782 | ||
3783 | int | |
3784 | sbitmap_union_of_diff (dst, a, b, c) | |
3785 | sbitmap dst, a, b, c; | |
3786 | { | |
3787 | int i,changed; | |
3788 | sbitmap_ptr dstp, ap, bp, cp; | |
3789 | ||
3790 | changed = 0; | |
3791 | dstp = dst->elms; | |
3792 | ap = a->elms; | |
3793 | bp = b->elms; | |
3794 | cp = c->elms; | |
3795 | for (i = 0; i < dst->size; i++) | |
3796 | { | |
3797 | SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp); | |
3798 | if (*dstp != tmp) | |
3799 | changed = 1; | |
3800 | *dstp = tmp; | |
3801 | dstp++; ap++; bp++; cp++; | |
3802 | } | |
3803 | return changed; | |
3804 | } | |
3805 | ||
3806 | /* Set bitmap DST to the bitwise negation of the bitmap SRC. */ | |
3807 | ||
3808 | void | |
3809 | sbitmap_not (dst, src) | |
3810 | sbitmap dst, src; | |
3811 | { | |
3812 | int i; | |
3813 | sbitmap_ptr dstp, ap; | |
3814 | ||
3815 | dstp = dst->elms; | |
3816 | ap = src->elms; | |
3817 | for (i = 0; i < dst->size; i++) | |
3818 | { | |
3819 | SBITMAP_ELT_TYPE tmp = ~(*ap); | |
3820 | *dstp = tmp; | |
3821 | dstp++; ap++; | |
3822 | } | |
3823 | } | |
3824 | ||
3825 | /* Set the bits in DST to be the difference between the bits | |
3826 | in A and the bits in B. i.e. dst = a - b. | |
3827 | The - operator is implemented as a & (~b). */ | |
3828 | ||
3829 | void | |
3830 | sbitmap_difference (dst, a, b) | |
3831 | sbitmap dst, a, b; | |
3832 | { | |
3833 | int i; | |
3834 | sbitmap_ptr dstp, ap, bp; | |
3835 | ||
3836 | dstp = dst->elms; | |
3837 | ap = a->elms; | |
3838 | bp = b->elms; | |
3839 | for (i = 0; i < dst->size; i++) | |
3840 | *dstp++ = *ap++ & (~*bp++); | |
3841 | } | |
3842 | ||
3843 | /* Set DST to be (A and B)). | |
3844 | Return non-zero if any change is made. */ | |
3845 | ||
3846 | int | |
3847 | sbitmap_a_and_b (dst, a, b) | |
3848 | sbitmap dst, a, b; | |
3849 | { | |
3850 | int i,changed; | |
3851 | sbitmap_ptr dstp, ap, bp; | |
3852 | ||
3853 | changed = 0; | |
3854 | dstp = dst->elms; | |
3855 | ap = a->elms; | |
3856 | bp = b->elms; | |
3857 | for (i = 0; i < dst->size; i++) | |
3858 | { | |
3859 | SBITMAP_ELT_TYPE tmp = *ap & *bp; | |
3860 | if (*dstp != tmp) | |
3861 | changed = 1; | |
3862 | *dstp = tmp; | |
3863 | dstp++; ap++; bp++; | |
3864 | } | |
3865 | return changed; | |
3866 | } | |
3867 | /* Set DST to be (A or B)). | |
3868 | Return non-zero if any change is made. */ | |
3869 | ||
3870 | int | |
3871 | sbitmap_a_or_b (dst, a, b) | |
3872 | sbitmap dst, a, b; | |
3873 | { | |
3874 | int i,changed; | |
3875 | sbitmap_ptr dstp, ap, bp; | |
3876 | ||
3877 | changed = 0; | |
3878 | dstp = dst->elms; | |
3879 | ap = a->elms; | |
3880 | bp = b->elms; | |
3881 | for (i = 0; i < dst->size; i++) | |
3882 | { | |
3883 | SBITMAP_ELT_TYPE tmp = *ap | *bp; | |
3884 | if (*dstp != tmp) | |
3885 | changed = 1; | |
3886 | *dstp = tmp; | |
3887 | dstp++; ap++; bp++; | |
3888 | } | |
3889 | return changed; | |
3890 | } | |
3891 | ||
3892 | /* Set DST to be (A or (B and C)). | |
3893 | Return non-zero if any change is made. */ | |
3894 | ||
3895 | int | |
3896 | sbitmap_a_or_b_and_c (dst, a, b, c) | |
3897 | sbitmap dst, a, b, c; | |
3898 | { | |
3899 | int i,changed; | |
3900 | sbitmap_ptr dstp, ap, bp, cp; | |
3901 | ||
3902 | changed = 0; | |
3903 | dstp = dst->elms; | |
3904 | ap = a->elms; | |
3905 | bp = b->elms; | |
3906 | cp = c->elms; | |
3907 | for (i = 0; i < dst->size; i++) | |
3908 | { | |
3909 | SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp); | |
3910 | if (*dstp != tmp) | |
3911 | changed = 1; | |
3912 | *dstp = tmp; | |
3913 | dstp++; ap++; bp++; cp++; | |
3914 | } | |
3915 | return changed; | |
3916 | } | |
3917 | ||
3918 | /* Set DST to be (A ann (B or C)). | |
3919 | Return non-zero if any change is made. */ | |
3920 | ||
3921 | int | |
3922 | sbitmap_a_and_b_or_c (dst, a, b, c) | |
3923 | sbitmap dst, a, b, c; | |
3924 | { | |
3925 | int i,changed; | |
3926 | sbitmap_ptr dstp, ap, bp, cp; | |
3927 | ||
3928 | changed = 0; | |
3929 | dstp = dst->elms; | |
3930 | ap = a->elms; | |
3931 | bp = b->elms; | |
3932 | cp = c->elms; | |
3933 | for (i = 0; i < dst->size; i++) | |
3934 | { | |
3935 | SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp); | |
3936 | if (*dstp != tmp) | |
3937 | changed = 1; | |
3938 | *dstp = tmp; | |
3939 | dstp++; ap++; bp++; cp++; | |
3940 | } | |
3941 | return changed; | |
3942 | } | |
3943 | ||
3944 | /* Set the bitmap DST to the intersection of SRC of all predecessors or | |
3945 | successors of block number BB (PRED_SUCC says which). */ | |
3946 | ||
3947 | void | |
3948 | sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ) | |
3949 | sbitmap dst; | |
3950 | sbitmap *src; | |
3951 | int bb; | |
3952 | int_list_ptr *pred_succ; | |
3953 | { | |
3954 | int_list_ptr ps; | |
3955 | int ps_bb; | |
3956 | int set_size = dst->size; | |
3957 | ||
3958 | ps = pred_succ[bb]; | |
3959 | ||
3960 | /* It is possible that there are no predecessors(/successors). | |
3961 | This can happen for example in unreachable code. */ | |
3962 | ||
3963 | if (ps == NULL) | |
3964 | { | |
3965 | /* In APL-speak this is the `and' reduction of the empty set and thus | |
3966 | the result is the identity for `and'. */ | |
3967 | sbitmap_ones (dst); | |
3968 | return; | |
3969 | } | |
3970 | ||
3971 | /* Set result to first predecessor/successor. */ | |
3972 | ||
3973 | for ( ; ps != NULL; ps = ps->next) | |
3974 | { | |
3975 | ps_bb = INT_LIST_VAL (ps); | |
3976 | if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK) | |
3977 | continue; | |
3978 | sbitmap_copy (dst, src[ps_bb]); | |
3979 | /* Break out since we're only doing first predecessor. */ | |
3980 | break; | |
3981 | } | |
3982 | if (ps == NULL) | |
3983 | return; | |
3984 | ||
3985 | /* Now do the remaining predecessors/successors. */ | |
3986 | ||
3987 | for (ps = ps->next; ps != NULL; ps = ps->next) | |
3988 | { | |
3989 | int i; | |
3990 | sbitmap_ptr p,r; | |
3991 | ||
3992 | ps_bb = INT_LIST_VAL (ps); | |
3993 | if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK) | |
3994 | continue; | |
3995 | ||
3996 | p = src[ps_bb]->elms; | |
3997 | r = dst->elms; | |
3998 | ||
3999 | for (i = 0; i < set_size; i++) | |
4000 | *r++ &= *p++; | |
4001 | } | |
4002 | } | |
4003 | ||
4004 | /* Set the bitmap DST to the intersection of SRC of all predecessors | |
4005 | of block number BB. */ | |
4006 | ||
4007 | void | |
4008 | sbitmap_intersect_of_predecessors (dst, src, bb, s_preds) | |
4009 | sbitmap dst; | |
4010 | sbitmap *src; | |
4011 | int bb; | |
4012 | int_list_ptr *s_preds; | |
4013 | { | |
4014 | sbitmap_intersect_of_predsucc (dst, src, bb, s_preds); | |
4015 | } | |
4016 | ||
4017 | /* Set the bitmap DST to the intersection of SRC of all successors | |
4018 | of block number BB. */ | |
4019 | ||
4020 | void | |
4021 | sbitmap_intersect_of_successors (dst, src, bb, s_succs) | |
4022 | sbitmap dst; | |
4023 | sbitmap *src; | |
4024 | int bb; | |
4025 | int_list_ptr *s_succs; | |
4026 | { | |
4027 | sbitmap_intersect_of_predsucc (dst, src, bb, s_succs); | |
4028 | } | |
4029 | ||
4030 | /* Set the bitmap DST to the union of SRC of all predecessors/successors of | |
4031 | block number BB. */ | |
4032 | ||
4033 | void | |
4034 | sbitmap_union_of_predsucc (dst, src, bb, pred_succ) | |
4035 | sbitmap dst; | |
4036 | sbitmap *src; | |
4037 | int bb; | |
4038 | int_list_ptr *pred_succ; | |
4039 | { | |
4040 | int_list_ptr ps; | |
4041 | int ps_bb; | |
4042 | int set_size = dst->size; | |
4043 | ||
4044 | ps = pred_succ[bb]; | |
4045 | ||
4046 | /* It is possible that there are no predecessors(/successors). | |
4047 | This can happen for example in unreachable code. */ | |
4048 | ||
4049 | if (ps == NULL) | |
4050 | { | |
4051 | /* In APL-speak this is the `or' reduction of the empty set and thus | |
4052 | the result is the identity for `or'. */ | |
4053 | sbitmap_zero (dst); | |
4054 | return; | |
4055 | } | |
4056 | ||
4057 | /* Set result to first predecessor/successor. */ | |
4058 | ||
4059 | for ( ; ps != NULL; ps = ps->next) | |
4060 | { | |
4061 | ps_bb = INT_LIST_VAL (ps); | |
4062 | if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK) | |
4063 | continue; | |
4064 | sbitmap_copy (dst, src[ps_bb]); | |
4065 | /* Break out since we're only doing first predecessor. */ | |
4066 | break; | |
4067 | } | |
4068 | if (ps == NULL) | |
4069 | return; | |
4070 | ||
4071 | /* Now do the remaining predecessors/successors. */ | |
4072 | ||
4073 | for (ps = ps->next; ps != NULL; ps = ps->next) | |
4074 | { | |
4075 | int i; | |
4076 | sbitmap_ptr p,r; | |
4077 | ||
4078 | ps_bb = INT_LIST_VAL (ps); | |
4079 | if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK) | |
4080 | continue; | |
4081 | ||
4082 | p = src[ps_bb]->elms; | |
4083 | r = dst->elms; | |
4084 | ||
4085 | for (i = 0; i < set_size; i++) | |
4086 | *r++ |= *p++; | |
4087 | } | |
4088 | } | |
4089 | ||
4090 | /* Set the bitmap DST to the union of SRC of all predecessors of | |
4091 | block number BB. */ | |
4092 | ||
4093 | void | |
4094 | sbitmap_union_of_predecessors (dst, src, bb, s_preds) | |
4095 | sbitmap dst; | |
4096 | sbitmap *src; | |
4097 | int bb; | |
4098 | int_list_ptr *s_preds; | |
4099 | { | |
4100 | sbitmap_union_of_predsucc (dst, src, bb, s_preds); | |
4101 | } | |
4102 | ||
5e89e58b JL |
4103 | /* Set the bitmap DST to the union of SRC of all predecessors of |
4104 | block number BB. */ | |
4105 | ||
4106 | void | |
4107 | sbitmap_union_of_successors (dst, src, bb, s_succ) | |
4108 | sbitmap dst; | |
4109 | sbitmap *src; | |
4110 | int bb; | |
4111 | int_list_ptr *s_succ; | |
4112 | { | |
4113 | sbitmap_union_of_predsucc (dst, src, bb, s_succ); | |
4114 | } | |
4115 | ||
5ece9746 JL |
4116 | /* Compute dominator relationships. */ |
4117 | void | |
4118 | compute_dominators (dominators, post_dominators, s_preds, s_succs) | |
4119 | sbitmap *dominators; | |
4120 | sbitmap *post_dominators; | |
4121 | int_list_ptr *s_preds; | |
4122 | int_list_ptr *s_succs; | |
4123 | { | |
4124 | int bb, changed, passes; | |
4125 | sbitmap *temp_bitmap; | |
4126 | ||
4127 | temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks); | |
4128 | sbitmap_vector_ones (dominators, n_basic_blocks); | |
4129 | sbitmap_vector_ones (post_dominators, n_basic_blocks); | |
4130 | sbitmap_vector_zero (temp_bitmap, n_basic_blocks); | |
4131 | ||
4132 | sbitmap_zero (dominators[0]); | |
4133 | SET_BIT (dominators[0], 0); | |
4134 | ||
4135 | sbitmap_zero (post_dominators[n_basic_blocks-1]); | |
4136 | SET_BIT (post_dominators[n_basic_blocks-1], 0); | |
4137 | ||
4138 | passes = 0; | |
4139 | changed = 1; | |
4140 | while (changed) | |
4141 | { | |
4142 | changed = 0; | |
4143 | for (bb = 1; bb < n_basic_blocks; bb++) | |
4144 | { | |
4145 | sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators, | |
4146 | bb, s_preds); | |
4147 | SET_BIT (temp_bitmap[bb], bb); | |
4148 | changed |= sbitmap_a_and_b (dominators[bb], | |
4149 | dominators[bb], | |
4150 | temp_bitmap[bb]); | |
4151 | sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators, | |
4152 | bb, s_succs); | |
4153 | SET_BIT (temp_bitmap[bb], bb); | |
4154 | changed |= sbitmap_a_and_b (post_dominators[bb], | |
4155 | post_dominators[bb], | |
4156 | temp_bitmap[bb]); | |
4157 | } | |
4158 | passes++; | |
4159 | } | |
4160 | ||
4161 | free (temp_bitmap); | |
4162 | } | |
4c649323 JL |
4163 | |
4164 | /* Count for a single SET rtx, X. */ | |
4165 | ||
4166 | static void | |
4167 | count_reg_sets_1 (x) | |
4168 | rtx x; | |
4169 | { | |
4170 | register int regno; | |
4171 | register rtx reg = SET_DEST (x); | |
4172 | ||
4173 | /* Find the register that's set/clobbered. */ | |
4174 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
4175 | || GET_CODE (reg) == SIGN_EXTRACT | |
4176 | || GET_CODE (reg) == STRICT_LOW_PART) | |
4177 | reg = XEXP (reg, 0); | |
4178 | ||
86465af7 DM |
4179 | if (GET_CODE (reg) == PARALLEL |
4180 | && GET_MODE (reg) == BLKmode) | |
4181 | { | |
4182 | register int i; | |
4183 | for (i = XVECLEN (reg, 0) - 1; i >= 0; i--) | |
4184 | count_reg_sets_1 (XVECEXP (reg, 0, i)); | |
4185 | return; | |
4186 | } | |
4187 | ||
4c649323 JL |
4188 | if (GET_CODE (reg) == REG) |
4189 | { | |
4190 | regno = REGNO (reg); | |
4191 | if (regno >= FIRST_PSEUDO_REGISTER) | |
4192 | { | |
4193 | /* Count (weighted) references, stores, etc. This counts a | |
4194 | register twice if it is modified, but that is correct. */ | |
4195 | REG_N_SETS (regno)++; | |
4196 | ||
4197 | REG_N_REFS (regno) += loop_depth; | |
4198 | } | |
4199 | } | |
4200 | } | |
4201 | ||
4202 | /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment | |
4203 | REG_N_REFS by the current loop depth for each SET or CLOBBER found. */ | |
4204 | ||
4205 | static void | |
4206 | count_reg_sets (x) | |
4207 | rtx x; | |
4208 | { | |
4209 | register RTX_CODE code = GET_CODE (x); | |
4210 | ||
4211 | if (code == SET || code == CLOBBER) | |
4212 | count_reg_sets_1 (x); | |
4213 | else if (code == PARALLEL) | |
4214 | { | |
4215 | register int i; | |
4216 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
4217 | { | |
4218 | code = GET_CODE (XVECEXP (x, 0, i)); | |
4219 | if (code == SET || code == CLOBBER) | |
4220 | count_reg_sets_1 (XVECEXP (x, 0, i)); | |
4221 | } | |
4222 | } | |
4223 | } | |
4224 | ||
/* Increment REG_N_REFS by the current loop depth for each register
   reference found in X.

   Mirrors the structure of mark_used_regs: destinations of SETs are
   normally not counted as uses (the SET may be dead), but addresses,
   sources, and partial-register stores are.  Iterates via the `retry'
   label on the last operand to save stack depth.  */

static void
count_reg_references (x)
     rtx x;
{
  register RTX_CODE code;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and address vectors contain no register references.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

#ifdef HAVE_cc0
    case CC0:
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  The clobbered object itself is not a use.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_references (XEXP (XEXP (x, 0), 0));
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);

      /* In case the SUBREG is not of a register, don't optimize */
      if (GET_CODE (x) != REG)
	{
	  count_reg_references (x);
	  return;
	}

      /* ... fall through ...  (X is now the inner REG.)  */

    case REG:
      /* Only pseudo registers are tracked; hard regs are ignored.  */
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
	REG_N_REFS (REGNO (x)) += loop_depth;
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
	    count_reg_references (XEXP (testreg, 0));
	    count_reg_references (SET_SRC (x));
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || GET_CODE (testreg) == REG)
	  {
	    count_reg_references (SET_SRC (x));
	    /* MARK_DEST set above means the store only partially
	       overwrites the register, so the old value is also used.  */
	    if (mark_dest)
	      count_reg_references (SET_DEST (x));
	    return;
	  }
      }
      break;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    count_reg_references (XEXP (x, i));
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      count_reg_references (XVECEXP (x, i, j));
	  }
      }
  }
}
4363 | ||
4364 | /* Recompute register set/reference counts immediately prior to register | |
4365 | allocation. | |
4366 | ||
4367 | This avoids problems with set/reference counts changing to/from values | |
4368 | which have special meanings to the register allocators. | |
4369 | ||
4370 | Additionally, the reference counts are the primary component used by the | |
4371 | register allocators to prioritize pseudos for allocation to hard regs. | |
4372 | More accurate reference counts generally lead to better register allocation. | |
4373 | ||
4374 | It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and | |
4375 | possibly other information which is used by the register allocators. */ | |
4376 | ||
762a1d90 | 4377 | void |
4c649323 JL |
4378 | recompute_reg_usage (f) |
4379 | rtx f; | |
4380 | { | |
4381 | rtx insn; | |
4382 | int i, max_reg; | |
4383 | ||
4384 | /* Clear out the old data. */ | |
4385 | max_reg = max_reg_num (); | |
4386 | for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++) | |
4387 | { | |
4388 | REG_N_SETS (i) = 0; | |
4389 | REG_N_REFS (i) = 0; | |
4390 | } | |
4391 | ||
4392 | /* Scan each insn in the chain and count how many times each register is | |
4393 | set/used. */ | |
4394 | loop_depth = 1; | |
4395 | for (insn = f; insn; insn = NEXT_INSN (insn)) | |
4396 | { | |
4397 | /* Keep track of loop depth. */ | |
4398 | if (GET_CODE (insn) == NOTE) | |
4399 | { | |
4400 | /* Look for loop boundaries. */ | |
4401 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END) | |
4402 | loop_depth--; | |
4403 | else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG) | |
4404 | loop_depth++; | |
4405 | ||
4406 | /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. | |
4407 | Abort now rather than setting register status incorrectly. */ | |
4408 | if (loop_depth == 0) | |
4409 | abort (); | |
4410 | } | |
4411 | else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
4412 | { | |
4413 | rtx links; | |
4414 | ||
4415 | /* This call will increment REG_N_SETS for each SET or CLOBBER | |
4416 | of a register in INSN. It will also increment REG_N_REFS | |
4417 | by the loop depth for each set of a register in INSN. */ | |
4418 | count_reg_sets (PATTERN (insn)); | |
4419 | ||
4420 | /* count_reg_sets does not detect autoincrement address modes, so | |
4421 | detect them here by looking at the notes attached to INSN. */ | |
4422 | for (links = REG_NOTES (insn); links; links = XEXP (links, 1)) | |
4423 | { | |
4424 | if (REG_NOTE_KIND (links) == REG_INC) | |
4425 | /* Count (weighted) references, stores, etc. This counts a | |
4426 | register twice if it is modified, but that is correct. */ | |
4427 | REG_N_SETS (REGNO (XEXP (links, 0)))++; | |
4428 | } | |
4429 | ||
4430 | /* This call will increment REG_N_REFS by the current loop depth for | |
4431 | each reference to a register in INSN. */ | |
4432 | count_reg_references (PATTERN (insn)); | |
4433 | ||
4434 | /* count_reg_references will not include counts for arguments to | |
4435 | function calls, so detect them here by examining the | |
4436 | CALL_INSN_FUNCTION_USAGE data. */ | |
4437 | if (GET_CODE (insn) == CALL_INSN) | |
4438 | { | |
4439 | rtx note; | |
4440 | ||
4441 | for (note = CALL_INSN_FUNCTION_USAGE (insn); | |
4442 | note; | |
4443 | note = XEXP (note, 1)) | |
4444 | if (GET_CODE (XEXP (note, 0)) == USE) | |
4445 | count_reg_references (SET_DEST (XEXP (note, 0))); | |
4446 | } | |
4447 | } | |
4448 | } | |
4449 | } |