]>
Commit | Line | Data |
---|---|---|
058e97ec | 1 | /* IRA processing allocno lives to build allocno live ranges. |
66647d44 | 2 | Copyright (C) 2006, 2007, 2008, 2009 |
058e97ec VM |
3 | Free Software Foundation, Inc. |
4 | Contributed by Vladimir Makarov <vmakarov@redhat.com>. | |
5 | ||
6 | This file is part of GCC. | |
7 | ||
8 | GCC is free software; you can redistribute it and/or modify it under | |
9 | the terms of the GNU General Public License as published by the Free | |
10 | Software Foundation; either version 3, or (at your option) any later | |
11 | version. | |
12 | ||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
19 | along with GCC; see the file COPYING3. If not see | |
20 | <http://www.gnu.org/licenses/>. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
25 | #include "tm.h" | |
26 | #include "regs.h" | |
27 | #include "rtl.h" | |
28 | #include "tm_p.h" | |
29 | #include "target.h" | |
30 | #include "flags.h" | |
cf0c2a60 | 31 | #include "except.h" |
058e97ec VM |
32 | #include "hard-reg-set.h" |
33 | #include "basic-block.h" | |
34 | #include "insn-config.h" | |
35 | #include "recog.h" | |
36 | #include "toplev.h" | |
37 | #include "params.h" | |
38 | #include "df.h" | |
39 | #include "sparseset.h" | |
40 | #include "ira-int.h" | |
41 | ||
/* The code in this file is similar to one in global but the code
   works on the allocno basis and creates live ranges instead of
   pseudo-register conflicts.  */

/* Program points are enumerated by numbers from range
   0..IRA_MAX_POINT-1.  There are approximately two times more program
   points than insns.  Program points are places in the program where
   liveness info can be changed.  In most general case (there are more
   complicated cases too) some program points correspond to places
   where input operand dies and other ones correspond to places where
   output operands are born.  */
int ira_max_point;

/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
   live ranges with given start/finish point.  */
allocno_live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;

/* Number of the current program point.  */
static int curr_point;

/* Point where register pressure excess started or -1 if there is no
   register pressure excess.  Excess pressure for a register class at
   some point means that there are more allocnos of given register
   class living at the point than number of hard-registers of the
   class available for the allocation.  It is defined only for cover
   classes.  */
static int high_pressure_start_point[N_REG_CLASSES];

/* Allocnos live at current point in the scan.  Indexed by allocno
   number (ALLOCNO_NUM).  */
static sparseset allocnos_live;

/* Set of hard regs (except eliminable ones) currently live.  */
static HARD_REG_SET hard_regs_live;

/* The loop tree node corresponding to the current basic block.  */
static ira_loop_tree_node_t curr_bb_node;

/* The number of the last processed call.  */
static int last_call_num;
/* The number of last call at which given allocno was saved.  Indexed
   by allocno number; an entry is invalidated (set to 0) whenever the
   allocno is referenced again.  */
static int *allocno_saved_at_call;
058e97ec VM |
/* The function processing birth of register REGNO.  It updates living
   hard regs and conflict hard regs for living allocnos or starts a
   new live range for the allocno corresponding to REGNO if it is
   necessary.  */
static void
make_regno_born (int regno)
{
  unsigned int i;
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* A hard register: mark it live and record a conflict with
	 every allocno currently live.  */
      SET_HARD_REG_BIT (hard_regs_live, regno);
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
	{
	  SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (ira_allocnos[i]),
			    regno);
	  SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (ira_allocnos[i]),
			    regno);
	}
      return;
    }
  /* A pseudo register: open a new live range for its allocno unless
     the most recent range already reaches (or abuts) the current
     program point, in which case that range is simply extended.  */
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  if ((p = ALLOCNO_LIVE_RANGES (a)) == NULL
      || (p->finish != curr_point && p->finish + 1 != curr_point))
    ALLOCNO_LIVE_RANGES (a)
      = ira_create_allocno_live_range (a, curr_point, -1,
				       ALLOCNO_LIVE_RANGES (a));
}
116 | ||
/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for allocno A.  Adds the
   number of program points (from the start of the current excess
   region, or the start of A's current live range if later, up to
   CURR_POINT inclusive) for every class in A's cover class and its
   super classes that is currently in excess pressure.  */
static void
update_allocno_pressure_excess_length (ira_allocno_t a)
{
  int start, i;
  enum reg_class cover_class, cl;
  allocno_live_range_t p;

  cover_class = ALLOCNO_COVER_CLASS (a);
  for (i = 0;
       (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES;
       i++)
    {
      /* Skip classes with no pressure excess in progress.  */
      if (high_pressure_start_point[cl] < 0)
	continue;
      p = ALLOCNO_LIVE_RANGES (a);
      ira_assert (p != NULL);
      /* The excess region for A starts at the later of the class'
	 high-pressure start and A's own range start.  */
      start = (high_pressure_start_point[cl] > p->start
	       ? high_pressure_start_point[cl] : p->start);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
    }
}
139 | ||
/* Process the death of register REGNO.  This updates hard_regs_live
   or finishes the current live range for the allocno corresponding to
   REGNO.  */
static void
make_regno_dead (int regno)
{
  ira_allocno_t a;
  allocno_live_range_t p;

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: just clear the liveness bit.  */
      CLEAR_HARD_REG_BIT (hard_regs_live, regno);
      return;
    }
  a = ira_curr_regno_allocno_map[regno];
  if (a == NULL)
    return;
  /* Close the allocno's current (open) live range at this point and
     account any pressure excess accumulated during the range.  */
  p = ALLOCNO_LIVE_RANGES (a);
  ira_assert (p != NULL);
  p->finish = curr_point;
  update_allocno_pressure_excess_length (a);
}
162 | ||
058e97ec VM |
/* The current register pressures for each cover class (and its super
   classes) for the current basic block.  */
static int curr_reg_pressure[N_REG_CLASSES];
166 | ||
/* Mark allocno A as currently living and update current register
   pressure, maximal register pressure for the current BB, start point
   of the register pressure excess, and conflicting hard registers of
   A.  */
static void
set_allocno_live (ira_allocno_t a)
{
  int i;
  enum reg_class cover_class, cl;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    return;
  sparseset_set_bit (allocnos_live, ALLOCNO_NUM (a));
  /* All currently live hard registers conflict with A.  */
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
  IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
  cover_class = ALLOCNO_COVER_CLASS (a);
  for (i = 0;
       (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES;
       i++)
    {
      /* Add A's contribution to the pressure of CL; open an excess
	 region if the pressure just exceeded the available registers,
	 and keep the BB's maximal pressure up to date.  */
      curr_reg_pressure[cl] += ira_reg_class_nregs[cl][ALLOCNO_MODE (a)];
      if (high_pressure_start_point[cl] < 0
	  && (curr_reg_pressure[cl] > ira_available_class_regs[cl]))
	high_pressure_start_point[cl] = curr_point;
      if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
	curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
    }
}
197 | ||
/* Mark allocno A as currently not living and update current register
   pressure, start point of the register pressure excess, and register
   pressure excess length for living allocnos.  */
static void
clear_allocno_live (ira_allocno_t a)
{
  int i;
  unsigned int j;
  enum reg_class cover_class, cl;
  bool set_p;

  /* Invalidate because it is referenced.  */
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      set_p = false;
      for (i = 0;
	   (cl = ira_reg_class_super_classes[cover_class][i])
	     != LIM_REG_CLASSES;
	   i++)
	{
	  /* Remove A's contribution; remember whether the pressure of
	     some class just dropped back to the available limit.  */
	  curr_reg_pressure[cl] -= ira_reg_class_nregs[cl][ALLOCNO_MODE (a)];
	  ira_assert (curr_reg_pressure[cl] >= 0);
	  if (high_pressure_start_point[cl] >= 0
	      && curr_reg_pressure[cl] <= ira_available_class_regs[cl])
	    set_p = true;
	}
      if (set_p)
	{
	  /* An excess-pressure region ends here: charge its length to
	     every still-living allocno, then close the region(s).  */
	  EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, j)
	    update_allocno_pressure_excess_length (ira_allocnos[j]);
	  for (i = 0;
	       (cl = ira_reg_class_super_classes[cover_class][i])
		 != LIM_REG_CLASSES;
	       i++)
	    if (high_pressure_start_point[cl] >= 0
		&& curr_reg_pressure[cl] <= ira_available_class_regs[cl])
	      high_pressure_start_point[cl] = -1;

	}
    }
  sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (a));
}
242 | ||
3517d3a0 VM |
/* Mark the register REG as live.  Store a 1 in hard_regs_live or
   allocnos_live for this register or the corresponding allocno,
   record how many consecutive hardware registers it actually
   needs.  */
static void
mark_reg_live (rtx reg)
{
  int i, regno;

  gcc_assert (REG_P (reg));
  regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      ira_allocno_t a = ira_curr_regno_allocno_map[regno];

      if (a != NULL)
	{
	  if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
	    {
	      /* Already live; invalidate because it is referenced.  */
	      allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
	      return;
	    }
	  set_allocno_live (a);
	}
      make_regno_born (regno);
    }
  else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
    {
      /* Hard register: process every hard register the value occupies
	 in this mode.  */
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      enum reg_class cover_class, cl;

      while (regno < last)
	{
	  if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
	      && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
	    {
	      cover_class = ira_hard_regno_cover_class[regno];
	      /* First bump the pressure (possibly opening an excess
		 region)...  */
	      for (i = 0;
		   (cl = ira_reg_class_super_classes[cover_class][i])
		     != LIM_REG_CLASSES;
		   i++)
		{
		  curr_reg_pressure[cl]++;
		  if (high_pressure_start_point[cl] < 0
		      && (curr_reg_pressure[cl]
			  > ira_available_class_regs[cl]))
		    high_pressure_start_point[cl] = curr_point;
		}
	      make_regno_born (regno);
	      /* ...then update the BB's maximal pressure.  */
	      for (i = 0;
		   (cl = ira_reg_class_super_classes[cover_class][i])
		     != LIM_REG_CLASSES;
		   i++)
		{
		  if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
		    curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
		}
	    }
	  regno++;
	}
    }
}
307 | ||
3517d3a0 VM |
308 | /* Mark the register referenced by use or def REF as live. */ |
309 | static void | |
57512f53 | 310 | mark_ref_live (df_ref ref) |
058e97ec | 311 | { |
3517d3a0 VM |
312 | rtx reg; |
313 | ||
314 | reg = DF_REF_REG (ref); | |
315 | if (GET_CODE (reg) == SUBREG) | |
316 | reg = SUBREG_REG (reg); | |
317 | mark_reg_live (reg); | |
058e97ec VM |
318 | } |
319 | ||
3517d3a0 | 320 | /* Mark the register REG as dead. Store a 0 in hard_regs_live or |
acb37d29 | 321 | allocnos_live for the register. */ |
058e97ec | 322 | static void |
3517d3a0 | 323 | mark_reg_dead (rtx reg) |
058e97ec VM |
324 | { |
325 | int regno; | |
326 | ||
acb37d29 | 327 | gcc_assert (REG_P (reg)); |
058e97ec VM |
328 | regno = REGNO (reg); |
329 | ||
058e97ec VM |
330 | if (regno >= FIRST_PSEUDO_REGISTER) |
331 | { | |
332 | ira_allocno_t a = ira_curr_regno_allocno_map[regno]; | |
333 | ||
334 | if (a != NULL) | |
335 | { | |
336 | if (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a))) | |
cb1ca6ac VM |
337 | { |
338 | /* Invalidate because it is referenced. */ | |
339 | allocno_saved_at_call[ALLOCNO_NUM (a)] = 0; | |
340 | return; | |
341 | } | |
058e97ec VM |
342 | clear_allocno_live (a); |
343 | } | |
344 | make_regno_dead (regno); | |
345 | } | |
346 | else if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno)) | |
347 | { | |
7db7ed3c VM |
348 | int i; |
349 | unsigned int j; | |
058e97ec | 350 | int last = regno + hard_regno_nregs[regno][GET_MODE (reg)]; |
7db7ed3c VM |
351 | enum reg_class cover_class, cl; |
352 | bool set_p; | |
058e97ec VM |
353 | |
354 | while (regno < last) | |
355 | { | |
356 | if (TEST_HARD_REG_BIT (hard_regs_live, regno)) | |
357 | { | |
7db7ed3c | 358 | set_p = false; |
c0683a82 | 359 | cover_class = ira_hard_regno_cover_class[regno]; |
7db7ed3c VM |
360 | for (i = 0; |
361 | (cl = ira_reg_class_super_classes[cover_class][i]) | |
362 | != LIM_REG_CLASSES; | |
363 | i++) | |
364 | { | |
365 | curr_reg_pressure[cl]--; | |
366 | if (high_pressure_start_point[cl] >= 0 | |
367 | && curr_reg_pressure[cl] <= ira_available_class_regs[cl]) | |
368 | set_p = true; | |
369 | ira_assert (curr_reg_pressure[cl] >= 0); | |
370 | } | |
371 | if (set_p) | |
058e97ec | 372 | { |
7db7ed3c VM |
373 | EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, j) |
374 | update_allocno_pressure_excess_length (ira_allocnos[j]); | |
375 | for (i = 0; | |
376 | (cl = ira_reg_class_super_classes[cover_class][i]) | |
377 | != LIM_REG_CLASSES; | |
378 | i++) | |
379 | if (high_pressure_start_point[cl] >= 0 | |
380 | && (curr_reg_pressure[cl] | |
381 | <= ira_available_class_regs[cl])) | |
382 | high_pressure_start_point[cl] = -1; | |
058e97ec VM |
383 | } |
384 | make_regno_dead (regno); | |
385 | } | |
386 | regno++; | |
387 | } | |
388 | } | |
389 | } | |
390 | ||
3517d3a0 VM |
391 | /* Mark the register referenced by definition DEF as dead, if the |
392 | definition is a total one. */ | |
393 | static void | |
57512f53 | 394 | mark_ref_dead (df_ref def) |
3517d3a0 VM |
395 | { |
396 | rtx reg; | |
397 | ||
398 | if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL) | |
399 | || DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)) | |
400 | return; | |
401 | ||
402 | reg = DF_REF_REG (def); | |
403 | if (GET_CODE (reg) == SUBREG) | |
404 | reg = SUBREG_REG (reg); | |
405 | mark_reg_dead (reg); | |
406 | } | |
407 | ||
22c02455 VM |
/* Make pseudo REG conflicting with pseudo DREG, if the 1st pseudo
   class is intersected with class CL.  Advance the current program
   point before making the conflict if ADVANCE_P.  Return TRUE if we
   will need to advance the current program point.  */
static bool
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, bool advance_p)
{
  ira_allocno_t a;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
    return advance_p;

  a = ira_curr_regno_allocno_map[REGNO (reg)];
  if (! reg_classes_intersect_p (cl, ALLOCNO_COVER_CLASS (a)))
    return advance_p;

  if (advance_p)
    curr_point++;

  /* Create momentarily overlapping live regions for REG and DREG at
     the current point so their allocnos conflict.  */
  mark_reg_live (reg);
  mark_reg_live (dreg);
  mark_reg_dead (reg);
  mark_reg_dead (dreg);

  /* The point was just consumed; the caller need not advance again
     until another conflict is created.  */
  return false;
}
3517d3a0 | 437 | |
22c02455 VM |
438 | /* Check and make if necessary conflicts for pseudo DREG of class |
439 | DEF_CL of the current insn with input operand USE of class USE_CL. | |
440 | Advance the current program point before making the conflict if | |
441 | ADVANCE_P. Return TRUE if we will need to advance the current | |
442 | program point. */ | |
443 | static bool | |
444 | check_and_make_def_use_conflict (rtx dreg, enum reg_class def_cl, | |
445 | int use, enum reg_class use_cl, | |
446 | bool advance_p) | |
447 | { | |
448 | if (! reg_classes_intersect_p (def_cl, use_cl)) | |
449 | return advance_p; | |
450 | ||
451 | advance_p = make_pseudo_conflict (recog_data.operand[use], | |
452 | use_cl, dreg, advance_p); | |
453 | /* Reload may end up swapping commutative operands, so you | |
454 | have to take both orderings into account. The | |
455 | constraints for the two operands can be completely | |
456 | different. (Indeed, if the constraints for the two | |
457 | operands are the same for all alternatives, there's no | |
458 | point marking them as commutative.) */ | |
459 | if (use < recog_data.n_operands + 1 | |
460 | && recog_data.constraints[use][0] == '%') | |
461 | advance_p | |
462 | = make_pseudo_conflict (recog_data.operand[use + 1], | |
463 | use_cl, dreg, advance_p); | |
464 | if (use >= 1 | |
465 | && recog_data.constraints[use - 1][0] == '%') | |
466 | advance_p | |
467 | = make_pseudo_conflict (recog_data.operand[use - 1], | |
468 | use_cl, dreg, advance_p); | |
469 | return advance_p; | |
470 | } | |
471 | ||
472 | /* Check and make if necessary conflicts for definition DEF of class | |
473 | DEF_CL of the current insn with input operands. Process only | |
474 | constraints of alternative ALT. */ | |
475 | static void | |
476 | check_and_make_def_conflict (int alt, int def, enum reg_class def_cl) | |
477 | { | |
478 | int use, use_match; | |
479 | ira_allocno_t a; | |
480 | enum reg_class use_cl, acl; | |
481 | bool advance_p; | |
482 | rtx dreg = recog_data.operand[def]; | |
483 | ||
484 | if (def_cl == NO_REGS) | |
485 | return; | |
486 | ||
487 | if (GET_CODE (dreg) == SUBREG) | |
488 | dreg = SUBREG_REG (dreg); | |
489 | ||
490 | if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER) | |
491 | return; | |
492 | ||
493 | a = ira_curr_regno_allocno_map[REGNO (dreg)]; | |
494 | acl = ALLOCNO_COVER_CLASS (a); | |
495 | if (! reg_classes_intersect_p (acl, def_cl)) | |
496 | return; | |
497 | ||
498 | advance_p = true; | |
499 | ||
500 | for (use = 0; use < recog_data.n_operands; use++) | |
501 | { | |
502 | if (use == def || recog_data.operand_type[use] == OP_OUT) | |
503 | return; | |
504 | ||
505 | if (recog_op_alt[use][alt].anything_ok) | |
506 | use_cl = ALL_REGS; | |
3517d3a0 | 507 | else |
22c02455 VM |
508 | use_cl = recog_op_alt[use][alt].cl; |
509 | ||
510 | advance_p = check_and_make_def_use_conflict (dreg, def_cl, use, | |
511 | use_cl, advance_p); | |
512 | ||
513 | if ((use_match = recog_op_alt[use][alt].matches) >= 0) | |
514 | { | |
515 | if (use_match == def) | |
516 | return; | |
517 | ||
518 | if (recog_op_alt[use_match][alt].anything_ok) | |
519 | use_cl = ALL_REGS; | |
520 | else | |
521 | use_cl = recog_op_alt[use_match][alt].cl; | |
522 | advance_p = check_and_make_def_use_conflict (dreg, def_cl, use, | |
523 | use_cl, advance_p); | |
524 | } | |
3517d3a0 | 525 | } |
22c02455 VM |
526 | } |
527 | ||
/* Make conflicts of early clobber pseudo registers of the current
   insn with its inputs.  Avoid introducing unnecessary conflicts by
   checking classes of the constraints and pseudos because otherwise
   significant code degradation is possible for some targets.  */
static void
make_early_clobber_and_input_conflicts (void)
{
  int alt;
  int def, def_match;
  enum reg_class def_cl;

  /* For every alternative and every operand, find earlyclobber
     outputs (directly or via a matched operand) and make them
     conflict with the insn's inputs.  */
  for (alt = 0; alt < recog_data.n_alternatives; alt++)
    for (def = 0; def < recog_data.n_operands; def++)
      {
	def_cl = NO_REGS;
	if (recog_op_alt[def][alt].earlyclobber)
	  {
	    if (recog_op_alt[def][alt].anything_ok)
	      def_cl = ALL_REGS;
	    else
	      def_cl = recog_op_alt[def][alt].cl;
	    check_and_make_def_conflict (alt, def, def_cl);
	  }
	/* An operand matched to (or matching) an earlyclobber operand
	   is effectively earlyclobber too.  */
	if ((def_match = recog_op_alt[def][alt].matches) >= 0
	    && (recog_op_alt[def_match][alt].earlyclobber
		|| recog_op_alt[def][alt].earlyclobber))
	  {
	    if (recog_op_alt[def_match][alt].anything_ok)
	      def_cl = ALL_REGS;
	    else
	      def_cl = recog_op_alt[def_match][alt].cl;
	    check_and_make_def_conflict (alt, def, def_cl);
	  }
      }
}
563 | ||
/* Mark early clobber hard registers of the current INSN as live (if
   LIVE_P) or dead.  Return true if there are such registers.  */
static bool
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
{
  df_ref *def_rec;
  bool set_p = false;

  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
      {
	rtx dreg = DF_REF_REG (*def_rec);

	if (GET_CODE (dreg) == SUBREG)
	  dreg = SUBREG_REG (dreg);
	/* Only hard-register clobbers are handled here.  */
	if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
	  continue;

	/* Hard register clobbers are believed to be early clobber
	   because there is no way to say that non-operand hard
	   register clobbers are not early ones.  */
	if (live_p)
	  mark_ref_live (*def_rec);
	else
	  mark_ref_dead (*def_rec);
	set_p = true;
      }

  return set_p;
}
594 | ||
058e97ec VM |
/* Checks that CONSTRAINTS permits to use only one hard register.  If
   it is so, the function returns the class of the hard register.
   Otherwise it returns NO_REGS.  EQUIV_CONST, if non-null, is a
   constant known to be equivalent to OP and is checked against the
   constant constraints as well.  */
static enum reg_class
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
{
  int ignore_p;
  enum reg_class cl, next_cl;
  int c;

  cl = NO_REGS;
  /* Walk the constraint string; '#' starts a part to be ignored until
     the next alternative separator ','.  */
  for (ignore_p = false;
       (c = *constraints);
       constraints += CONSTRAINT_LEN (c, constraints))
    if (c == '#')
      ignore_p = true;
    else if (c == ',')
      ignore_p = false;
    else if (! ignore_p)
      switch (c)
	{
	case ' ':
	case '\t':
	case '=':
	case '+':
	case '*':
	case '&':
	case '%':
	case '!':
	case '?':
	  break;
	case 'i':
	  /* A constant alternative means the operand need not live in
	     a register at all.  */
	  if (CONSTANT_P (op)
	      || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
	    return NO_REGS;
	  break;

	case 'n':
	  if (CONST_INT_P (op)
	      || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
	      || (equiv_const != NULL_RTX
		  && (CONST_INT_P (equiv_const)
		      || (GET_CODE (equiv_const) == CONST_DOUBLE
			  && GET_MODE (equiv_const) == VOIDmode))))
	    return NO_REGS;
	  break;

	case 's':
	  if ((CONSTANT_P (op) && !CONST_INT_P (op)
	       && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
	      || (equiv_const != NULL_RTX
		  && CONSTANT_P (equiv_const)
		  && !CONST_INT_P (equiv_const)
		  && (GET_CODE (equiv_const) != CONST_DOUBLE
		      || GET_MODE (equiv_const) != VOIDmode)))
	    return NO_REGS;
	  break;

	case 'I':
	case 'J':
	case 'K':
	case 'L':
	case 'M':
	case 'N':
	case 'O':
	case 'P':
	  if ((CONST_INT_P (op)
	       && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
	      || (equiv_const != NULL_RTX
		  && CONST_INT_P (equiv_const)
		  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
						c, constraints)))
	    return NO_REGS;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
	      || (equiv_const != NULL_RTX
		  && (GET_CODE (equiv_const) == CONST_DOUBLE
		      || (GET_CODE (equiv_const) == CONST_VECTOR
			  && (GET_MODE_CLASS (GET_MODE (equiv_const))
			      == MODE_VECTOR_FLOAT)))))
	    return NO_REGS;
	  break;

	case 'G':
	case 'H':
	  if ((GET_CODE (op) == CONST_DOUBLE
	       && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
	      || (equiv_const != NULL_RTX
		  && GET_CODE (equiv_const) == CONST_DOUBLE
		  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
						       c, constraints)))
	    return NO_REGS;
	  /* ??? what about memory */
	  /* Fall through to the register-class cases.  */
	case 'r':
	case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
	case 'h': case 'j': case 'k': case 'l':
	case 'q': case 't': case 'u':
	case 'v': case 'w': case 'x': case 'y': case 'z':
	case 'A': case 'B': case 'C': case 'D':
	case 'Q': case 'R': case 'S': case 'T': case 'U':
	case 'W': case 'Y': case 'Z':
	  next_cl = (c == 'r'
		     ? GENERAL_REGS
		     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
	  /* Give up if two different classes are required or if the
	     class allows more than one hard register.  */
	  if ((cl != NO_REGS && next_cl != cl)
	      || ira_available_class_regs[next_cl] > 1)
	    return NO_REGS;
	  cl = next_cl;
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* A matching constraint: recurse into the matched operand's
	     constraints.  */
	  next_cl
	    = single_reg_class (recog_data.constraints[c - '0'],
				recog_data.operand[c - '0'], NULL_RTX);
	  if ((cl != NO_REGS && next_cl != cl) || next_cl == NO_REGS
	      || ira_available_class_regs[next_cl] > 1)
	    return NO_REGS;
	  cl = next_cl;
	  break;

	default:
	  return NO_REGS;
	}
  return cl;
}
726 | ||
727 | /* The function checks that operand OP_NUM of the current insn can use | |
728 | only one hard register. If it is so, the function returns the | |
729 | class of the hard register. Otherwise it returns NO_REGS. */ | |
730 | static enum reg_class | |
731 | single_reg_operand_class (int op_num) | |
732 | { | |
733 | if (op_num < 0 || recog_data.n_alternatives == 0) | |
734 | return NO_REGS; | |
735 | return single_reg_class (recog_data.constraints[op_num], | |
736 | recog_data.operand[op_num], NULL_RTX); | |
737 | } | |
738 | ||
/* Processes input operands, if IN_P, or output operands otherwise of
   the current insn with FREQ to find allocno which can use only one
   hard register and makes other currently living allocnos conflicting
   with the hard register.  */
static void
process_single_reg_class_operands (bool in_p, int freq)
{
  int i, regno, cost;
  unsigned int px;
  enum reg_class cl, cover_class;
  rtx operand;
  ira_allocno_t operand_a, a;

  for (i = 0; i < recog_data.n_operands; i++)
    {
      operand = recog_data.operand[i];
      /* Filter operands by direction: inputs (or inouts) when IN_P,
	 outputs (or inouts) otherwise.  */
      if (in_p && recog_data.operand_type[i] != OP_IN
	  && recog_data.operand_type[i] != OP_INOUT)
	continue;
      if (! in_p && recog_data.operand_type[i] != OP_OUT
	  && recog_data.operand_type[i] != OP_INOUT)
	continue;
      cl = single_reg_operand_class (i);
      if (cl == NO_REGS)
	continue;

      operand_a = NULL;

      if (GET_CODE (operand) == SUBREG)
	operand = SUBREG_REG (operand);

      if (REG_P (operand)
	  && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
	{
	  enum machine_mode mode;
	  /* NOTE(review): this declaration shadows the outer
	     COVER_CLASS; both refer to cover classes of different
	     allocnos, which is confusing but harmless here.  */
	  enum reg_class cover_class;

	  operand_a = ira_curr_regno_allocno_map[regno];
	  mode = ALLOCNO_MODE (operand_a);
	  cover_class = ALLOCNO_COVER_CLASS (operand_a);
	  if (ira_class_subset_p[cl][cover_class]
	      && ira_class_hard_regs_num[cl] != 0
	      && (ira_class_hard_reg_index[cover_class]
		  [ira_class_hard_regs[cl][0]]) >= 0
	      && reg_class_size[cl] <= (unsigned) CLASS_MAX_NREGS (cl, mode))
	    {
	      /* Bias the operand's allocno toward the single register
		 of CL by decreasing that register's conflict cost.  */
	      cost
		= (freq
		   * (in_p
		      ? ira_get_register_move_cost (mode, cover_class, cl)
		      : ira_get_register_move_cost (mode, cl, cover_class)));
	      ira_allocate_and_set_costs
		(&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), cover_class, 0);
	      ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
		[ira_class_hard_reg_index
		 [cover_class][ira_class_hard_regs[cl][0]]]
		-= cost;
	    }
	}

      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
	{
	  a = ira_allocnos[px];
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  if (a != operand_a)
	    {
	      /* We could increase costs of A instead of making it
		 conflicting with the hard register.  But it works worse
		 because it will be spilled in reload in anyway.  */
	      IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
				reg_class_contents[cl]);
	      IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
				reg_class_contents[cl]);
	    }
	}
    }
}
816 | ||
/* Process insns of the basic block given by its LOOP_TREE_NODE to
   update allocno live ranges, allocno hard register conflicts,
   intersected calls, and register pressure info for allocnos for the
   basic block for and regions containing the basic block.  */
static void
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
  int i, freq;
  unsigned int j;
  basic_block bb;
  rtx insn;
  bitmap_iterator bi;
  bitmap reg_live_out;
  unsigned int px;
  bool set_p;

  bb = loop_tree_node->bb;
  /* A node with no bb is a pure loop node; only the pressure
     propagation at the bottom of this function applies to it.  */
  if (bb != NULL)
    {
      /* Reset per-cover-class pressure tracking for this block.  */
      for (i = 0; i < ira_reg_class_cover_size; i++)
	{
	  curr_reg_pressure[ira_reg_class_cover[i]] = 0;
	  high_pressure_start_point[ira_reg_class_cover[i]] = -1;
	}
      curr_bb_node = loop_tree_node;
      reg_live_out = DF_LR_OUT (bb);
      sparseset_clear (allocnos_live);
      /* Start from the set of regs live out of the block; the insn
	 walk below is in reverse order.  */
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
      /* Account the live-out hard registers in the pressure of their
	 cover class and of every super class of it.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (hard_regs_live, i))
	  {
	    enum reg_class cover_class, cl;

	    cover_class = ira_class_translate[REGNO_REG_CLASS (i)];
	    for (j = 0;
		 (cl = ira_reg_class_super_classes[cover_class][j])
		 != LIM_REG_CLASSES;
		 j++)
	      {
		curr_reg_pressure[cl]++;
		if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
		  curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
		ira_assert (curr_reg_pressure[cl]
			    <= ira_available_class_regs[cl]);
	      }
	  }
      /* Make the allocnos of the live-out pseudos born at the current
	 point.  */
      EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
	{
	  ira_allocno_t a = ira_curr_regno_allocno_map[j];

	  if (a == NULL)
	    continue;
	  ira_assert (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)));
	  set_allocno_live (a);
	  make_regno_born (j);
	}

      freq = REG_FREQ_FROM_BB (bb);
      if (freq == 0)
	freq = 1;

      /* Invalidate all allocno_saved_at_call entries.  */
      last_call_num++;

      /* Scan the code of this basic block, noting which allocnos and
	 hard regs are born or die.

	 Note that this loop treats uninitialized values as live until
	 the beginning of the block.  For example, if an instruction
	 uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
	 set, FOO will remain live until the beginning of the block.
	 Likewise if FOO is not set at all.  This is unnecessarily
	 pessimistic, but it probably doesn't matter much in practice.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  df_ref *def_rec, *use_rec;
	  bool call_p;

	  if (! INSN_P (insn))
	    continue;

	  /* NOTE(review): assumes this bb node always has a parent
	     loop node when dumping -- confirm for the root case.  */
	  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
		     INSN_UID (insn), loop_tree_node->parent->loop->num,
		     curr_point);

	  /* Mark each defined value as live.  We need to do this for
	     unused values because they still conflict with quantities
	     that are live at the time of the definition.

	     Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
	     references represent the effect of the called function
	     on a call-clobbered register.  Marking the register as
	     live would stop us from allocating it to a call-crossing
	     allocno.  */
	  call_p = CALL_P (insn);
	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
	      mark_ref_live (*def_rec);

	  /* If INSN has multiple outputs, then any value used in one
	     of the outputs conflicts with the other outputs.  Model this
	     by making the used value live during the output phase.

	     It is unsafe to use !single_set here since it will ignore
	     an unused output.  Just because an output is unused does
	     not mean the compiler can assume the side effect will not
	     occur.  Consider if ALLOCNO appears in the address of an
	     output and we reload the output.  If we allocate ALLOCNO
	     to the same hard register as an unused output we could
	     set the hard register before the output reload insn.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	    for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	      {
		int i;
		rtx reg;

		reg = DF_REF_REG (*use_rec);
		for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
		  {
		    rtx set;

		    set = XVECEXP (PATTERN (insn), 0, i);
		    if (GET_CODE (set) == SET
			&& reg_overlap_mentioned_p (reg, SET_DEST (set)))
		      {
			/* After the previous loop, this is a no-op if
			   REG is contained within SET_DEST (SET).  */
			mark_ref_live (*use_rec);
			break;
		      }
		  }
	      }

	  extract_insn (insn);
	  preprocess_constraints ();
	  /* false: we are in the "definition" half of the insn.  */
	  process_single_reg_class_operands (false, freq);

	  /* See which defined values die here.  */
	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	    if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
	      mark_ref_dead (*def_rec);

	  if (call_p)
	    {
	      last_call_num++;
	      /* The current set of live allocnos are live across the call.  */
	      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
		{
		  ira_allocno_t a = ira_allocnos[i];

		  if (allocno_saved_at_call[i] != last_call_num)
		    /* Here we are mimicking caller-save.c behaviour
		       which does not save hard register at a call if
		       it was saved on previous call in the same basic
		       block and the hard register was not mentioned
		       between the two calls.  */
		    ALLOCNO_CALL_FREQ (a) += freq;
		  /* Mark it as saved at the next call.  */
		  allocno_saved_at_call[i] = last_call_num + 1;
		  ALLOCNO_CALLS_CROSSED_NUM (a)++;
		  /* Don't allocate allocnos that cross setjmps or any
		     call, if this function receives a nonlocal
		     goto.  */
		  if (cfun->has_nonlocal_label
		      || find_reg_note (insn, REG_SETJMP,
					NULL_RTX) != NULL_RTX)
		    {
		      SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
		      SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
		    }
		  /* A call that can throw internally clobbers the
		     call-used regs along the EH edge as well.  */
		  if (can_throw_internal (insn))
		    {
		      IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
					call_used_reg_set);
		      IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
					call_used_reg_set);
		    }
		}
	    }

	  make_early_clobber_and_input_conflicts ();

	  /* Advance to the "use" half of this insn.  */
	  curr_point++;

	  /* Mark each used value as live.  */
	  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	    mark_ref_live (*use_rec);

	  /* true: now in the "use" half of the insn.  */
	  process_single_reg_class_operands (true, freq);

	  set_p = mark_hard_reg_early_clobbers (insn, true);

	  if (set_p)
	    {
	      mark_hard_reg_early_clobbers (insn, false);

	      /* Mark each hard reg as live again.  For example, a
		 hard register can be in clobber and in an insn
		 input.  */
	      for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
		{
		  rtx ureg = DF_REF_REG (*use_rec);

		  if (GET_CODE (ureg) == SUBREG)
		    ureg = SUBREG_REG (ureg);
		  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  mark_ref_live (*use_rec);
		}
	    }

	  curr_point++;
	}

#ifdef EH_RETURN_DATA_REGNO
      /* The EH data registers are implicitly live at the start of a
	 block with an EH predecessor.  */
      if (bb_has_eh_pred (bb))
	for (j = 0; ; ++j)
	  {
	    unsigned int regno = EH_RETURN_DATA_REGNO (j);
	    if (regno == INVALID_REGNUM)
	      break;
	    make_regno_born (regno);
	  }
#endif

      /* Allocnos can't go in stack regs at the start of a basic block
	 that is reached by an abnormal edge.  Likewise for call
	 clobbered regs, because caller-save, fixup_abnormal_edges and
	 possibly the table driven EH machinery are not quite ready to
	 handle such allocnos live across such edges.  */
      if (bb_has_abnormal_pred (bb))
	{
#ifdef STACK_REGS
	  EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
	    {
	      ALLOCNO_NO_STACK_REG_P (ira_allocnos[px]) = true;
	      ALLOCNO_TOTAL_NO_STACK_REG_P (ira_allocnos[px]) = true;
	    }
	  for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
	    make_regno_born (px);
#endif
	  /* No need to record conflicts for call clobbered regs if we
	     have nonlocal labels around, as we don't ever try to
	     allocate such regs in this case.  */
	  if (!cfun->has_nonlocal_label)
	    for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
	      if (call_used_regs[px])
		make_regno_born (px);
	}

      /* Close the live ranges of everything still live at the block
	 start (we scanned in reverse).  */
      EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
	{
	  make_regno_dead (ALLOCNO_REGNO (ira_allocnos[i]));
	}

      curr_point++;

    }
  /* Propagate register pressure to upper loop tree nodes: */
  if (loop_tree_node != ira_loop_tree_root)
    for (i = 0; i < ira_reg_class_cover_size; i++)
      {
	enum reg_class cover_class;

	cover_class = ira_reg_class_cover[i];
	if (loop_tree_node->reg_pressure[cover_class]
	    > loop_tree_node->parent->reg_pressure[cover_class])
	  loop_tree_node->parent->reg_pressure[cover_class]
	    = loop_tree_node->reg_pressure[cover_class];
      }
}
1092 | ||
1093 | /* Create and set up IRA_START_POINT_RANGES and | |
1094 | IRA_FINISH_POINT_RANGES. */ | |
1095 | static void | |
1096 | create_start_finish_chains (void) | |
1097 | { | |
1098 | ira_allocno_t a; | |
1099 | ira_allocno_iterator ai; | |
1100 | allocno_live_range_t r; | |
1101 | ||
1102 | ira_start_point_ranges | |
1103 | = (allocno_live_range_t *) ira_allocate (ira_max_point | |
1104 | * sizeof (allocno_live_range_t)); | |
1105 | memset (ira_start_point_ranges, 0, | |
1106 | ira_max_point * sizeof (allocno_live_range_t)); | |
1107 | ira_finish_point_ranges | |
1108 | = (allocno_live_range_t *) ira_allocate (ira_max_point | |
1109 | * sizeof (allocno_live_range_t)); | |
1110 | memset (ira_finish_point_ranges, 0, | |
1111 | ira_max_point * sizeof (allocno_live_range_t)); | |
1112 | FOR_EACH_ALLOCNO (a, ai) | |
1113 | { | |
1114 | for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next) | |
1115 | { | |
1116 | r->start_next = ira_start_point_ranges[r->start]; | |
1117 | ira_start_point_ranges[r->start] = r; | |
1118 | r->finish_next = ira_finish_point_ranges[r->finish]; | |
1119 | ira_finish_point_ranges[r->finish] = r; | |
1120 | } | |
1121 | } | |
1122 | } | |
1123 | ||
1124 | /* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after | |
1125 | new live ranges and program points were added as a result if new | |
1126 | insn generation. */ | |
1127 | void | |
1128 | ira_rebuild_start_finish_chains (void) | |
1129 | { | |
1130 | ira_free (ira_finish_point_ranges); | |
1131 | ira_free (ira_start_point_ranges); | |
1132 | create_start_finish_chains (); | |
1133 | } | |
1134 | ||
b15a7ae6 VM |
1135 | /* Compress allocno live ranges by removing program points where |
1136 | nothing happens. */ | |
1137 | static void | |
1138 | remove_some_program_points_and_update_live_ranges (void) | |
1139 | { | |
1140 | unsigned i; | |
1141 | int n; | |
1142 | int *map; | |
1143 | ira_allocno_t a; | |
1144 | ira_allocno_iterator ai; | |
1145 | allocno_live_range_t r; | |
1146 | bitmap born_or_died; | |
1147 | bitmap_iterator bi; | |
1148 | ||
1149 | born_or_died = ira_allocate_bitmap (); | |
1150 | FOR_EACH_ALLOCNO (a, ai) | |
1151 | { | |
1152 | for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next) | |
1153 | { | |
1154 | ira_assert (r->start <= r->finish); | |
1155 | bitmap_set_bit (born_or_died, r->start); | |
1156 | bitmap_set_bit (born_or_died, r->finish); | |
1157 | } | |
1158 | } | |
1159 | map = (int *) ira_allocate (sizeof (int) * ira_max_point); | |
1160 | n = 0; | |
1161 | EXECUTE_IF_SET_IN_BITMAP(born_or_died, 0, i, bi) | |
1162 | { | |
1163 | map[i] = n++; | |
1164 | } | |
1165 | ira_free_bitmap (born_or_died); | |
1166 | if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL) | |
1167 | fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n", | |
1168 | ira_max_point, n, 100 * n / ira_max_point); | |
1169 | ira_max_point = n; | |
1170 | FOR_EACH_ALLOCNO (a, ai) | |
1171 | { | |
1172 | for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next) | |
1173 | { | |
1174 | r->start = map[r->start]; | |
1175 | r->finish = map[r->finish]; | |
1176 | } | |
1177 | } | |
1178 | ira_free (map); | |
1179 | } | |
1180 | ||
058e97ec VM |
1181 | /* Print live ranges R to file F. */ |
1182 | void | |
1183 | ira_print_live_range_list (FILE *f, allocno_live_range_t r) | |
1184 | { | |
1185 | for (; r != NULL; r = r->next) | |
1186 | fprintf (f, " [%d..%d]", r->start, r->finish); | |
1187 | fprintf (f, "\n"); | |
1188 | } | |
1189 | ||
1190 | /* Print live ranges R to stderr. */ | |
1191 | void | |
1192 | ira_debug_live_range_list (allocno_live_range_t r) | |
1193 | { | |
1194 | ira_print_live_range_list (stderr, r); | |
1195 | } | |
1196 | ||
1197 | /* Print live ranges of allocno A to file F. */ | |
1198 | static void | |
1199 | print_allocno_live_ranges (FILE *f, ira_allocno_t a) | |
1200 | { | |
1201 | fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a)); | |
1202 | ira_print_live_range_list (f, ALLOCNO_LIVE_RANGES (a)); | |
1203 | } | |
1204 | ||
1205 | /* Print live ranges of allocno A to stderr. */ | |
1206 | void | |
1207 | ira_debug_allocno_live_ranges (ira_allocno_t a) | |
1208 | { | |
1209 | print_allocno_live_ranges (stderr, a); | |
1210 | } | |
1211 | ||
1212 | /* Print live ranges of all allocnos to file F. */ | |
1213 | static void | |
1214 | print_live_ranges (FILE *f) | |
1215 | { | |
1216 | ira_allocno_t a; | |
1217 | ira_allocno_iterator ai; | |
1218 | ||
1219 | FOR_EACH_ALLOCNO (a, ai) | |
1220 | print_allocno_live_ranges (f, a); | |
1221 | } | |
1222 | ||
/* Print the live ranges of every allocno to stderr (debugger
   convenience wrapper).  */
void
ira_debug_live_ranges (void)
{
  print_live_ranges (stderr);
}
1229 | ||
1230 | /* The main entry function creates live ranges, set up | |
1231 | CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for allocnos, and | |
1232 | calculate register pressure info. */ | |
1233 | void | |
1234 | ira_create_allocno_live_ranges (void) | |
1235 | { | |
1236 | allocnos_live = sparseset_alloc (ira_allocnos_num); | |
058e97ec | 1237 | curr_point = 0; |
cb1ca6ac VM |
1238 | last_call_num = 0; |
1239 | allocno_saved_at_call | |
1240 | = (int *) ira_allocate (ira_allocnos_num * sizeof (int)); | |
1241 | memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int)); | |
058e97ec VM |
1242 | ira_traverse_loop_tree (true, ira_loop_tree_root, NULL, |
1243 | process_bb_node_lives); | |
1244 | ira_max_point = curr_point; | |
1245 | create_start_finish_chains (); | |
1246 | if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL) | |
1247 | print_live_ranges (ira_dump_file); | |
1248 | /* Clean up. */ | |
cb1ca6ac | 1249 | ira_free (allocno_saved_at_call); |
058e97ec VM |
1250 | sparseset_free (allocnos_live); |
1251 | } | |
1252 | ||
b15a7ae6 VM |
1253 | /* Compress allocno live ranges. */ |
1254 | void | |
1255 | ira_compress_allocno_live_ranges (void) | |
1256 | { | |
1257 | remove_some_program_points_and_update_live_ranges (); | |
1258 | ira_rebuild_start_finish_chains (); | |
1259 | if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL) | |
1260 | { | |
1261 | fprintf (ira_dump_file, "Ranges after the compression:\n"); | |
1262 | print_live_ranges (ira_dump_file); | |
1263 | } | |
1264 | } | |
1265 | ||
058e97ec VM |
1266 | /* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES. */ |
1267 | void | |
1268 | ira_finish_allocno_live_ranges (void) | |
1269 | { | |
1270 | ira_free (ira_finish_point_ranges); | |
1271 | ira_free (ira_start_point_ranges); | |
1272 | } |