]> gcc.gnu.org Git - gcc.git/blame - gcc/ira-build.c
In gcc/testsuite/: 2010-10-20 Nicola Pero <nicola.pero@meta-innovation.com>
[gcc.git] / gcc / ira-build.c
CommitLineData
058e97ec 1/* Building internal representation for IRA.
4ac293e2 2 Copyright (C) 2006, 2007, 2008, 2009, 2010
058e97ec
VM
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "target.h"
29#include "regs.h"
30#include "flags.h"
31#include "hard-reg-set.h"
32#include "basic-block.h"
33#include "insn-config.h"
34#include "recog.h"
718f9c0f 35#include "diagnostic-core.h"
058e97ec
VM
36#include "toplev.h"
37#include "params.h"
38#include "df.h"
39#include "output.h"
40#include "reload.h"
41#include "sparseset.h"
42#include "ira-int.h"
5936d944 43#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
058e97ec
VM
44
/* Forward declaration: find an existing copy between two allocnos.  */
static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
				     ira_loop_tree_node_t);

/* The root of the loop tree corresponding to the all function.  */
ira_loop_tree_node_t ira_loop_tree_root;

/* Height of the loop tree.  */
int ira_loop_tree_height;

/* All nodes representing basic blocks are referred through the
   following array.  We can not use basic block member `aux' for this
   because it is used for insertion of insns on edges.  */
ira_loop_tree_node_t ira_bb_nodes;

/* All nodes representing loops are referred through the following
   array.  */
ira_loop_tree_node_t ira_loop_nodes;

/* Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  */
ira_allocno_t *ira_regno_allocno_map;

/* Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  */
ira_allocno_t *ira_allocnos;

/* Size of the previous array.  */
int ira_allocnos_num;

/* Count of conflict record structures we've created, used when creating
   a new conflict id.  */
int ira_objects_num;

/* Map a conflict id to its conflict record.  */
ira_object_t *ira_object_id_map;

/* Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  */
ira_copy_t *ira_copies;

/* Size of the previous array.  */
int ira_copies_num;


/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;
/* The following function allocates the loop tree nodes.  If LOOPS_P
   is FALSE, the nodes corresponding to the loops (except the root
   which corresponds the all function) will be not allocated but nodes
   will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (bool loops_p)
{
  unsigned int i, j;
  int max_regno;
  bool skip_p;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  loop_p loop;

  /* One node per basic block; the array is sized with the pre-change
     block count recorded in last_basic_block_before_change.  */
  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
  last_basic_block_before_change = last_basic_block;
  for (i = 0; i < (unsigned int) last_basic_block; i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
	      sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  ira_loop_nodes = ((struct ira_loop_tree_node *)
		    ira_allocate (sizeof (struct ira_loop_tree_node)
				  * VEC_length (loop_p, ira_loops.larray)));
  max_regno = max_reg_num ();
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    {
      if (loop != ira_loops.tree_root)
	{
	  /* A NULL regno_allocno_map marks a loop that is not an
	     allocation region.  */
	  ira_loop_nodes[i].regno_allocno_map = NULL;
	  if (! loops_p)
	    continue;
	  /* Skip loops whose header has an abnormal critical incoming
	     edge other than from the latch.  */
	  skip_p = false;
	  FOR_EACH_EDGE (e, ei, loop->header->preds)
	    if (e->src != loop->latch
		&& (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  if (skip_p)
	    continue;
	  /* Likewise skip loops with an abnormal critical exit edge.  */
	  edges = get_loop_exit_edges (loop);
	  FOR_EACH_VEC_ELT (edge, edges, j, e)
	    if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = true;
		break;
	      }
	  VEC_free (edge, heap, edges);
	  if (skip_p)
	    continue;
	}
      /* The loop (or the root node) is an allocation region: give it
	 a regno map and the bookkeeping bitmaps.  */
      ira_loop_nodes[i].regno_allocno_map
	= (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
      memset (ira_loop_nodes[i].regno_allocno_map, 0,
	      sizeof (ira_allocno_t) * max_regno);
      memset (ira_loop_nodes[i].reg_pressure, 0,
	      sizeof (ira_loop_nodes[i].reg_pressure));
      ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
    }
}
170
/* The function returns TRUE if there is more than one allocation
   region, i.e. some loop besides the root node was given its own
   regno_allocno_map in create_loop_tree_nodes.  */
static bool
more_one_region_p (void)
{
  unsigned int i;
  loop_p loop;

  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL
	&& ira_loop_tree_root != &ira_loop_nodes[i])
      return true;
  return false;
}
185
186/* Free the loop tree node of a loop. */
187static void
188finish_loop_tree_node (ira_loop_tree_node_t loop)
189{
190 if (loop->regno_allocno_map != NULL)
191 {
192 ira_assert (loop->bb == NULL);
193 ira_free_bitmap (loop->local_copies);
194 ira_free_bitmap (loop->border_allocnos);
195 ira_free_bitmap (loop->modified_regnos);
49d988e7 196 ira_free_bitmap (loop->all_allocnos);
058e97ec
VM
197 ira_free (loop->regno_allocno_map);
198 loop->regno_allocno_map = NULL;
199 }
200}
201
/* Free the loop tree nodes and both node arrays.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;
  loop_p loop;

  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  /* Iterate with the basic block count saved before live range
     splitting created new blocks: ira_bb_nodes was sized with it.  */
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
	ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
	ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
227
228\f
229
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  */
static void
add_loop_to_tree (struct loop *loop)
{
  struct loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We can not use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  if (loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  /* Only loops that are allocation regions (non-NULL regno map) are
     added; children == NULL means not yet linked into the tree.  */
  if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
      && ira_loop_nodes[loop->num].children == NULL)
    {
      /* We have not added loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop->num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      /* Find the nearest enclosing loop that is itself an allocation
	 region; it becomes the parent in the tree.  */
      for (parent = loop_outer (loop);
	   parent != NULL;
	   parent = loop_outer (parent))
	if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	  break;
      if (parent == NULL)
	{
	  loop_node->next = NULL;
	  loop_node->subloop_next = NULL;
	  loop_node->parent = NULL;
	}
      else
	{
	  /* Push onto both the general children list and the
	     subloop-only list of the parent.  */
	  parent_node = &ira_loop_nodes[parent->num];
	  loop_node->next = parent_node->children;
	  parent_node->children = loop_node;
	  loop_node->subloop_next = parent_node->subloops;
	  parent_node->subloops = loop_node;
	  loop_node->parent = parent_node;
	}
    }
}
272
273/* The following recursive function sets up levels of nodes of the
274 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
275 The function returns maximal value of level in the tree + 1. */
276static int
277setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
278{
279 int height, max_height;
280 ira_loop_tree_node_t subloop_node;
281
282 ira_assert (loop_node->bb == NULL);
283 loop_node->level = level;
284 max_height = level + 1;
285 for (subloop_node = loop_node->subloops;
286 subloop_node != NULL;
287 subloop_node = subloop_node->subloop_next)
288 {
289 ira_assert (subloop_node->bb == NULL);
290 height = setup_loop_tree_level (subloop_node, level + 1);
291 if (height > max_height)
292 max_height = height;
293 }
294 return max_height;
295}
296
/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  unsigned int i;
  basic_block bb;
  struct loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;
  loop_p loop;

  /* We can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
	ira_loop_nodes[i].children = NULL;
	ira_loop_nodes[i].subloops = NULL;
      }
  FOR_EACH_BB (bb)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      /* Hang each block off the innermost enclosing loop that is an
	 allocation region (has a regno map).  */
      for (parent = bb->loop_father;
	   parent != NULL;
	   parent = loop_outer (parent))
	if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
	  break;
      add_loop_to_tree (parent);
      loop_node = &ira_loop_nodes[parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
342
343\f
344
/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  /* Reallocate the maps at the current pseudo count, which may have
     grown since they were created.  */
  max_regno = max_reg_num ();
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, l, loop)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
	ira_free (ira_loop_nodes[l].regno_allocno_map);
	ira_loop_nodes[l].regno_allocno_map
	  = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
					    * max_regno);
	memset (ira_loop_nodes[l].regno_allocno_map, 0,
		sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  /* Re-thread every allocno back onto its regno chain and into its
     loop node's map.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps.  */
	continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle.  */
	loop_tree_node->regno_allocno_map[regno] = a;
    }
}
058e97ec
VM
387\f
388
a49ae217
BS
389/* Pools for allocnos, allocno live ranges and objects. */
390static alloc_pool allocno_pool, live_range_pool, object_pool;
058e97ec
VM
391
392/* Vec containing references to all created allocnos. It is a
393 container of array allocnos. */
394static VEC(ira_allocno_t,heap) *allocno_vec;
395
a49ae217
BS
396/* Vec containing references to all created ira_objects. It is a
397 container of ira_object_id_map. */
398static VEC(ira_object_t,heap) *ira_object_id_map_vec;
058e97ec
VM
399
/* Initialize data concerning allocnos.  */
static void
initiate_allocnos (void)
{
  live_range_pool
    = create_alloc_pool ("live ranges",
			 sizeof (struct live_range), 100);
  allocno_pool
    = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
  object_pool
    = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
  /* Reserve twice the pseudo count as the initial vector capacity.  */
  allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
  ira_allocnos = NULL;
  ira_allocnos_num = 0;
  ira_objects_num = 0;
  ira_object_id_map_vec
    = VEC_alloc (ira_object_t, heap, max_reg_num () * 2);
  ira_object_id_map = NULL;
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
}
422
a49ae217
BS
423/* Create and return an object corresponding to a new allocno A. */
424static ira_object_t
ac0ab4f7 425ira_create_object (ira_allocno_t a, int subword)
a49ae217
BS
426{
427 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
428 ira_object_t obj = (ira_object_t) pool_alloc (object_pool);
429
430 OBJECT_ALLOCNO (obj) = a;
ac0ab4f7 431 OBJECT_SUBWORD (obj) = subword;
a49ae217
BS
432 OBJECT_CONFLICT_ID (obj) = ira_objects_num;
433 OBJECT_CONFLICT_VEC_P (obj) = false;
434 OBJECT_CONFLICT_ARRAY (obj) = NULL;
435 OBJECT_NUM_CONFLICTS (obj) = 0;
436 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
437 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
438 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
439 reg_class_contents[cover_class]);
440 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
441 reg_class_contents[cover_class]);
442 OBJECT_MIN (obj) = INT_MAX;
443 OBJECT_MAX (obj) = -1;
9140d27b 444 OBJECT_LIVE_RANGES (obj) = NULL;
a49ae217
BS
445
446 VEC_safe_push (ira_object_t, heap, ira_object_id_map_vec, obj);
447 ira_object_id_map
448 = VEC_address (ira_object_t, ira_object_id_map_vec);
449 ira_objects_num = VEC_length (ira_object_t, ira_object_id_map_vec);
ac0ab4f7 450
a49ae217
BS
451 return obj;
452}
453
058e97ec
VM
454/* Create and return the allocno corresponding to REGNO in
455 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
456 same regno if CAP_P is FALSE. */
457ira_allocno_t
458ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
459{
460 ira_allocno_t a;
461
462 a = (ira_allocno_t) pool_alloc (allocno_pool);
463 ALLOCNO_REGNO (a) = regno;
464 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
465 if (! cap_p)
466 {
467 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
468 ira_regno_allocno_map[regno] = a;
469 if (loop_tree_node->regno_allocno_map[regno] == NULL)
470 /* Remember that we can create temporary allocnos to break
471 cycles in register shuffle on region borders (see
472 ira-emit.c). */
473 loop_tree_node->regno_allocno_map[regno] = a;
474 }
475 ALLOCNO_CAP (a) = NULL;
476 ALLOCNO_CAP_MEMBER (a) = NULL;
477 ALLOCNO_NUM (a) = ira_allocnos_num;
49d988e7 478 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
058e97ec 479 ALLOCNO_NREFS (a) = 0;
854bd721 480 ALLOCNO_FREQ (a) = 0;
058e97ec
VM
481 ALLOCNO_HARD_REGNO (a) = -1;
482 ALLOCNO_CALL_FREQ (a) = 0;
483 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
484#ifdef STACK_REGS
485 ALLOCNO_NO_STACK_REG_P (a) = false;
486 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
487#endif
488 ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
489 ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
490 ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
491 ALLOCNO_CHILD_RENAMED_P (a) = false;
492 ALLOCNO_DONT_REASSIGN_P (a) = false;
927425df 493 ALLOCNO_BAD_SPILL_P (a) = false;
058e97ec
VM
494 ALLOCNO_IN_GRAPH_P (a) = false;
495 ALLOCNO_ASSIGNED_P (a) = false;
496 ALLOCNO_MAY_BE_SPILLED_P (a) = false;
497 ALLOCNO_SPLAY_REMOVED_P (a) = false;
058e97ec
VM
498 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
499 ALLOCNO_COPIES (a) = NULL;
500 ALLOCNO_HARD_REG_COSTS (a) = NULL;
501 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
502 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
503 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
5b0c0b2c 504 ALLOCNO_LEFT_CONFLICTS_SIZE (a) = -1;
058e97ec 505 ALLOCNO_COVER_CLASS (a) = NO_REGS;
cb1ca6ac 506 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = 0;
058e97ec
VM
507 ALLOCNO_COVER_CLASS_COST (a) = 0;
508 ALLOCNO_MEMORY_COST (a) = 0;
509 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
510 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
511 ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
512 ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
513 ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
514 ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
ac0ab4f7 515 ALLOCNO_NUM_OBJECTS (a) = 0;
a49ae217 516
058e97ec
VM
517 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
518 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
519 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
ac0ab4f7 520
058e97ec
VM
521 return a;
522}
523
/* Set up cover class for A.  (NOTE(review): the historical comment
   also claimed this updates the conflict hard registers, but the
   function only stores the class; the conflict sets are initialized
   per-object in ira_create_object.)  */
void
ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
{
  ALLOCNO_COVER_CLASS (a) = cover_class;
}
530
ac0ab4f7
BS
531/* Determine the number of objects we should associate with allocno A
532 and allocate them. */
a49ae217 533void
ac0ab4f7 534ira_create_allocno_objects (ira_allocno_t a)
a49ae217 535{
ac0ab4f7
BS
536 enum machine_mode mode = ALLOCNO_MODE (a);
537 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
538 int n = ira_reg_class_nregs[cover_class][mode];
539 int i;
540
541 if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
542 n = 1;
543
544 ALLOCNO_NUM_OBJECTS (a) = n;
545 for (i = 0; i < n; i++)
546 ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
a49ae217
BS
547}
548
ac0ab4f7
BS
549/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
550 ALLOCNO_OBJECT structures. This must be called after the cover
551 classes are known. */
a49ae217
BS
552static void
553create_allocno_objects (void)
554{
555 ira_allocno_t a;
556 ira_allocno_iterator ai;
557
558 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7 559 ira_create_allocno_objects (a);
058e97ec
VM
560}
561
ac0ab4f7
BS
562/* Merge hard register conflict information for all objects associated with
563 allocno TO into the corresponding objects associated with FROM.
564 If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS. */
3c55880a
BS
565static void
566merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
567 bool total_only)
568{
ac0ab4f7
BS
569 int i;
570 gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
571 for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
572 {
573 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
574 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
575 if (!total_only)
576 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
577 OBJECT_CONFLICT_HARD_REGS (from_obj));
578 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
579 OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
580 }
3c55880a
BS
581#ifdef STACK_REGS
582 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
583 ALLOCNO_NO_STACK_REG_P (to) = true;
584 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
585 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
586#endif
587}
588
ac0ab4f7
BS
589/* Update hard register conflict information for all objects associated with
590 A to include the regs in SET. */
591void
592ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
593{
594 ira_allocno_object_iterator i;
595 ira_object_t obj;
596 FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
597 {
598 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
599 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
600 }
601}
602
a49ae217
BS
603/* Return TRUE if a conflict vector with NUM elements is more
604 profitable than a conflict bit vector for OBJ. */
058e97ec 605bool
a49ae217 606ira_conflict_vector_profitable_p (ira_object_t obj, int num)
058e97ec
VM
607{
608 int nw;
a49ae217
BS
609 int max = OBJECT_MAX (obj);
610 int min = OBJECT_MIN (obj);
058e97ec 611
a49ae217
BS
612 if (max < min)
613 /* We prefer a bit vector in such case because it does not result
614 in allocation. */
058e97ec
VM
615 return false;
616
a49ae217
BS
617 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
618 return (2 * sizeof (ira_object_t) * (num + 1)
058e97ec
VM
619 < 3 * nw * sizeof (IRA_INT_TYPE));
620}
621
a49ae217
BS
622/* Allocates and initialize the conflict vector of OBJ for NUM
623 conflicting objects. */
058e97ec 624void
a49ae217 625ira_allocate_conflict_vec (ira_object_t obj, int num)
058e97ec
VM
626{
627 int size;
a49ae217 628 ira_object_t *vec;
058e97ec 629
a49ae217 630 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
058e97ec 631 num++; /* for NULL end marker */
a49ae217
BS
632 size = sizeof (ira_object_t) * num;
633 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
634 vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
058e97ec 635 vec[0] = NULL;
a49ae217
BS
636 OBJECT_NUM_CONFLICTS (obj) = 0;
637 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
638 OBJECT_CONFLICT_VEC_P (obj) = true;
058e97ec
VM
639}
640
a49ae217 641/* Allocate and initialize the conflict bit vector of OBJ. */
058e97ec 642static void
a49ae217 643allocate_conflict_bit_vec (ira_object_t obj)
058e97ec
VM
644{
645 unsigned int size;
646
a49ae217
BS
647 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
648 size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
058e97ec 649 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
a49ae217
BS
650 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
651 memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
652 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
653 OBJECT_CONFLICT_VEC_P (obj) = false;
058e97ec
VM
654}
655
656/* Allocate and initialize the conflict vector or conflict bit vector
ac0ab4f7 657 of OBJ for NUM conflicting allocnos whatever is more profitable. */
058e97ec 658void
ac0ab4f7 659ira_allocate_object_conflicts (ira_object_t obj, int num)
058e97ec 660{
ac0ab4f7
BS
661 if (ira_conflict_vector_profitable_p (obj, num))
662 ira_allocate_conflict_vec (obj, num);
058e97ec 663 else
ac0ab4f7 664 allocate_conflict_bit_vec (obj);
058e97ec
VM
665}
666
a49ae217 667/* Add OBJ2 to the conflicts of OBJ1. */
058e97ec 668static void
a49ae217 669add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
058e97ec
VM
670{
671 int num;
672 unsigned int size;
673
a49ae217 674 if (OBJECT_CONFLICT_VEC_P (obj1))
058e97ec 675 {
a49ae217
BS
676 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
677 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
678 num = curr_num + 2;
679 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
058e97ec 680 {
a49ae217 681 ira_object_t *newvec;
058e97ec 682 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
a49ae217
BS
683 newvec = (ira_object_t *) ira_allocate (size);
684 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
685 ira_free (vec);
686 vec = newvec;
687 OBJECT_CONFLICT_ARRAY (obj1) = vec;
688 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 689 }
a49ae217 690 vec[num - 2] = obj2;
058e97ec 691 vec[num - 1] = NULL;
a49ae217 692 OBJECT_NUM_CONFLICTS (obj1)++;
058e97ec
VM
693 }
694 else
695 {
696 int nw, added_head_nw, id;
a49ae217 697 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
058e97ec 698
a49ae217
BS
699 id = OBJECT_CONFLICT_ID (obj2);
700 if (OBJECT_MIN (obj1) > id)
058e97ec
VM
701 {
702 /* Expand head of the bit vector. */
a49ae217
BS
703 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
704 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 705 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
a49ae217 706 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
058e97ec
VM
707 {
708 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
709 vec, nw * sizeof (IRA_INT_TYPE));
710 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
711 }
712 else
713 {
714 size
715 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
716 vec = (IRA_INT_TYPE *) ira_allocate (size);
717 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
a49ae217 718 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
719 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
720 memset ((char *) vec
721 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
722 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
a49ae217
BS
723 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
724 OBJECT_CONFLICT_ARRAY (obj1) = vec;
725 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 726 }
a49ae217 727 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
058e97ec 728 }
a49ae217 729 else if (OBJECT_MAX (obj1) < id)
058e97ec 730 {
a49ae217 731 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 732 size = nw * sizeof (IRA_INT_TYPE);
a49ae217 733 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
058e97ec
VM
734 {
735 /* Expand tail of the bit vector. */
736 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
737 vec = (IRA_INT_TYPE *) ira_allocate (size);
a49ae217
BS
738 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
739 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
740 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
741 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
742 OBJECT_CONFLICT_ARRAY (obj1) = vec;
743 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 744 }
a49ae217 745 OBJECT_MAX (obj1) = id;
058e97ec 746 }
a49ae217 747 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
058e97ec
VM
748 }
749}
750
a49ae217
BS
751/* Add OBJ1 to the conflicts of OBJ2 and vice versa. */
752static void
753ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
058e97ec 754{
a49ae217
BS
755 add_to_conflicts (obj1, obj2);
756 add_to_conflicts (obj2, obj1);
058e97ec
VM
757}
758
a49ae217 759/* Clear all conflicts of OBJ. */
058e97ec 760static void
a49ae217 761clear_conflicts (ira_object_t obj)
058e97ec 762{
a49ae217 763 if (OBJECT_CONFLICT_VEC_P (obj))
058e97ec 764 {
a49ae217
BS
765 OBJECT_NUM_CONFLICTS (obj) = 0;
766 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
058e97ec 767 }
a49ae217 768 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
058e97ec
VM
769 {
770 int nw;
771
a49ae217
BS
772 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
773 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
774 }
775}
776
/* The array used to find duplications in conflict vectors of
   objects, indexed by conflict id.  */
static int *conflict_check;

/* The tick value used to mark ids already seen in the conflict vector
   of the current object.  */
static int curr_conflict_check_tick;
058e97ec 784
a49ae217 785/* Remove duplications in conflict vector of OBJ. */
058e97ec 786static void
a49ae217 787compress_conflict_vec (ira_object_t obj)
058e97ec 788{
a49ae217 789 ira_object_t *vec, conflict_obj;
058e97ec
VM
790 int i, j;
791
a49ae217
BS
792 ira_assert (OBJECT_CONFLICT_VEC_P (obj));
793 vec = OBJECT_CONFLICT_VEC (obj);
794 curr_conflict_check_tick++;
795 for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
058e97ec 796 {
a49ae217
BS
797 int id = OBJECT_CONFLICT_ID (conflict_obj);
798 if (conflict_check[id] != curr_conflict_check_tick)
058e97ec 799 {
a49ae217
BS
800 conflict_check[id] = curr_conflict_check_tick;
801 vec[j++] = conflict_obj;
058e97ec
VM
802 }
803 }
a49ae217 804 OBJECT_NUM_CONFLICTS (obj) = j;
058e97ec
VM
805 vec[j] = NULL;
806}
807
808/* Remove duplications in conflict vectors of all allocnos. */
809static void
810compress_conflict_vecs (void)
811{
ac0ab4f7
BS
812 ira_object_t obj;
813 ira_object_iterator oi;
058e97ec 814
a49ae217
BS
815 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
816 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
817 curr_conflict_check_tick = 0;
ac0ab4f7 818 FOR_EACH_OBJECT (obj, oi)
a49ae217 819 {
a49ae217
BS
820 if (OBJECT_CONFLICT_VEC_P (obj))
821 compress_conflict_vec (obj);
822 }
823 ira_free (conflict_check);
058e97ec
VM
824}
825
/* This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  /* Identify where the allocno lives: a basic block or a loop.  */
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
845
/* Create and return the cap representing allocno A in the
   parent loop.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class cover_class;

  /* A must not be coalesced with anything else.  */
  ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
	      && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  cover_class = ALLOCNO_COVER_CLASS (a);
  ira_set_allocno_cover_class (cap, cover_class);
  ira_create_allocno_objects (cap);
  ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
  /* Link the cap and its member in both directions.  */
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  /* Copy the member's cost and profile information to the cap.  */
  ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
889
ac0ab4f7 890/* Create and return a live range for OBJECT with given attributes. */
b14151b5 891live_range_t
9140d27b
BS
892ira_create_live_range (ira_object_t obj, int start, int finish,
893 live_range_t next)
058e97ec 894{
b14151b5 895 live_range_t p;
058e97ec 896
b14151b5 897 p = (live_range_t) pool_alloc (live_range_pool);
9140d27b 898 p->object = obj;
058e97ec
VM
899 p->start = start;
900 p->finish = finish;
901 p->next = next;
902 return p;
903}
904
ac0ab4f7
BS
905/* Create a new live range for OBJECT and queue it at the head of its
906 live range list. */
907void
908ira_add_live_range_to_object (ira_object_t object, int start, int finish)
909{
910 live_range_t p;
911 p = ira_create_live_range (object, start, finish,
912 OBJECT_LIVE_RANGES (object));
913 OBJECT_LIVE_RANGES (object) = p;
914}
915
058e97ec 916/* Copy allocno live range R and return the result. */
b14151b5 917static live_range_t
9140d27b 918copy_live_range (live_range_t r)
058e97ec 919{
b14151b5 920 live_range_t p;
058e97ec 921
b14151b5 922 p = (live_range_t) pool_alloc (live_range_pool);
058e97ec
VM
923 *p = *r;
924 return p;
925}
926
927/* Copy allocno live range list given by its head R and return the
928 result. */
b14151b5 929live_range_t
9140d27b 930ira_copy_live_range_list (live_range_t r)
058e97ec 931{
b14151b5 932 live_range_t p, first, last;
058e97ec
VM
933
934 if (r == NULL)
935 return NULL;
936 for (first = last = NULL; r != NULL; r = r->next)
937 {
9140d27b 938 p = copy_live_range (r);
058e97ec
VM
939 if (first == NULL)
940 first = p;
941 else
942 last->next = p;
943 last = p;
944 }
945 return first;
946}
947
/* Merge ranges R1 and R2 and returns the result.  The function
   maintains the order of ranges and tries to minimize number of the
   result ranges.  Both input lists are consumed: nodes are relinked
   into the result and fully-absorbed nodes are freed.  */
live_range_t
ira_merge_live_ranges (live_range_t r1, live_range_t r2)
{
  live_range_t first, last, temp;

  if (r1 == NULL)
    return r2;
  if (r2 == NULL)
    return r1;
  for (first = last = NULL; r1 != NULL && r2 != NULL;)
    {
      /* Keep r1 pointing at the range with the larger start.  NOTE
	 (review): the lists appear to be ordered by decreasing start
	 point — confirm against the live-range construction code.  */
      if (r1->start < r2->start)
	{
	  temp = r1;
	  r1 = r2;
	  r2 = temp;
	}
      if (r1->start <= r2->finish + 1)
	{
	  /* Intersected ranges: merge r1 and r2 into r1.  Adjacent
	     ranges (gap of exactly one point) are merged too.  */
	  r1->start = r2->start;
	  if (r1->finish < r2->finish)
	    r1->finish = r2->finish;
	  temp = r2;
	  r2 = r2->next;
	  ira_finish_live_range (temp);
	  if (r2 == NULL)
	    {
	      /* To try to merge with subsequent ranges in r1.  */
	      r2 = r1->next;
	      r1->next = NULL;
	    }
	}
      else
	{
	  /* Add r1 to the result.  */
	  if (first == NULL)
	    first = last = r1;
	  else
	    {
	      last->next = r1;
	      last = r1;
	    }
	  r1 = r1->next;
	  if (r1 == NULL)
	    {
	      /* To try to merge with subsequent ranges in r2.  */
	      r1 = r2->next;
	      r2->next = NULL;
	    }
	}
    }
  /* Append whatever tail remains; exactly one of r1/r2 may be
     non-NULL here, and it is a single detached node.  */
  if (r1 != NULL)
    {
      if (first == NULL)
	first = r1;
      else
	last->next = r1;
      ira_assert (r1->next == NULL);
    }
  else if (r2 != NULL)
    {
      if (first == NULL)
	first = r2;
      else
	last->next = r2;
      ira_assert (r2->next == NULL);
    }
  else
    {
      ira_assert (last->next == NULL);
    }
  return first;
}
1025
1026/* Return TRUE if live ranges R1 and R2 intersect. */
1027bool
9140d27b 1028ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
3553f0bb
VM
1029{
1030 /* Remember the live ranges are always kept ordered. */
1031 while (r1 != NULL && r2 != NULL)
1032 {
1033 if (r1->start > r2->finish)
1034 r1 = r1->next;
1035 else if (r2->start > r1->finish)
1036 r2 = r2->next;
1037 else
1038 return true;
1039 }
1040 return false;
1041}
1042
058e97ec
VM
1043/* Free allocno live range R. */
1044void
9140d27b 1045ira_finish_live_range (live_range_t r)
058e97ec 1046{
b14151b5 1047 pool_free (live_range_pool, r);
058e97ec
VM
1048}
1049
3553f0bb
VM
1050/* Free list of allocno live ranges starting with R. */
1051void
9140d27b 1052ira_finish_live_range_list (live_range_t r)
3553f0bb 1053{
b14151b5 1054 live_range_t next_r;
3553f0bb
VM
1055
1056 for (; r != NULL; r = next_r)
1057 {
1058 next_r = r->next;
9140d27b 1059 ira_finish_live_range (r);
3553f0bb
VM
1060 }
1061}
1062
058e97ec
VM
1063/* Free updated register costs of allocno A. */
1064void
1065ira_free_allocno_updated_costs (ira_allocno_t a)
1066{
1067 enum reg_class cover_class;
1068
1069 cover_class = ALLOCNO_COVER_CLASS (a);
1070 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1071 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
1072 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1073 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1074 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1075 cover_class);
1076 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1077}
1078
/* Free the memory allocated for allocno A.  */
static void
finish_allocno (ira_allocno_t a)
{
  enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
  ira_object_t obj;
  ira_allocno_object_iterator oi;

  /* First release the objects owned by A: their live ranges, their
     conflict arrays, and their slots in the object id map.  */
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
      ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
      if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
	ira_free (OBJECT_CONFLICT_ARRAY (obj));
      pool_free (object_pool, obj);
    }

  ira_allocnos[ALLOCNO_NUM (a)] = NULL;
  /* Each cost vector is allocated lazily, so free only the ones which
     exist.  */
  if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
			  cover_class);
  pool_free (allocno_pool, a);
}
1108
/* Free the memory allocated for all allocnos and the supporting
   global data structures (regno map, vectors, pools).  */
static void
finish_allocnos (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    finish_allocno (a);
  ira_free (ira_regno_allocno_map);
  VEC_free (ira_object_t, heap, ira_object_id_map_vec);
  VEC_free (ira_allocno_t, heap, allocno_vec);
  /* The pools are freed after the per-allocno teardown above, which
     returned the individual elements to them.  */
  free_alloc_pool (allocno_pool);
  free_alloc_pool (object_pool);
  free_alloc_pool (live_range_pool);
}
1125
1126\f
1127
/* Pool for copies.  */
static alloc_pool copy_pool;

/* Vec containing references to all created copies.  It is a
   container of array ira_copies.  */
static VEC(ira_copy_t,heap) *copy_vec;

/* The function initializes data concerning allocno copies.  */
static void
initiate_copies (void)
{
  copy_pool
    = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
  /* Reserve one slot per insn uid as an initial size estimate.  */
  copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
  ira_copies = NULL;
  ira_copies_num = 0;
}
1145
1146/* Return copy connecting A1 and A2 and originated from INSN of
1147 LOOP_TREE_NODE if any. */
1148static ira_copy_t
1149find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1150 ira_loop_tree_node_t loop_tree_node)
1151{
1152 ira_copy_t cp, next_cp;
1153 ira_allocno_t another_a;
1154
1155 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1156 {
1157 if (cp->first == a1)
1158 {
1159 next_cp = cp->next_first_allocno_copy;
1160 another_a = cp->second;
1161 }
1162 else if (cp->second == a1)
1163 {
1164 next_cp = cp->next_second_allocno_copy;
1165 another_a = cp->first;
1166 }
1167 else
1168 gcc_unreachable ();
1169 if (another_a == a2 && cp->insn == insn
1170 && cp->loop_tree_node == loop_tree_node)
1171 return cp;
1172 }
1173 return NULL;
1174}
1175
/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
   SECOND, FREQ, CONSTRAINT_P, and INSN.  The new copy is registered
   in the global copy vector and the exported ira_copies array/count
   are refreshed.  */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
		 bool constraint_p, rtx insn,
		 ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  cp = (ira_copy_t) pool_alloc (copy_pool);
  cp->num = ira_copies_num;
  cp->first = first;
  cp->second = second;
  cp->freq = freq;
  cp->constraint_p = constraint_p;
  cp->insn = insn;
  cp->loop_tree_node = loop_tree_node;
  VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
  /* Re-fetch the address: the push may have reallocated the vector.  */
  ira_copies = VEC_address (ira_copy_t, copy_vec);
  ira_copies_num = VEC_length (ira_copy_t, copy_vec);
  return cp;
}
1198
/* Attach a copy CP to allocnos involved into the copy.  CP is pushed
   at the head of both allocnos' copy chains; the old heads' back
   pointers are fixed up according to which side of each old copy the
   allocno occupies.  */
void
ira_add_allocno_copy_to_list (ira_copy_t cp)
{
  ira_allocno_t first = cp->first, second = cp->second;

  cp->prev_first_allocno_copy = NULL;
  cp->prev_second_allocno_copy = NULL;
  cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
  if (cp->next_first_allocno_copy != NULL)
    {
      /* The old head may list FIRST as either its first or second
	 allocno; update the matching back pointer.  */
      if (cp->next_first_allocno_copy->first == first)
	cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
      else
	cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
    }
  cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
  if (cp->next_second_allocno_copy != NULL)
    {
      if (cp->next_second_allocno_copy->second == second)
	cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
      else
	cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
    }
  ALLOCNO_COPIES (first) = cp;
  ALLOCNO_COPIES (second) = cp;
}
1226
058e97ec
VM
1227/* Make a copy CP a canonical copy where number of the
1228 first allocno is less than the second one. */
1229void
1230ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1231{
1232 ira_allocno_t temp;
1233 ira_copy_t temp_cp;
1234
1235 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1236 return;
1237
1238 temp = cp->first;
1239 cp->first = cp->second;
1240 cp->second = temp;
1241
1242 temp_cp = cp->prev_first_allocno_copy;
1243 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1244 cp->prev_second_allocno_copy = temp_cp;
1245
1246 temp_cp = cp->next_first_allocno_copy;
1247 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1248 cp->next_second_allocno_copy = temp_cp;
1249}
1250
1251/* Create (or update frequency if the copy already exists) and return
1252 the copy of allocnos FIRST and SECOND with frequency FREQ
1253 corresponding to move insn INSN (if any) and originated from
1254 LOOP_TREE_NODE. */
1255ira_copy_t
1256ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
548a6322
VM
1257 bool constraint_p, rtx insn,
1258 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
1259{
1260 ira_copy_t cp;
1261
1262 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1263 {
1264 cp->freq += freq;
1265 return cp;
1266 }
548a6322
VM
1267 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1268 loop_tree_node);
058e97ec
VM
1269 ira_assert (first != NULL && second != NULL);
1270 ira_add_allocno_copy_to_list (cp);
1271 ira_swap_allocno_copy_ends_if_necessary (cp);
1272 return cp;
1273}
1274
/* Print info about copy CP into file F.  The trailing tag classifies
   the copy: generated from a real move insn, from an insn constraint,
   or a register-shuffle heuristic copy.  */
static void
print_copy (FILE *f, ira_copy_t cp)
{
  fprintf (f, "  cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
	   ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
	   ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
	   cp->insn != NULL
	   ? "move" : cp->constraint_p ? "constraint" : "shuffle");
}
1285
1286/* Print info about copy CP into stderr. */
1287void
1288ira_debug_copy (ira_copy_t cp)
1289{
1290 print_copy (stderr, cp);
1291}
1292
1293/* Print info about all copies into file F. */
1294static void
1295print_copies (FILE *f)
1296{
1297 ira_copy_t cp;
1298 ira_copy_iterator ci;
1299
1300 FOR_EACH_COPY (cp, ci)
1301 print_copy (f, cp);
1302}
1303
/* Debug helper: dump every copy to stderr.  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}
1310
/* Print info about copies involving allocno A into file F.  */
static void
print_allocno_copies (FILE *f, ira_allocno_t a)
{
  ira_allocno_t another_a;
  ira_copy_t cp, next_cp;

  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  /* Walk A's copy chain, following the link for whichever side of
     each copy A occupies.  */
  for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
    {
      if (cp->first == a)
	{
	  next_cp = cp->next_first_allocno_copy;
	  another_a = cp->second;
	}
      else if (cp->second == a)
	{
	  next_cp = cp->next_second_allocno_copy;
	  another_a = cp->first;
	}
      else
	gcc_unreachable ();
      fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
	       ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
    }
  fprintf (f, "\n");
}
1338
1339/* Print info about copies involving allocno A into stderr. */
1340void
1341ira_debug_allocno_copies (ira_allocno_t a)
1342{
1343 print_allocno_copies (stderr, a);
1344}
1345
1346/* The function frees memory allocated for copy CP. */
1347static void
1348finish_copy (ira_copy_t cp)
1349{
1350 pool_free (copy_pool, cp);
1351}
1352
1353
1354/* Free memory allocated for all copies. */
1355static void
1356finish_copies (void)
1357{
1358 ira_copy_t cp;
1359 ira_copy_iterator ci;
1360
1361 FOR_EACH_COPY (cp, ci)
1362 finish_copy (cp);
1363 VEC_free (ira_copy_t, heap, copy_vec);
1364 free_alloc_pool (copy_pool);
1365}
1366
1367\f
1368
/* Pools for cost vectors.  It is defined only for cover classes.  */
static alloc_pool cost_vector_pool[N_REG_CLASSES];

/* The function initiates work with hard register cost vectors.  It
   creates allocation pool for each cover class; the element size is
   one int per hard register in the class.  */
static void
initiate_cost_vectors (void)
{
  int i;
  enum reg_class cover_class;

  for (i = 0; i < ira_reg_class_cover_size; i++)
    {
      cover_class = ira_reg_class_cover[i];
      cost_vector_pool[cover_class]
	= create_alloc_pool ("cost vectors",
			     sizeof (int)
			     * ira_class_hard_regs_num[cover_class],
			     100);
    }
}
1390
1391/* Allocate and return a cost vector VEC for COVER_CLASS. */
1392int *
1393ira_allocate_cost_vector (enum reg_class cover_class)
1394{
1395 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1396}
1397
1398/* Free a cost vector VEC for COVER_CLASS. */
1399void
1400ira_free_cost_vector (int *vec, enum reg_class cover_class)
1401{
1402 ira_assert (vec != NULL);
1403 pool_free (cost_vector_pool[cover_class], vec);
1404}
1405
1406/* Finish work with hard register cost vectors. Release allocation
1407 pool for each cover class. */
1408static void
1409finish_cost_vectors (void)
1410{
1411 int i;
1412 enum reg_class cover_class;
1413
1414 for (i = 0; i < ira_reg_class_cover_size; i++)
1415 {
1416 cover_class = ira_reg_class_cover[i];
1417 free_alloc_pool (cost_vector_pool[cover_class]);
1418 }
1419}
1420
1421\f
1422
/* The current loop tree node and its regno allocno map.  */
ira_loop_tree_node_t ira_curr_loop_tree_node;
ira_allocno_t *ira_curr_regno_allocno_map;

/* This recursive function traverses loop tree with root LOOP_NODE
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP.  If BB_P,
   basic block nodes of LOOP_NODE is also processed (before its
   subloop nodes).  */
void
ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
			void (*preorder_func) (ira_loop_tree_node_t),
			void (*postorder_func) (ira_loop_tree_node_t))
{
  ira_loop_tree_node_t subloop_node;

  ira_assert (loop_node->bb == NULL);
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (preorder_func != NULL)
    (*preorder_func) (loop_node);

  /* Basic block children are visited first; both callbacks run on a
     BB node back-to-back since BBs have no subtree.  */
  if (bb_p)
    for (subloop_node = loop_node->children;
	 subloop_node != NULL;
	 subloop_node = subloop_node->next)
      if (subloop_node->bb != NULL)
	{
	  if (preorder_func != NULL)
	    (*preorder_func) (subloop_node);

	  if (postorder_func != NULL)
	    (*postorder_func) (subloop_node);
	}

  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      ira_traverse_loop_tree (bb_p, subloop_node,
			      preorder_func, postorder_func);
    }

  /* Restore the current-node globals clobbered by the recursive
     calls before running the postorder callback on this node.  */
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (postorder_func != NULL)
    (*postorder_func) (loop_node);
}
1475
1476\f
1477
/* The basic block currently being processed.  */
static basic_block curr_bb;

/* This recursive function creates allocnos corresponding to
   pseudo-registers containing in X.  True OUTPUT_P means that X is
   a lvalue.  */
static void
create_insn_allocnos (rtx x, bool output_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      int regno;

      /* Only pseudo registers get allocnos; hard registers are
	 handled elsewhere.  */
      if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
	{
	  ira_allocno_t a;

	  if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
	    a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);

	  ALLOCNO_NREFS (a)++;
	  ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
	  if (output_p)
	    bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
	}
      return;
    }
  else if (code == SET)
    {
      create_insn_allocnos (SET_DEST (x), true);
      create_insn_allocnos (SET_SRC (x), false);
      return;
    }
  else if (code == CLOBBER)
    {
      create_insn_allocnos (XEXP (x, 0), true);
      return;
    }
  else if (code == MEM)
    {
      /* A MEM address is always a use, even when the MEM itself is
	 a destination.  */
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }
  else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
	   code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
    {
      /* Auto-modify both reads and writes its operand.  */
      create_insn_allocnos (XEXP (x, 0), true);
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }

  /* Generic case: recurse into all rtx sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	create_insn_allocnos (XEXP (x, i), output_p);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  create_insn_allocnos (XVECEXP (x, i, j), output_p);
    }
}
1543
/* Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  */
static void
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
  basic_block bb;
  rtx insn;
  unsigned int i;
  bitmap_iterator bi;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  /* NOTE(review): the reverse insn walk appears intended to build the
     next_regno_allocno chains in the order later passes expect (see
     the comment in create_allocnos) — confirm before changing.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), false);
  /* It might be a allocno living through from one subloop to
     another.  */
  EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (ira_curr_regno_allocno_map[i] == NULL)
      ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
1566
/* Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  ira_loop_tree_node_t parent;

  live_in_regs = DF_LR_IN (e->dest);
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  /* A pseudo lives across the edge iff it is live out of the source
     and live into the destination.  */
  EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
			     FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
	if (ira_curr_regno_allocno_map[i] == NULL)
	  {
	    /* The order of creations is important for right
	       ira_regno_allocno_map.  */
	    if ((parent = ira_curr_loop_tree_node->parent) != NULL
		&& parent->regno_allocno_map[i] == NULL)
	      ira_create_allocno (i, false, parent);
	    ira_create_allocno (i, false, ira_curr_loop_tree_node);
	  }
	bitmap_set_bit (border_allocnos,
			ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
      }
}
1597
1598/* Create allocnos corresponding to pseudo-registers living in loop
1599 represented by the corresponding loop tree node LOOP_NODE. This
1600 function is called by ira_traverse_loop_tree. */
1601static void
1602create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1603{
1604 if (loop_node->bb != NULL)
1605 create_bb_allocnos (loop_node);
1606 else if (loop_node != ira_loop_tree_root)
1607 {
1608 int i;
1609 edge_iterator ei;
1610 edge e;
1611 VEC (edge, heap) *edges;
1612
1613 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1614 if (e->src != loop_node->loop->latch)
1615 create_loop_allocnos (e);
b8698a0f 1616
058e97ec 1617 edges = get_loop_exit_edges (loop_node->loop);
ac47786e 1618 FOR_EACH_VEC_ELT (edge, edges, i, e)
058e97ec
VM
1619 create_loop_allocnos (e);
1620 VEC_free (edge, heap, edges);
1621 }
1622}
1623
1624/* Propagate information about allocnos modified inside the loop given
1625 by its LOOP_TREE_NODE to its parent. */
1626static void
1627propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1628{
1629 if (loop_tree_node == ira_loop_tree_root)
1630 return;
1631 ira_assert (loop_tree_node->bb == NULL);
1632 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1633 loop_tree_node->modified_regnos);
1634}
1635
/* Propagate new info about allocno A (see comments about accumulated
   info in allocno definition) to the corresponding allocno on upper
   loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info means allocno info finally calculated in this
   file.  */
static void
propagate_allocno_info (void)
{
  int i;
  ira_allocno_t a, parent_a;
  ira_loop_tree_node_t parent;
  enum reg_class cover_class;

  /* Propagation is only meaningful for regional allocation.  */
  if (flag_ira_region != IRA_REGION_ALL
      && flag_ira_region != IRA_REGION_MIXED)
    return;
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
	  && (parent_a = parent->regno_allocno_map[i]) != NULL
	  /* There are no caps yet at this point.  So use
	     border_allocnos to find allocnos for the propagation.  */
	  && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
			   ALLOCNO_NUM (a)))
	{
	  /* A single "not bad for spilling" child clears the flag on
	     the parent.  */
	  if (! ALLOCNO_BAD_SPILL_P (a))
	    ALLOCNO_BAD_SPILL_P (parent_a) = false;
	  /* Accumulate reference counts, frequencies, conflicts and
	     costs into the parent allocno.  */
	  ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
	  ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
	  ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
	  merge_hard_reg_conflicts (a, parent_a, true);
	  ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	    += ALLOCNO_CALLS_CROSSED_NUM (a);
	  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
	  ira_allocate_and_accumulate_costs
	    (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
	     ALLOCNO_HARD_REG_COSTS (a));
	  ira_allocate_and_accumulate_costs
	    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
	     cover_class,
	     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
	  ALLOCNO_COVER_CLASS_COST (parent_a)
	    += ALLOCNO_COVER_CLASS_COST (a);
	  ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
	}
}
1688
/* Create allocnos corresponding to pseudo-registers in the current
   function.  Traverse the loop tree for this.  */
static void
create_allocnos (void)
{
  /* We need to process BB first to correctly link allocnos by member
     next_regno_allocno.  */
  ira_traverse_loop_tree (true, ira_loop_tree_root,
			  create_loop_tree_node_allocnos, NULL);
  /* Modified-regno sets are only needed for regional allocation,
     hence only when optimizing.  */
  if (optimize)
    ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
			    propagate_modified_regnos);
}
1702
1703\f
1704
1705/* The page contains function to remove some regions from a separate
1706 register allocation. We remove regions whose separate allocation
1707 will hardly improve the result. As a result we speed up regional
1708 register allocation. */
1709
9140d27b 1710/* The function changes the object in range list given by R to OBJ. */
058e97ec 1711static void
9140d27b 1712change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
1713{
1714 for (; r != NULL; r = r->next)
9140d27b 1715 r->object = obj;
058e97ec
VM
1716}
1717
/* Move all live ranges associated with allocno FROM to allocno TO.
   Ranges are re-owned by TO's objects and merged into their existing
   lists; FROM's objects are left with empty range lists.  */
static void
move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  /* Both allocnos must be split into the same number of objects.  */
  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	{
	  fprintf (ira_dump_file,
		   "      Moving ranges of a%dr%d to a%dr%d: ",
		   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
		   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
	  ira_print_live_range_list (ira_dump_file, lr);
	}
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
	= ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
      OBJECT_LIVE_RANGES (from_obj) = NULL;
    }
}
1747
/* Copy all live ranges associated with allocno FROM to allocno TO.
   Like move_allocno_live_ranges, but FROM keeps its own ranges; TO
   receives freshly copied ones merged into its lists.  */
static void
copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	{
	  fprintf (ira_dump_file, "      Copying ranges of a%dr%d to a%dr%d: ",
		   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
		   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
	  ira_print_live_range_list (ira_dump_file, lr);
	}
      /* Duplicate first so FROM's list is untouched.  */
      lr = ira_copy_live_range_list (lr);
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
	= ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
    }
}
1775
058e97ec
VM
1776/* Return TRUE if NODE represents a loop with low register
1777 pressure. */
1778static bool
1779low_pressure_loop_node_p (ira_loop_tree_node_t node)
1780{
1781 int i;
1782 enum reg_class cover_class;
b8698a0f 1783
058e97ec
VM
1784 if (node->bb != NULL)
1785 return false;
b8698a0f 1786
058e97ec
VM
1787 for (i = 0; i < ira_reg_class_cover_size; i++)
1788 {
1789 cover_class = ira_reg_class_cover[i];
1790 if (node->reg_pressure[cover_class]
1791 > ira_available_class_regs[cover_class])
1792 return false;
1793 }
1794 return true;
1795}
1796
30ea859e
VM
1797/* Sort loops for marking them for removal. We put already marked
1798 loops first, then less frequent loops next, and then outer loops
1799 next. */
1800static int
1801loop_compare_func (const void *v1p, const void *v2p)
1802{
1803 int diff;
1804 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
1805 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
1806
1807 ira_assert (l1->parent != NULL && l2->parent != NULL);
1808 if (l1->to_remove_p && ! l2->to_remove_p)
1809 return -1;
1810 if (! l1->to_remove_p && l2->to_remove_p)
1811 return 1;
1812 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
1813 return diff;
1814 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
1815 return diff;
1816 /* Make sorting stable. */
1817 return l1->loop->num - l2->loop->num;
1818}
1819
1820
1821/* Mark loops which should be removed from regional allocation. We
1822 remove a loop with low register pressure inside another loop with
1823 register pressure. In this case a separate allocation of the loop
1824 hardly helps (for irregular register file architecture it could
1825 help by choosing a better hard register in the loop but we prefer
1826 faster allocation even in this case). We also remove cheap loops
1827 if there are more than IRA_MAX_LOOPS_NUM of them. */
1828static void
1829mark_loops_for_removal (void)
058e97ec 1830{
30ea859e
VM
1831 int i, n;
1832 ira_loop_tree_node_t *sorted_loops;
1833 loop_p loop;
1834
1835 sorted_loops
1836 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
1837 * VEC_length (loop_p,
1838 ira_loops.larray));
1839 for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1840 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1841 {
1842 if (ira_loop_nodes[i].parent == NULL)
1843 {
1844 /* Don't remove the root. */
1845 ira_loop_nodes[i].to_remove_p = false;
1846 continue;
1847 }
1848 sorted_loops[n++] = &ira_loop_nodes[i];
1849 ira_loop_nodes[i].to_remove_p
1850 = (low_pressure_loop_node_p (ira_loop_nodes[i].parent)
1851 && low_pressure_loop_node_p (&ira_loop_nodes[i]));
1852 }
1853 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
1854 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
1855 {
1856 sorted_loops[i]->to_remove_p = true;
1857 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1858 fprintf
1859 (ira_dump_file,
1860 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
1861 sorted_loops[i]->loop->num, sorted_loops[i]->loop->header->index,
1862 sorted_loops[i]->loop->header->frequency,
1863 loop_depth (sorted_loops[i]->loop),
1864 low_pressure_loop_node_p (sorted_loops[i]->parent)
1865 && low_pressure_loop_node_p (sorted_loops[i])
1866 ? "low pressure" : "cheap loop");
1867 }
1868 ira_free (sorted_loops);
058e97ec
VM
1869}
1870
/* Mark all loops but root for removing.  Used when regional
   allocation is disabled.  */
static void
mark_all_loops_for_removal (void)
{
  int i;
  loop_p loop;

  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
	if (ira_loop_nodes[i].parent == NULL)
	  {
	    /* Don't remove the root.  */
	    ira_loop_nodes[i].to_remove_p = false;
	    continue;
	  }
	ira_loop_nodes[i].to_remove_p = true;
	if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
	  fprintf
	    (ira_dump_file,
	     "  Mark loop %d (header %d, freq %d, depth %d) for removal\n",
	     ira_loop_nodes[i].loop->num,
	     ira_loop_nodes[i].loop->header->index,
	     ira_loop_nodes[i].loop->header->frequency,
	     loop_depth (ira_loop_nodes[i].loop));
      }
}
30ea859e 1898
058e97ec
VM
1899/* Definition of vector of loop tree nodes. */
1900DEF_VEC_P(ira_loop_tree_node_t);
1901DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1902
1903/* Vec containing references to all removed loop tree nodes. */
1904static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1905
1906/* Vec containing references to all children of loop tree nodes. */
1907static VEC(ira_loop_tree_node_t,heap) *children_vec;
1908
/* Remove subregions of NODE if their separate allocation will not
   improve the result.  Walks the loop tree depth-first, using
   CHILDREN_VEC as an explicit stack of surviving children and
   collecting every removed node into REMOVED_LOOP_VEC so the caller
   can free them afterwards.  Nodes marked to_remove_p have their
   children spliced into the nearest surviving ancestor.  */
static void
remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
{
  unsigned int start;
  bool remove_p;
  ira_loop_tree_node_t subnode;

  remove_p = node->to_remove_p;
  if (! remove_p)
    VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
  /* Remember the stack depth: everything pushed past this point
     belongs to NODE (or to a removed descendant of NODE).  */
  start = VEC_length (ira_loop_tree_node_t, children_vec);
  for (subnode = node->children; subnode != NULL; subnode = subnode->next)
    if (subnode->bb == NULL)
      /* A loop child: recurse; it may itself be removed.  */
      remove_uneccesary_loop_nodes_from_loop_tree (subnode);
    else
      /* A basic-block child always survives.  */
      VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
  /* The child lists are rebuilt from the stack below (or inherited by
     the surviving ancestor when NODE itself is removed).  */
  node->children = node->subloops = NULL;
  if (remove_p)
    {
      VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
      return;
    }
  /* Pop everything accumulated since START and relink it as NODE's
     children (and subloop list for loop nodes).  */
  while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
    {
      subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
      subnode->parent = node;
      subnode->next = node->children;
      node->children = subnode;
      if (subnode->bb == NULL)
	{
	  subnode->subloop_next = node->subloops;
	  node->subloops = subnode;
	}
    }
}
1946
c6bb4c93
VM
1947/* Return TRUE if NODE is inside PARENT. */
1948static bool
1949loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
1950{
1951 for (node = node->parent; node != NULL; node = node->parent)
1952 if (node == parent)
1953 return true;
1954 return false;
1955}
1956
1957/* Sort allocnos according to their order in regno allocno list. */
1958static int
1959regno_allocno_order_compare_func (const void *v1p, const void *v2p)
1960{
1961 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1962 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1963 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
1964 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
1965
1966 if (loop_is_inside_p (n1, n2))
1967 return -1;
1968 else if (loop_is_inside_p (n2, n1))
1969 return 1;
1970 /* If allocnos are equally good, sort by allocno numbers, so that
1971 the results of qsort leave nothing to chance. We put allocnos
1972 with higher number first in the list because it is the original
1973 order for allocnos from loops on the same levels. */
1974 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
1975}
1976
1977/* This array is used to sort allocnos to restore allocno order in
1978 the regno allocno list. */
1979static ira_allocno_t *regno_allocnos;
1980
1981/* Restore allocno order for REGNO in the regno allocno list. */
1982static void
1983ira_rebuild_regno_allocno_list (int regno)
1984{
1985 int i, n;
1986 ira_allocno_t a;
1987
1988 for (n = 0, a = ira_regno_allocno_map[regno];
1989 a != NULL;
1990 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1991 regno_allocnos[n++] = a;
1992 ira_assert (n > 0);
b8698a0f 1993 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
c6bb4c93
VM
1994 regno_allocno_order_compare_func);
1995 for (i = 1; i < n; i++)
1996 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
1997 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
1998 ira_regno_allocno_map[regno] = regno_allocnos[0];
1999 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2000 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
2001}
2002
311aab06
VM
/* Propagate info from allocno FROM_A to allocno A: accumulate hard
   register conflicts, reference/frequency/call statistics and cost
   vectors.  Used when FROM_A (from a removed region) is being merged
   into A (its representative in the surviving region).  */
static void
propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
{
  enum reg_class cover_class;

  merge_hard_reg_conflicts (from_a, a, false);
  ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
  ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
  ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
  ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
  /* bad_spill_p is sticky-false: if any merged allocno may be spilled
     profitably, so may the result.  */
  if (! ALLOCNO_BAD_SPILL_P (from_a))
    ALLOCNO_BAD_SPILL_P (a) = false;
  cover_class = ALLOCNO_COVER_CLASS (from_a);
  ira_assert (cover_class == ALLOCNO_COVER_CLASS (a));
  /* Cost vectors are allocated lazily; these helpers allocate A's
     vector on demand before adding FROM_A's costs in.  */
  ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
				     ALLOCNO_HARD_REG_COSTS (from_a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
				     cover_class,
				     ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
  ALLOCNO_COVER_CLASS_COST (a) += ALLOCNO_COVER_CLASS_COST (from_a);
  ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
}
2028
058e97ec
VM
/* Remove allocnos from loops removed from the allocation
   consideration.  For each pseudo, allocnos living in removed loop
   nodes are either hoisted to the nearest surviving ancestor (when
   that ancestor has no allocno for the pseudo yet) or merged into the
   ancestor's existing allocno and freed.  */
static void
remove_unnecessary_allocnos (void)
{
  int regno;
  bool merged_p, rebuild_p;
  ira_allocno_t a, prev_a, next_a, parent_a;
  ira_loop_tree_node_t a_node, parent;

  merged_p = false;
  /* Scratch array for ira_rebuild_regno_allocno_list, allocated
     lazily on first use and freed at the end.  */
  regno_allocnos = NULL;
  for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
    {
      rebuild_p = false;
      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
	   a != NULL;
	   a = next_a)
	{
	  next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
	  a_node = ALLOCNO_LOOP_TREE_NODE (a);
	  if (! a_node->to_remove_p)
	    prev_a = a;
	  else
	    {
	      /* Find the nearest ancestor that either already has an
		 allocno for REGNO or is itself surviving.  */
	      for (parent = a_node->parent;
		   (parent_a = parent->regno_allocno_map[regno]) == NULL
		     && parent->to_remove_p;
		   parent = parent->parent)
		;
	      if (parent_a == NULL)
		{
		  /* There are no allocnos with the same regno in
		     upper region -- just move the allocno to the
		     upper region.  */
		  prev_a = a;
		  ALLOCNO_LOOP_TREE_NODE (a) = parent;
		  parent->regno_allocno_map[regno] = a;
		  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
		  /* Hoisting breaks inner-loop-first order of the
		     regno allocno list.  */
		  rebuild_p = true;
		}
	      else
		{
		  /* Remove the allocno and update info of allocno in
		     the upper region.  */
		  if (prev_a == NULL)
		    ira_regno_allocno_map[regno] = next_a;
		  else
		    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
		  move_allocno_live_ranges (a, parent_a);
		  merged_p = true;
		  propagate_some_info_from_allocno (parent_a, a);
		  /* Remove it from the corresponding regno allocno
		     map to avoid info propagation of subsequent
		     allocno into this already removed allocno.  */
		  a_node->regno_allocno_map[regno] = NULL;
		  finish_allocno (a);
		}
	    }
	}
      if (rebuild_p)
	/* We need to restore the order in regno allocno list.  */
	{
	  if (regno_allocnos == NULL)
	    regno_allocnos
	      = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
						* ira_allocnos_num);
	  ira_rebuild_regno_allocno_list (regno);
	}
    }
  /* Merged live ranges invalidate the global start/finish chains.  */
  if (merged_p)
    ira_rebuild_start_finish_chains ();
  if (regno_allocnos != NULL)
    ira_free (regno_allocnos);
}
2104
/* Remove allocnos from all loops but the root.  First pass hoists or
   merges every non-root allocno into the root node's allocno for the
   same pseudo; second pass sanity-checks the remaining tree; third
   pass rebuilds the regno allocno maps and frees the allocnos that
   did not survive.  */
static void
remove_low_level_allocnos (void)
{
  int regno;
  bool merged_p, propagate_p;
  ira_allocno_t a, top_a;
  ira_loop_tree_node_t a_node, parent;
  ira_allocno_iterator ai;

  merged_p = false;
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      /* Root allocnos and cap members stay (caps are handled via
	 their cap chains).  */
      if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
	continue;
      regno = ALLOCNO_REGNO (a);
      if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
	{
	  /* No allocno for REGNO at the root yet: reparent A there.  */
	  ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
	  ira_loop_tree_root->regno_allocno_map[regno] = a;
	  continue;
	}
      /* Only propagate once per pseudo per subtree: if the immediate
	 parent already has an allocno for REGNO, its info was (or will
	 be) propagated through that allocno instead.  */
      propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
      /* Remove the allocno and update info of allocno in the upper
	 region.  */
      move_allocno_live_ranges (a, top_a);
      merged_p = true;
      if (propagate_p)
	propagate_some_info_from_allocno (top_a, a);
    }
  /* Consistency check: every non-root allocno is either a cap member
     (with a cap) or has a representative in its parent node.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root)
	continue;
      parent = a_node->parent;
      regno = ALLOCNO_REGNO (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (parent->regno_allocno_map[regno] != NULL);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      regno = ALLOCNO_REGNO (a);
      if (ira_loop_tree_root->regno_allocno_map[regno] == a)
	{
	  ira_object_t obj;
	  ira_allocno_object_iterator oi;

	  /* A is the surviving representative of REGNO: reset its
	     regno-list links and fold the accumulated ("total")
	     conflict info into the base conflict info.  */
	  ira_regno_allocno_map[regno] = a;
	  ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
	  ALLOCNO_CAP_MEMBER (a) = NULL;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
			       OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
	  if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
	    ALLOCNO_NO_STACK_REG_P (a) = true;
#endif
	}
      else
	finish_allocno (a);
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
}
2173
2174/* Remove loops from consideration. We remove all loops except for
2175 root if ALL_P or loops for which a separate allocation will not
2176 improve the result. We have to do this after allocno creation and
2177 their costs and cover class evaluation because only after that the
2178 register pressure can be known and is calculated. */
2179static void
2180remove_unnecessary_regions (bool all_p)
2181{
2182 if (all_p)
2183 mark_all_loops_for_removal ();
2184 else
2185 mark_loops_for_removal ();
058e97ec
VM
2186 children_vec
2187 = VEC_alloc (ira_loop_tree_node_t, heap,
2188 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2189 removed_loop_vec
2190 = VEC_alloc (ira_loop_tree_node_t, heap,
2191 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2192 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
2193 VEC_free (ira_loop_tree_node_t, heap, children_vec);
311aab06
VM
2194 if (all_p)
2195 remove_low_level_allocnos ();
2196 else
2197 remove_unnecessary_allocnos ();
058e97ec
VM
2198 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
2199 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
2200 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
2201}
2202
2203\f
2204
927425df
VM
/* At this point true value of allocno attribute bad_spill_p means
   that there is an insn where allocno occurs and where the allocno
   can not be used as memory.  The function updates the attribute, now
   it can be true only for allocnos which can not be used as memory in
   an insn and in whose live ranges there is other allocno deaths.
   Spilling allocnos with true value will not improve the code because
   it will not make other allocnos colorable and additional reloads
   for the corresponding pseudo will be generated in reload pass for
   each insn it occurs.

   This is a trick mentioned in one classic article of Chaitin etc
   which is frequently omitted in other implementations of RA based on
   graph coloring.  */
static void
update_bad_spill_attribute (void)
{
  int i;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_allocno_object_iterator aoi;
  ira_object_t obj;
  live_range_t r;
  enum reg_class cover_class;
  /* Per cover class, the set of program points where some object of
     that class dies (range finish points).  */
  bitmap_head dead_points[N_REG_CLASSES];

  for (i = 0; i < ira_reg_class_cover_size; i++)
    {
      cover_class = ira_reg_class_cover[i];
      bitmap_initialize (&dead_points[cover_class], &reg_obstack);
    }
  /* Pass 1: collect all death points per cover class.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      if (cover_class == NO_REGS)
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	  bitmap_set_bit (&dead_points[cover_class], r->finish);
    }
  /* Pass 2: clear bad_spill_p on any allocno whose live range
     strictly contains another allocno's death -- spilling it could
     then free a register for that other allocno.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      cover_class = ALLOCNO_COVER_CLASS (a);
      if (cover_class == NO_REGS)
	continue;
      if (! ALLOCNO_BAD_SPILL_P (a))
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	{
	  for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	    {
	      /* Look for a death point strictly inside the range.  */
	      for (i = r->start + 1; i < r->finish; i++)
		if (bitmap_bit_p (&dead_points[cover_class], i))
		  break;
	      if (i < r->finish)
		break;
	    }
	  /* R != NULL here means the inner loop hit a death point.  */
	  if (r != NULL)
	    {
	      ALLOCNO_BAD_SPILL_P (a) = false;
	      break;
	    }
	}
    }
  for (i = 0; i < ira_reg_class_cover_size; i++)
    {
      cover_class = ira_reg_class_cover[i];
      bitmap_clear (&dead_points[cover_class]);
    }
}
2274
2275\f
2276
058e97ec
VM
/* Set up minimal and maximal live range points for allocnos.  For
   each object, MIN/MAX are taken from its own live range list, then
   accumulated upwards into the corresponding objects of caps and of
   parent-node allocnos for the same pseudo.  */
static void
setup_min_max_allocno_live_range_point (void)
{
  int i;
  ira_allocno_t a, parent_a, cap;
  ira_allocno_iterator ai;
#ifdef ENABLE_IRA_CHECKING
  ira_object_iterator oi;
  ira_object_t obj;
#endif
  live_range_t r;
  ira_loop_tree_node_t parent;

  /* Initialize each object's MIN/MAX from its live range list.  The
     list is ordered with the latest range first, so MAX comes from
     the head and MIN from the tail.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      for (i = 0; i < n; i++)
	{
	  ira_object_t obj = ALLOCNO_OBJECT (a, i);
	  r = OBJECT_LIVE_RANGES (obj);
	  if (r == NULL)
	    continue;
	  OBJECT_MAX (obj) = r->finish;
	  for (; r->next != NULL; r = r->next)
	    ;
	  OBJECT_MIN (obj) = r->start;
	}
    }
  /* Propagate ranges upwards.  The regno allocno list is ordered
     inner-loop-first, so children are processed before parents.  */
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      {
	int j;
	int n = ALLOCNO_NUM_OBJECTS (a);
	for (j = 0; j < n; j++)
	  {
	    ira_object_t obj = ALLOCNO_OBJECT (a, j);
	    ira_object_t parent_obj;

	    if (OBJECT_MAX (obj) < 0)
	      continue;
	    ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	    /* Accumulation of range info.  */
	    if (ALLOCNO_CAP (a) != NULL)
	      {
		/* Widen the range of every cap up the cap chain.  */
		for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
		  {
		    ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
		    if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
		      OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
		    if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
		      OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
		  }
		continue;
	      }
	    if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
	      continue;
	    /* Widen the parent allocno's matching subobject.  */
	    parent_a = parent->regno_allocno_map[i];
	    parent_obj = ALLOCNO_OBJECT (parent_a, j);
	    if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
	      OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
	    if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
	      OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
	  }
      }
#ifdef ENABLE_IRA_CHECKING
  /* Verify every object's MIN/MAX landed inside [0, ira_max_point].  */
  FOR_EACH_OBJECT (obj, oi)
    {
      if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
	  && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
	continue;
      gcc_unreachable ();
    }
#endif
}
2354
2355/* Sort allocnos according to their live ranges. Allocnos with
7db7ed3c 2356 smaller cover class are put first unless we use priority coloring.
a49ae217 2357 Allocnos with the same cover class are ordered according their start
7db7ed3c
VM
2358 (min). Allocnos with the same start are ordered according their
2359 finish (max). */
058e97ec 2360static int
ac0ab4f7 2361object_range_compare_func (const void *v1p, const void *v2p)
058e97ec
VM
2362{
2363 int diff;
a49ae217
BS
2364 ira_object_t obj1 = *(const ira_object_t *) v1p;
2365 ira_object_t obj2 = *(const ira_object_t *) v2p;
2366 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2367 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
058e97ec 2368
7db7ed3c
VM
2369 if (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2370 && (diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
058e97ec 2371 return diff;
a49ae217 2372 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
058e97ec 2373 return diff;
a49ae217 2374 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
058e97ec
VM
2375 return diff;
2376 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2377}
2378
a49ae217 2379/* Sort ira_object_id_map and set up conflict id of allocnos. */
058e97ec 2380static void
a49ae217 2381sort_conflict_id_map (void)
058e97ec
VM
2382{
2383 int i, num;
2384 ira_allocno_t a;
2385 ira_allocno_iterator ai;
2386
2387 num = 0;
2388 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7
BS
2389 {
2390 ira_allocno_object_iterator oi;
2391 ira_object_t obj;
2392
2393 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2394 ira_object_id_map[num++] = obj;
2395 }
a49ae217 2396 qsort (ira_object_id_map, num, sizeof (ira_object_t),
ac0ab4f7 2397 object_range_compare_func);
058e97ec 2398 for (i = 0; i < num; i++)
a49ae217
BS
2399 {
2400 ira_object_t obj = ira_object_id_map[i];
2401 gcc_assert (obj != NULL);
2402 OBJECT_CONFLICT_ID (obj) = i;
2403 }
2404 for (i = num; i < ira_objects_num; i++)
2405 ira_object_id_map[i] = NULL;
058e97ec
VM
2406}
2407
/* Set up minimal and maximal conflict ids of allocnos with which
   given allocno can conflict.  After sort_conflict_id_map, objects
   are ordered by cover class (unless priority coloring) and by range
   start; two sweeps compute for each object a conservative window
   [OBJECT_MIN, OBJECT_MAX] of conflict ids of objects whose ranges
   can intersect its own.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  int cover_class;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  int word0_min, word0_max;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  cover_class = -1;
  first_not_finished = -1;
  /* Forward sweep: compute the minimal conflicting id.  Objects are
     visited in increasing range-start order within a cover class, so
     any object whose range finished before this one started cannot
     conflict with it or with anything after it.  */
  for (i = 0; i < ira_objects_num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];
      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);

      if (cover_class < 0
	  || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
	      && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
	{
	  /* New cover class: reset the sweep state.  */
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  min = i;
	  first_not_finished = i;
	}
      else
	{
	  start = OBJECT_MIN (obj);
	  /* If we skip an allocno, the allocno with smaller ids will
	     be also skipped because of the secondary sorting the
	     range finishes (see function
	     object_range_compare_func).  */
	  while (first_not_finished < i
		 && start > OBJECT_MAX (ira_object_id_map
					[first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	}
      if (min == i)
	/* We could increase min further in this case but it is good
	   enough.  */
	min++;
      /* OBJECT_MIN is about to be repurposed as a conflict id; save
	 the real range start for the backward sweep.  */
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
    }
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  cover_class = -1;
  filled_area_start = -1;
  /* Backward sweep: compute the maximal conflicting id.  last_lived[p]
     records the smallest-id object (seen so far, i.e. with larger id)
     that is live at program point P.  */
  for (i = ira_objects_num - 1; i >= 0; i--)
    {
      ira_object_t obj = ira_object_id_map[i];
      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);
      if (cover_class < 0
	  || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
	      && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
	{
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	    last_lived[j] = -1;
	  filled_area_start = ira_max_point;
	}
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      if (max < 0)
	/* We could decrease max further in this case but it is good
	   enough.  */
	max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go further A range finish to recognize
	 intersection quickly because if the finish of subsequently
	 processed allocno (it has smaller conflict id) range is
	 further A range finish than they are definitely intersected
	 (the reason for this is the allocnos with bigger conflict id
	 have their range starts not smaller than allocnos with
	 smaller ids.  */
      for (j = min; j < filled_area_start; j++)
	last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);

  /* For allocnos with more than one object, we may later record extra conflicts in
     subobject 0 that we cannot really know about here.
     For now, simply widen the min/max range of these subobjects.  */

  word0_min = INT_MAX;
  word0_max = INT_MIN;

  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;
      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_CONFLICT_ID (obj0) < word0_min)
	word0_min = OBJECT_CONFLICT_ID (obj0);
      if (OBJECT_CONFLICT_ID (obj0) > word0_max)
	word0_max = OBJECT_CONFLICT_ID (obj0);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;
      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_MIN (obj0) > word0_min)
	OBJECT_MIN (obj0) = word0_min;
      if (OBJECT_MAX (obj0) < word0_max)
	OBJECT_MAX (obj0) = word0_max;
    }
}
2532
2533\f
2534
2535static void
2536create_caps (void)
2537{
2538 ira_allocno_t a;
2539 ira_allocno_iterator ai;
2540 ira_loop_tree_node_t loop_tree_node;
2541
2542 FOR_EACH_ALLOCNO (a, ai)
2543 {
2544 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2545 continue;
2546 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2547 create_cap_allocno (a);
2548 else if (ALLOCNO_CAP (a) == NULL)
2549 {
2550 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2551 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2552 create_cap_allocno (a);
2553 }
2554 }
2555}
2556
2557\f
2558
2559/* The page contains code transforming more one region internal
2560 representation (IR) to one region IR which is necessary for reload.
2561 This transformation is called IR flattening. We might just rebuild
2562 the IR for one region but we don't do it because it takes a lot of
2563 time. */
2564
82b33628
VM
2565/* Map: regno -> allocnos which will finally represent the regno for
2566 IR with one region. */
2567static ira_allocno_t *regno_top_level_allocno_map;
2568
029da7d4
BS
2569/* Find the allocno that corresponds to A at a level one higher up in the
2570 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2571ira_allocno_t
2572ira_parent_allocno (ira_allocno_t a)
2573{
2574 ira_loop_tree_node_t parent;
2575
2576 if (ALLOCNO_CAP (a) != NULL)
2577 return NULL;
2578
2579 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2580 if (parent == NULL)
2581 return NULL;
2582
2583 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2584}
2585
2586/* Find the allocno that corresponds to A at a level one higher up in the
2587 loop tree. If ALLOCNO_CAP is set for A, return that. */
2588ira_allocno_t
2589ira_parent_or_cap_allocno (ira_allocno_t a)
2590{
2591 if (ALLOCNO_CAP (a) != NULL)
2592 return ALLOCNO_CAP (a);
2593
2594 return ira_parent_allocno (a);
2595}
2596
/* Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  */
static bool
copy_info_to_removed_store_destinations (int regno)
{
  ira_allocno_t a;
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  bool merged_p;

  merged_p = false;
  for (a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    {
      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
	/* This allocno will be removed.  */
	continue;

      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      /* Walk up to the nearest ancestor allocno that either is
	 missing (stop with NULL) or is a surviving allocno that is
	 the destination of a removed memory store.  */
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
	   parent != NULL;
	   parent = parent->parent)
	if ((parent_a = parent->regno_allocno_map[regno]) == NULL
	    || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
							       (parent_a))]
		&& ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
	  break;
      if (parent == NULL || parent_a == NULL)
	continue;

      /* Copy (not move) the child's ranges and conflicts into the
	 store-destination allocno and accumulate call info.  */
      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);

      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	+= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
      merged_p = true;
    }
  return merged_p;
}
2644
2645/* Flatten the IR. In other words, this function transforms IR as if
2646 it were built with one region (without loops). We could make it
2647 much simpler by rebuilding IR with one region, but unfortunately it
2648 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
2649 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
2650 IRA_MAX_POINT before emitting insns on the loop borders. */
2651void
2652ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
2653{
9140d27b 2654 int i, j;
ea1c67e6 2655 bool keep_p;
058e97ec 2656 int hard_regs_num;
82b33628 2657 bool new_pseudos_p, merged_p, mem_dest_p;
058e97ec
VM
2658 unsigned int n;
2659 enum reg_class cover_class;
2660 ira_allocno_t a, parent_a, first, second, node_first, node_second;
058e97ec 2661 ira_copy_t cp;
029da7d4 2662 ira_loop_tree_node_t node;
b14151b5 2663 live_range_t r;
058e97ec
VM
2664 ira_allocno_iterator ai;
2665 ira_copy_iterator ci;
058e97ec
VM
2666
2667 regno_top_level_allocno_map
2668 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
2669 memset (regno_top_level_allocno_map, 0,
2670 max_reg_num () * sizeof (ira_allocno_t));
058e97ec 2671 new_pseudos_p = merged_p = false;
0ca9fa56
VM
2672 FOR_EACH_ALLOCNO (a, ai)
2673 {
ac0ab4f7
BS
2674 ira_allocno_object_iterator oi;
2675 ira_object_t obj;
0ca9fa56
VM
2676 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2677 /* Caps are not in the regno allocno maps and they are never
2678 will be transformed into allocnos existing after IR
2679 flattening. */
2680 continue;
ac0ab4f7
BS
2681 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2682 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
2683 OBJECT_CONFLICT_HARD_REGS (obj));
0ca9fa56
VM
2684#ifdef STACK_REGS
2685 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
2686#endif
2687 }
058e97ec
VM
2688 /* Fix final allocno attributes. */
2689 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2690 {
0ca9fa56 2691 mem_dest_p = false;
058e97ec
VM
2692 for (a = ira_regno_allocno_map[i];
2693 a != NULL;
2694 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2695 {
2696 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2697 if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
2698 new_pseudos_p = true;
029da7d4
BS
2699 parent_a = ira_parent_allocno (a);
2700 if (parent_a == NULL)
058e97ec
VM
2701 {
2702 ALLOCNO_COPIES (a) = NULL;
2703 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2704 continue;
2705 }
2706 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
b8698a0f 2707
82b33628
VM
2708 if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
2709 mem_dest_p = true;
0ca9fa56 2710 if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
058e97ec 2711 {
3c55880a
BS
2712 merge_hard_reg_conflicts (a, parent_a, true);
2713 move_allocno_live_ranges (a, parent_a);
058e97ec 2714 merged_p = true;
058e97ec
VM
2715 ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2716 = (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2717 || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
2718 continue;
2719 }
2720 new_pseudos_p = true;
058e97ec
VM
2721 for (;;)
2722 {
058e97ec
VM
2723 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
2724 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
ea1c67e6
VM
2725 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
2726 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2727 -= ALLOCNO_CALLS_CROSSED_NUM (a);
2728 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2729 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
058e97ec
VM
2730 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
2731 && ALLOCNO_NREFS (parent_a) >= 0
2732 && ALLOCNO_FREQ (parent_a) >= 0);
2733 cover_class = ALLOCNO_COVER_CLASS (parent_a);
2734 hard_regs_num = ira_class_hard_regs_num[cover_class];
2735 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
2736 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
2737 for (j = 0; j < hard_regs_num; j++)
2738 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
2739 -= ALLOCNO_HARD_REG_COSTS (a)[j];
2740 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
2741 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
2742 for (j = 0; j < hard_regs_num; j++)
2743 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
2744 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
2745 ALLOCNO_COVER_CLASS_COST (parent_a)
2746 -= ALLOCNO_COVER_CLASS_COST (a);
2747 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
029da7d4
BS
2748 parent_a = ira_parent_allocno (parent_a);
2749 if (parent_a == NULL)
058e97ec
VM
2750 break;
2751 }
058e97ec
VM
2752 ALLOCNO_COPIES (a) = NULL;
2753 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2754 }
801f03e3 2755 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
82b33628 2756 merged_p = true;
058e97ec 2757 }
058e97ec
VM
2758 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
2759 if (merged_p || ira_max_point_before_emit != ira_max_point)
2760 ira_rebuild_start_finish_chains ();
2761 if (new_pseudos_p)
2762 {
9140d27b
BS
2763 sparseset objects_live;
2764
058e97ec
VM
2765 /* Rebuild conflicts. */
2766 FOR_EACH_ALLOCNO (a, ai)
2767 {
ac0ab4f7
BS
2768 ira_allocno_object_iterator oi;
2769 ira_object_t obj;
058e97ec
VM
2770 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2771 || ALLOCNO_CAP_MEMBER (a) != NULL)
2772 continue;
ac0ab4f7
BS
2773 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2774 {
2775 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2776 ira_assert (r->object == obj);
2777 clear_conflicts (obj);
2778 }
058e97ec 2779 }
9140d27b 2780 objects_live = sparseset_alloc (ira_objects_num);
058e97ec
VM
2781 for (i = 0; i < ira_max_point; i++)
2782 {
2783 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
2784 {
9140d27b
BS
2785 ira_object_t obj = r->object;
2786 a = OBJECT_ALLOCNO (obj);
058e97ec
VM
2787 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2788 || ALLOCNO_CAP_MEMBER (a) != NULL)
2789 continue;
ac0ab4f7 2790
058e97ec 2791 cover_class = ALLOCNO_COVER_CLASS (a);
9140d27b
BS
2792 sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
2793 EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
058e97ec 2794 {
9140d27b
BS
2795 ira_object_t live_obj = ira_object_id_map[n];
2796 ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
2797 enum reg_class live_cover = ALLOCNO_COVER_CLASS (live_a);
9140d27b 2798 if (ira_reg_classes_intersect_p[cover_class][live_cover]
058e97ec 2799 /* Don't set up conflict for the allocno with itself. */
9140d27b
BS
2800 && live_a != a)
2801 ira_add_conflict (obj, live_obj);
058e97ec
VM
2802 }
2803 }
b8698a0f 2804
058e97ec 2805 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
9140d27b 2806 sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
058e97ec 2807 }
9140d27b 2808 sparseset_free (objects_live);
058e97ec
VM
2809 compress_conflict_vecs ();
2810 }
2811 /* Mark some copies for removing and change allocnos in the rest
2812 copies. */
2813 FOR_EACH_COPY (cp, ci)
2814 {
2815 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
2816 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
2817 {
2818 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2819 fprintf
2820 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
2821 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
2822 ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
2823 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
2824 ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
2825 cp->loop_tree_node = NULL;
2826 continue;
2827 }
2828 first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
2829 second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
2830 node = cp->loop_tree_node;
2831 if (node == NULL)
2832 keep_p = true; /* It copy generated in ira-emit.c. */
2833 else
2834 {
2835 /* Check that the copy was not propagated from level on
2836 which we will have different pseudos. */
2837 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
2838 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
2839 keep_p = ((REGNO (ALLOCNO_REG (first))
2840 == REGNO (ALLOCNO_REG (node_first)))
2841 && (REGNO (ALLOCNO_REG (second))
2842 == REGNO (ALLOCNO_REG (node_second))));
2843 }
2844 if (keep_p)
2845 {
2846 cp->loop_tree_node = ira_loop_tree_root;
2847 cp->first = first;
2848 cp->second = second;
2849 }
2850 else
2851 {
2852 cp->loop_tree_node = NULL;
2853 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2854 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
2855 cp->num, ALLOCNO_NUM (cp->first),
2856 REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
2857 REGNO (ALLOCNO_REG (cp->second)));
2858 }
2859 }
2860 /* Remove unnecessary allocnos on lower levels of the loop tree. */
2861 FOR_EACH_ALLOCNO (a, ai)
2862 {
2863 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2864 || ALLOCNO_CAP_MEMBER (a) != NULL)
2865 {
2866 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2867 fprintf (ira_dump_file, " Remove a%dr%d\n",
2868 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
2869 finish_allocno (a);
2870 continue;
2871 }
2872 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2873 ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
2874 ALLOCNO_CAP (a) = NULL;
cb1ca6ac 2875 /* Restore updated costs for assignments from reload. */
058e97ec 2876 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
cb1ca6ac 2877 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = ALLOCNO_COVER_CLASS_COST (a);
058e97ec
VM
2878 if (! ALLOCNO_ASSIGNED_P (a))
2879 ira_free_allocno_updated_costs (a);
2880 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
2881 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
2882 }
2883 /* Remove unnecessary copies. */
2884 FOR_EACH_COPY (cp, ci)
2885 {
2886 if (cp->loop_tree_node == NULL)
2887 {
2888 ira_copies[cp->num] = NULL;
2889 finish_copy (cp);
2890 continue;
2891 }
2892 ira_assert
2893 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
2894 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
2895 ira_add_allocno_copy_to_list (cp);
2896 ira_swap_allocno_copy_ends_if_necessary (cp);
2897 }
2898 rebuild_regno_allocno_maps ();
b15a7ae6
VM
2899 if (ira_max_point != ira_max_point_before_emit)
2900 ira_compress_allocno_live_ranges ();
058e97ec
VM
2901 ira_free (regno_top_level_allocno_map);
2902}
2903
2904\f
2905
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      /* Every allocno must be recorded in the set of all allocnos of
	 its home loop tree node.  */
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
				ALLOCNO_NUM (a)));
      /* Allocnos of the root node have no upper level to check.  */
      if (loop_tree_node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* A cap allocno below the root must itself be capped on the
	   next upper level.  */
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	/* A regular (non-cap) allocno without a cap must be
	   represented by an allocno for the same regno in the parent
	   node, and must live across the loop border.  */
	ira_assert (loop_tree_node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (loop_tree_node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
2933
4ac293e2
VM
2934/* Identify allocnos which prefer a register class with a single hard register.
2935 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
2936 less likely to use the preferred singleton register. */
2937static void
2938update_conflict_hard_reg_costs (void)
2939{
2940 ira_allocno_t a;
2941 ira_allocno_iterator ai;
2942 int i, index, min;
2943
2944 FOR_EACH_ALLOCNO (a, ai)
2945 {
2946 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
2947 enum reg_class pref = reg_preferred_class (ALLOCNO_REGNO (a));
2948
2949 if (reg_class_size[pref] != 1)
2950 continue;
2951 index = (ira_class_hard_reg_index[cover_class]
2952 [ira_class_hard_regs[pref][0]]);
2953 if (index < 0)
2954 continue;
2955 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
2956 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
2957 continue;
2958 min = INT_MAX;
2959 for (i = ira_class_hard_regs_num[cover_class] - 1; i >= 0; i--)
2960 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_COVER_CLASS_COST (a)
2961 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
2962 min = ALLOCNO_HARD_REG_COSTS (a)[i];
2963 if (min == INT_MAX)
2964 continue;
2965 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
2966 cover_class, 0);
2967 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
2968 -= min - ALLOCNO_COVER_CLASS_COST (a);
2969 }
2970}
2971
/* Create an internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  If LOOPS_P is FALSE the nodes corresponding to
   the loops (except the root which corresponds the all function) and
   correspondingly allocnos for the loops will be not created.  Such
   parameter value is used for Chaitin-Briggs coloring.  The function
   returns TRUE if we generate loop structure (besides nodes
   representing all function and the basic blocks) for regional
   allocation.  A true return means that we really need to flatten IR
   before the reload.  */
bool
ira_build (bool loops_p)
{
  df_analyze ();

  /* Phase order below matters: allocnos and the loop tree must exist
     before costs, objects, and live ranges can be computed.  */
  initiate_cost_vectors ();
  initiate_allocnos ();
  initiate_copies ();
  create_loop_tree_nodes (loops_p);
  form_loop_tree ();
  create_allocnos ();
  ira_costs ();
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  /* After region removal, recompute whether more than one region
     remains; only then is cap/info propagation needed.  */
  loops_p = more_one_region_p ();
  if (loops_p)
    {
      propagate_allocno_info ();
      create_caps ();
    }
  ira_tune_allocno_costs_and_cover_classes ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
#endif
  /* Conflict building depends on the live-range points and the sorted
     conflict id map set up just before it.  */
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
    {
      ira_allocno_t a;
      ira_allocno_iterator ai;

      /* Remove all regions but root one.  */
      if (loops_p)
	{
	  remove_unnecessary_regions (true);
	  loops_p = false;
	}
      /* We don't save hard registers around calls for fast allocation
	 -- add caller clobbered registers as conflicting ones to
	 allocno crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
	if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
	  ior_hard_reg_conflicts (a, &call_used_reg_set);
    }
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    {
      /* Dump summary statistics: conflict count N, live-range count
	 NR, and NR_BIG allocnos spanning more than one object.  */
      int n, nr, nr_big;
      ira_allocno_t a;
      live_range_t r;
      ira_allocno_iterator ai;

      n = 0;
      nr = 0;
      nr_big = 0;
      FOR_EACH_ALLOCNO (a, ai)
	{
	  int j, nobj = ALLOCNO_NUM_OBJECTS (a);
	  if (nobj > 1)
	    nr_big++;
	  for (j = 0; j < nobj; j++)
	    {
	      ira_object_t obj = ALLOCNO_OBJECT (a, j);
	      n += OBJECT_NUM_CONFLICTS (obj);
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		nr++;
	    }
	}
      fprintf (ira_dump_file, "  regions=%d, blocks=%d, points=%d\n",
	       VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
	       ira_max_point);
      fprintf (ira_dump_file,
	       "    allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
	       ira_allocnos_num, nr_big, ira_copies_num, n, nr);
    }
  return loops_p;
}
3065
/* Release the data created by function ira_build.  */
void
ira_destroy (void)
{
  /* Tear down in an order mirroring construction: loop tree and
     copies before allocnos, which own cost vectors and live ranges.  */
  finish_loop_tree_nodes ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}
This page took 1.111119 seconds and 5 git commands to generate.