1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42
43 /* Verify that there is exactly one jump instruction emitted since LAST and
44 attach a REG_BR_PROB note to it specifying PROBABILITY.
45 ??? We really ought to pass the probability down to the RTL expanders and
46 let them re-distribute it when the conditional expands into multiple
47 conditionals. This is however difficult to do. */
48 static void
49 add_reg_br_prob_note (rtx last, int probability)
50 {
51 if (profile_status == PROFILE_ABSENT)
52 return;
53 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
54 if (JUMP_P (last))
55 {
56 /* It is common to emit a condjump-around-jump sequence when we don't
57 know how to reverse the conditional. Special-case this. */
58 if (!any_condjump_p (last)
59 || !JUMP_P (NEXT_INSN (last))
60 || !simplejump_p (NEXT_INSN (last))
61 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
62 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
63 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
64 goto failed;
65 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
66 REG_NOTES (last)
67 = gen_rtx_EXPR_LIST (REG_BR_PROB,
68 GEN_INT (REG_BR_PROB_BASE - probability),
69 REG_NOTES (last));
70 return;
71 }
72 if (!last || !JUMP_P (last) || !any_condjump_p (last))
73 goto failed;
74 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
75 REG_NOTES (last)
76 = gen_rtx_EXPR_LIST (REG_BR_PROB,
77 GEN_INT (probability), REG_NOTES (last));
78 return;
79 failed:
80 if (dump_file)
81 fprintf (dump_file, "Failed to add probability note\n");
82 }
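/* Illustrative shape of the condjump-around-jump sequence recognized
   above (a sketch, assuming a conditional we could not reverse):

       conditional jump to L1     ; taken when the branch is NOT taken
       unconditional jump to L2   ; the branch actually being predicted
       barrier
   L1: code_label

   The simplejump to L2 is reached with PROBABILITY, hence the note on
   the condjump itself carries REG_BR_PROB_BASE - PROBABILITY.  */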
83
84
85 #ifndef LOCAL_ALIGNMENT
86 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
87 #endif
88
89 #ifndef STACK_ALIGNMENT_NEEDED
90 #define STACK_ALIGNMENT_NEEDED 1
91 #endif
92
93
94 /* This structure holds data relevant to one variable that will be
95 placed in a stack slot. */
96 struct stack_var
97 {
98 /* The variable. */
99 tree decl;
100
101 /* The offset of the variable. During partitioning, this is the
102 offset relative to the partition. After partitioning, this
103 is relative to the stack frame. */
104 HOST_WIDE_INT offset;
105
106 /* Initially, the size of the variable. Later, the size of the partition,
107 if this variable becomes its partition's representative. */
108 HOST_WIDE_INT size;
109
110 /* The *byte* alignment required for this variable. Or, as with the
111 size, the alignment for this partition. */
112 unsigned int alignb;
113
114 /* The partition representative. */
115 size_t representative;
116
117 /* The next stack variable in the partition, or EOC. */
118 size_t next;
119 };
120
121 #define EOC ((size_t)-1)
122
123 /* We have an array of such objects while deciding allocation. */
124 static struct stack_var *stack_vars;
125 static size_t stack_vars_alloc;
126 static size_t stack_vars_num;
127
128 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
129 is non-decreasing. */
130 static size_t *stack_vars_sorted;
131
132 /* We have an interference graph between such objects. This graph
133 is lower triangular. */
134 static bool *stack_vars_conflict;
135 static size_t stack_vars_conflict_alloc;
136
137 /* The phase of the stack frame. This is the known misalignment of
138 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
139 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
140 static int frame_phase;
141
142 /* Used during expand_used_vars to remember if we saw any decls for
143 which we'd like to enable stack smashing protection. */
144 static bool has_protected_decls;
145
146 /* Used during expand_used_vars. Remember if we saw a character buffer
147 smaller than our cutoff threshold. Used for -Wstack-protector. */
148 static bool has_short_buffer;
149
150 /* Discover the byte alignment to use for DECL. Ignore any alignment
151 we cannot provide given the expected alignment of the stack boundary. */
152
153 static unsigned int
154 get_decl_align_unit (tree decl)
155 {
156 unsigned int align;
157
158 align = DECL_ALIGN (decl);
159 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
160 if (align > PREFERRED_STACK_BOUNDARY)
161 align = PREFERRED_STACK_BOUNDARY;
162 if (cfun->stack_alignment_needed < align)
163 cfun->stack_alignment_needed = align;
164
165 return align / BITS_PER_UNIT;
166 }
167
168 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
169 Return the frame offset. */
170
171 static HOST_WIDE_INT
172 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
173 {
174 HOST_WIDE_INT offset, new_frame_offset;
175
176 new_frame_offset = frame_offset;
177 if (FRAME_GROWS_DOWNWARD)
178 {
179 new_frame_offset -= size + frame_phase;
180 new_frame_offset &= -align;
181 new_frame_offset += frame_phase;
182 offset = new_frame_offset;
183 }
184 else
185 {
186 new_frame_offset -= frame_phase;
187 new_frame_offset += align - 1;
188 new_frame_offset &= -align;
189 new_frame_offset += frame_phase;
190 offset = new_frame_offset;
191 new_frame_offset += size;
192 }
193 frame_offset = new_frame_offset;
194
195 if (frame_offset_overflow (frame_offset, cfun->decl))
196 frame_offset = offset = 0;
197
198 return offset;
199 }
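/* Worked example for the FRAME_GROWS_DOWNWARD case, assuming
   frame_phase == 0, frame_offset == -12, size == 8, align == 8:

       new_frame_offset = -12 - 8 = -20
       new_frame_offset &= -8       =>  -24   (round down to a multiple of 8)

   so the allocation is placed at frame offset -24, which also becomes
   the new frame_offset.  */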
200
201 /* Accumulate DECL into STACK_VARS. */
202
203 static void
204 add_stack_var (tree decl)
205 {
206 if (stack_vars_num >= stack_vars_alloc)
207 {
208 if (stack_vars_alloc)
209 stack_vars_alloc = stack_vars_alloc * 3 / 2;
210 else
211 stack_vars_alloc = 32;
212 stack_vars
213 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
214 }
215 stack_vars[stack_vars_num].decl = decl;
216 stack_vars[stack_vars_num].offset = 0;
217 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
218 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
219
220 /* All variables are initially in their own partition. */
221 stack_vars[stack_vars_num].representative = stack_vars_num;
222 stack_vars[stack_vars_num].next = EOC;
223
224 /* Ensure that this decl doesn't get put onto the list twice. */
225 SET_DECL_RTL (decl, pc_rtx);
226
227 stack_vars_num++;
228 }
229
230 /* Compute the linear index of a lower-triangular coordinate (I, J). */
231
232 static size_t
233 triangular_index (size_t i, size_t j)
234 {
235 if (i < j)
236 {
237 size_t t;
238 t = i, i = j, j = t;
239 }
240 return (i * (i + 1)) / 2 + j;
241 }
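/* E.g. for three objects the lower triangle is laid out as

       (0,0) -> 0
       (1,0) -> 1   (1,1) -> 2
       (2,0) -> 3   (2,1) -> 4   (2,2) -> 5

   and a query for (i,j) with i < j is first reflected to (j,i).  */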
242
243 /* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
244
245 static void
246 resize_stack_vars_conflict (size_t n)
247 {
248 size_t size = triangular_index (n-1, n-1) + 1;
249
250 if (size <= stack_vars_conflict_alloc)
251 return;
252
253 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
254 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
255 (size - stack_vars_conflict_alloc) * sizeof (bool));
256 stack_vars_conflict_alloc = size;
257 }
258
259 /* Make the decls associated with luids X and Y conflict. */
260
261 static void
262 add_stack_var_conflict (size_t x, size_t y)
263 {
264 size_t index = triangular_index (x, y);
265 gcc_assert (index < stack_vars_conflict_alloc);
266 stack_vars_conflict[index] = true;
267 }
268
269 /* Check whether the decls associated with luids X and Y conflict. */
270
271 static bool
272 stack_var_conflict_p (size_t x, size_t y)
273 {
274 size_t index = triangular_index (x, y);
275 gcc_assert (index < stack_vars_conflict_alloc);
276 return stack_vars_conflict[index];
277 }
278
279 /* Returns true if TYPE is or contains a union type. */
280
281 static bool
282 aggregate_contains_union_type (tree type)
283 {
284 tree field;
285
286 if (TREE_CODE (type) == UNION_TYPE
287 || TREE_CODE (type) == QUAL_UNION_TYPE)
288 return true;
289 if (TREE_CODE (type) == ARRAY_TYPE)
290 return aggregate_contains_union_type (TREE_TYPE (type));
291 if (TREE_CODE (type) != RECORD_TYPE)
292 return false;
293
294 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
295 if (TREE_CODE (field) == FIELD_DECL)
296 if (aggregate_contains_union_type (TREE_TYPE (field)))
297 return true;
298
299 return false;
300 }
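/* For instance, this returns true for

       struct s { struct t { union u { int i; } x; } y[4]; };

   since the union is reached through a RECORD_TYPE field and an
   ARRAY_TYPE element type.  */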
301
302 /* A subroutine of expand_used_vars. If two variables X and Y have alias
303 sets that do not conflict, then we must add a conflict for these variables
304 in the interference graph. We also need to make sure to add conflicts
305 for union-containing structures. Otherwise RTL alias analysis comes along
306 and, due to type-based aliasing rules, decides that for two overlapping
307 union temporaries { short s; int i; } accesses to the same mem through
308 different types may not alias, and happily reorders stores across
309 the lifetime boundaries of the temporaries (see PR25654).
310 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
311
312 static void
313 add_alias_set_conflicts (void)
314 {
315 size_t i, j, n = stack_vars_num;
316
317 for (i = 0; i < n; ++i)
318 {
319 tree type_i = TREE_TYPE (stack_vars[i].decl);
320 bool aggr_i = AGGREGATE_TYPE_P (type_i);
321 bool contains_union;
322
323 contains_union = aggregate_contains_union_type (type_i);
324 for (j = 0; j < i; ++j)
325 {
326 tree type_j = TREE_TYPE (stack_vars[j].decl);
327 bool aggr_j = AGGREGATE_TYPE_P (type_j);
328 if (aggr_i != aggr_j
329 /* Either the objects conflict by means of type-based
330 aliasing rules, or we need to add a conflict. */
331 || !objects_must_conflict_p (type_i, type_j)
332 /* In case the types do not conflict ensure that access
333 to elements will conflict. In case of unions we have
334 to be careful as type based aliasing rules may say
335 access to the same memory does not conflict. So play
336 safe and add a conflict in this case. */
337 || contains_union)
338 add_stack_var_conflict (i, j);
339 }
340 }
341 }
342
343 /* A subroutine of partition_stack_vars. A comparison function for qsort,
344 sorting an array of indices by the size of the object. */
345
346 static int
347 stack_var_size_cmp (const void *a, const void *b)
348 {
349 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
350 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
351
352 if (sa < sb)
353 return -1;
354 if (sa > sb)
355 return 1;
356 return 0;
357 }
358
359 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
360 partitioning algorithm. Partitions A and B are known to be non-conflicting.
361 Merge them into a single partition A.
362
363 At the same time, add OFFSET to all variables in partition B. At the end
364 of the partitioning process we'll have a nice block that is easy to lay
365 out within the stack frame. */
366
367 static void
368 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
369 {
370 size_t i, last;
371
372 /* Update each element of partition B with the given offset,
373 and merge them into partition A. */
374 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
375 {
376 stack_vars[i].offset += offset;
377 stack_vars[i].representative = a;
378 }
379 stack_vars[last].next = stack_vars[a].next;
380 stack_vars[a].next = b;
381
382 /* Update the required alignment of partition A to account for B. */
383 if (stack_vars[a].alignb < stack_vars[b].alignb)
384 stack_vars[a].alignb = stack_vars[b].alignb;
385
386 /* Update the interference graph and merge the conflicts. */
387 for (last = stack_vars_num, i = 0; i < last; ++i)
388 if (stack_var_conflict_p (b, i))
389 add_stack_var_conflict (a, i);
390 }
391
392 /* A subroutine of expand_used_vars. Binpack the variables into
393 partitions constrained by the interference graph. The overall
394 algorithm used is as follows:
395
396 Sort the objects by size.
397 For each object A {
398 S = size(A)
399 O = 0
400 loop {
401 Look for the largest non-conflicting object B with size <= S.
402 UNION (A, B)
403 offset(B) = O
404 O += size(B)
405 S -= size(B)
406 }
407 }
408 */
409
410 static void
411 partition_stack_vars (void)
412 {
413 size_t si, sj, n = stack_vars_num;
414
415 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
416 for (si = 0; si < n; ++si)
417 stack_vars_sorted[si] = si;
418
419 if (n == 1)
420 return;
421
422 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
423
424 /* Special case: detect when all variables conflict, and thus we can't
425 do anything during the partitioning loop. It isn't uncommon (with
426 C code at least) to declare all variables at the top of the function,
427 and if we're not inlining, then all variables will be in the same scope.
428 Take advantage of very fast libc routines for this scan. */
429 gcc_assert (sizeof (bool) == sizeof (char));
430 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
431 return;
432
433 for (si = 0; si < n; ++si)
434 {
435 size_t i = stack_vars_sorted[si];
436 HOST_WIDE_INT isize = stack_vars[i].size;
437 HOST_WIDE_INT offset = 0;
438
439 for (sj = si; sj-- > 0; )
440 {
441 size_t j = stack_vars_sorted[sj];
442 HOST_WIDE_INT jsize = stack_vars[j].size;
443 unsigned int jalign = stack_vars[j].alignb;
444
445 /* Ignore objects that aren't partition representatives. */
446 if (stack_vars[j].representative != j)
447 continue;
448
449 /* Ignore objects too large for the remaining space. */
450 if (isize < jsize)
451 continue;
452
453 /* Ignore conflicting objects. */
454 if (stack_var_conflict_p (i, j))
455 continue;
456
457 /* Refine the remaining space check to include alignment. */
458 if (offset & (jalign - 1))
459 {
460 HOST_WIDE_INT toff = offset;
461 toff += jalign - 1;
462 toff &= -(HOST_WIDE_INT)jalign;
463 if (isize - (toff - offset) < jsize)
464 continue;
465
466 isize -= toff - offset;
467 offset = toff;
468 }
469
470 /* UNION the objects, placing J at OFFSET. */
471 union_stack_vars (i, j, offset);
472
473 isize -= jsize;
474 if (isize == 0)
475 break;
476 }
477 }
478 }
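/* An illustrative trace: given mutually non-conflicting variables of
   sizes 2, 4 and 8 (already in sorted order), the size-4 variable
   first absorbs the size-2 variable at offset 0; the size-8 variable
   then absorbs that whole partition, again at offset 0.  All three
   variables end up sharing a single 8-byte slot.  */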
479
480 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
481
482 static void
483 dump_stack_var_partition (void)
484 {
485 size_t si, i, j, n = stack_vars_num;
486
487 for (si = 0; si < n; ++si)
488 {
489 i = stack_vars_sorted[si];
490
491 /* Skip variables that aren't partition representatives, for now. */
492 if (stack_vars[i].representative != i)
493 continue;
494
495 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
496 " align %u\n", (unsigned long) i, stack_vars[i].size,
497 stack_vars[i].alignb);
498
499 for (j = i; j != EOC; j = stack_vars[j].next)
500 {
501 fputc ('\t', dump_file);
502 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
503 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
504 stack_vars[j].offset);
505 }
506 }
507 }
508
509 /* Assign rtl to DECL at frame offset OFFSET. */
510
511 static void
512 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
513 {
514 HOST_WIDE_INT align;
515 rtx x;
516
517 /* If this fails, we've overflowed the stack frame. Error nicely? */
518 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
519
520 x = plus_constant (virtual_stack_vars_rtx, offset);
521 x = gen_rtx_MEM (DECL_MODE (decl), x);
522
523 /* Set the alignment we actually gave this decl. */
524 offset -= frame_phase;
525 align = offset & -offset;
526 align *= BITS_PER_UNIT;
527 if (align > STACK_BOUNDARY || align == 0)
528 align = STACK_BOUNDARY;
529 DECL_ALIGN (decl) = align;
530 DECL_USER_ALIGN (decl) = 0;
531
532 set_mem_attributes (x, decl, true);
533 SET_DECL_RTL (decl, x);
534 }
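/* Note that OFFSET & -OFFSET isolates the lowest set bit, i.e. the
   largest power of two that divides OFFSET.  E.g. a phase-adjusted
   offset of 24 bytes yields 8, so the decl is known to be aligned
   to a 64-bit boundary.  */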
535
536 /* A subroutine of expand_used_vars. Give each partition representative
537 a unique location within the stack frame. Update each partition member
538 with that location. */
539
540 static void
541 expand_stack_vars (bool (*pred) (tree))
542 {
543 size_t si, i, j, n = stack_vars_num;
544
545 for (si = 0; si < n; ++si)
546 {
547 HOST_WIDE_INT offset;
548
549 i = stack_vars_sorted[si];
550
551 /* Skip variables that aren't partition representatives, for now. */
552 if (stack_vars[i].representative != i)
553 continue;
554
555 /* Skip variables that have already had rtl assigned. See also
556 add_stack_var where we perpetrate this pc_rtx hack. */
557 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
558 continue;
559
560 /* Check the predicate to see whether this variable should be
561 allocated in this pass. */
562 if (pred && !pred (stack_vars[i].decl))
563 continue;
564
565 offset = alloc_stack_frame_space (stack_vars[i].size,
566 stack_vars[i].alignb);
567
568 /* Create rtl for each variable based on their location within the
569 partition. */
570 for (j = i; j != EOC; j = stack_vars[j].next)
571 expand_one_stack_var_at (stack_vars[j].decl,
572 stack_vars[j].offset + offset);
573 }
574 }
575
576 /* A subroutine of expand_one_var. Called to immediately assign rtl
577 to a variable to be allocated in the stack frame. */
578
579 static void
580 expand_one_stack_var (tree var)
581 {
582 HOST_WIDE_INT size, offset, align;
583
584 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
585 align = get_decl_align_unit (var);
586 offset = alloc_stack_frame_space (size, align);
587
588 expand_one_stack_var_at (var, offset);
589 }
590
591 /* A subroutine of expand_one_var. Called to assign rtl
592 to a TREE_STATIC VAR_DECL. */
593
594 static void
595 expand_one_static_var (tree var)
596 {
597 /* In unit-at-a-time mode all the static variables are expanded at the
598 end of the compilation process. */
599 if (flag_unit_at_a_time)
600 return;
601 /* If this is an inlined copy of a static local variable,
602 look up the original. */
603 var = DECL_ORIGIN (var);
604
605 /* If we've already processed this variable because of that, do nothing. */
606 if (TREE_ASM_WRITTEN (var))
607 return;
608
609 /* Give the front end a chance to do whatever. In practice, this is
610 resolving duplicate names for IMA in C. */
611 if (lang_hooks.expand_decl (var))
612 return;
613
614 /* Otherwise, just emit the variable. */
615 rest_of_decl_compilation (var, 0, 0);
616 }
617
618 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
619 that will reside in a hard register. */
620
621 static void
622 expand_one_hard_reg_var (tree var)
623 {
624 rest_of_decl_compilation (var, 0, 0);
625 }
626
627 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
628 that will reside in a pseudo register. */
629
630 static void
631 expand_one_register_var (tree var)
632 {
633 tree type = TREE_TYPE (var);
634 int unsignedp = TYPE_UNSIGNED (type);
635 enum machine_mode reg_mode
636 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
637 rtx x = gen_reg_rtx (reg_mode);
638
639 SET_DECL_RTL (var, x);
640
641 /* Note if the object is a user variable. */
642 if (!DECL_ARTIFICIAL (var))
643 {
644 mark_user_reg (x);
645
646 /* Trust user variables which have a pointer type to really
647 be pointers. Do not trust compiler-generated temporaries:
648 our type system is totally busted as it relates to
649 pointer arithmetic, which translates into lots of
650 compiler-generated objects with pointer types that are
651 not really pointers. */
652 if (POINTER_TYPE_P (type))
653 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
654 }
655 }
656
657 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
658 has some associated error, e.g. its type is error-mark. We just need
659 to pick something that won't crash the rest of the compiler. */
660
661 static void
662 expand_one_error_var (tree var)
663 {
664 enum machine_mode mode = DECL_MODE (var);
665 rtx x;
666
667 if (mode == BLKmode)
668 x = gen_rtx_MEM (BLKmode, const0_rtx);
669 else if (mode == VOIDmode)
670 x = const0_rtx;
671 else
672 x = gen_reg_rtx (mode);
673
674 SET_DECL_RTL (var, x);
675 }
676
677 /* A subroutine of expand_one_var. VAR is a variable that will be
678 allocated to the local stack frame. Return true if we wish to
679 add VAR to STACK_VARS so that it will be coalesced with other
680 variables. Return false to allocate VAR immediately.
681
682 This function is used to reduce the number of variables considered
683 for coalescing, which reduces the size of the quadratic problem. */
684
685 static bool
686 defer_stack_allocation (tree var, bool toplevel)
687 {
688 /* If stack protection is enabled, *all* stack variables must be deferred,
689 so that we can re-order the strings to the top of the frame. */
690 if (flag_stack_protect)
691 return true;
692
693 /* Variables in the outermost scope automatically conflict with
694 every other variable. The only reason to want to defer them
695 at all is that, after sorting, we can more efficiently pack
696 small variables in the stack frame. Continue to defer at -O2. */
697 if (toplevel && optimize < 2)
698 return false;
699
700 /* Without optimization, *most* variables are allocated from the
701 stack, which makes the quadratic problem large exactly when we
702 want compilation to proceed as quickly as possible. On the
703 other hand, we don't want the function's stack frame size to
704 get completely out of hand. So we avoid adding scalars and
705 "small" aggregates to the list at all. */
706 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
707 return false;
708
709 return true;
710 }
711
712 /* A subroutine of expand_used_vars. Expand one variable according to
713 its flavor. Variables to be placed on the stack are not actually
714 expanded yet, merely recorded. */
715
716 static void
717 expand_one_var (tree var, bool toplevel)
718 {
719 if (TREE_CODE (var) != VAR_DECL)
720 lang_hooks.expand_decl (var);
721 else if (DECL_EXTERNAL (var))
722 ;
723 else if (DECL_HAS_VALUE_EXPR_P (var))
724 ;
725 else if (TREE_STATIC (var))
726 expand_one_static_var (var);
727 else if (DECL_RTL_SET_P (var))
728 ;
729 else if (TREE_TYPE (var) == error_mark_node)
730 expand_one_error_var (var);
731 else if (DECL_HARD_REGISTER (var))
732 expand_one_hard_reg_var (var);
733 else if (use_register_for_decl (var))
734 expand_one_register_var (var);
735 else if (defer_stack_allocation (var, toplevel))
736 add_stack_var (var);
737 else
738 expand_one_stack_var (var);
739 }
740
741 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
742 expanding variables. Those variables that can be put into registers
743 are allocated pseudos; those that can't are put on the stack.
744
745 TOPLEVEL is true if this is the outermost BLOCK. */
746
747 static void
748 expand_used_vars_for_block (tree block, bool toplevel)
749 {
750 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
751 tree t;
752
753 old_sv_num = toplevel ? 0 : stack_vars_num;
754
755 /* Expand all variables at this level. */
756 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
757 if (TREE_USED (t))
758 expand_one_var (t, toplevel);
759
760 this_sv_num = stack_vars_num;
761
762 /* Expand all variables at contained (sub-block) levels. */
763 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
764 expand_used_vars_for_block (t, false);
765
766 /* Since we do not track exact variable lifetimes (which is not even
767 possible for variables whose address escapes), we mirror the block
768 tree in the interference graph. Here we cause all variables at this
769 level, and all sublevels, to conflict. Do make certain that a
770 variable conflicts with itself. */
771 if (old_sv_num < this_sv_num)
772 {
773 new_sv_num = stack_vars_num;
774 resize_stack_vars_conflict (new_sv_num);
775
776 for (i = old_sv_num; i < new_sv_num; ++i)
777 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
778 add_stack_var_conflict (i, j);
779 }
780 }
781
782 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
783 and clear TREE_USED on all local variables. */
784
785 static void
786 clear_tree_used (tree block)
787 {
788 tree t;
789
790 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
791 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
792 TREE_USED (t) = 0;
793
794 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
795 clear_tree_used (t);
796 }
797
798 /* Examine TYPE and determine a bit mask of the following features. */
799
800 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
801 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
802 #define SPCT_HAS_ARRAY 4
803 #define SPCT_HAS_AGGREGATE 8
804
805 static unsigned int
806 stack_protect_classify_type (tree type)
807 {
808 unsigned int ret = 0;
809 tree t;
810
811 switch (TREE_CODE (type))
812 {
813 case ARRAY_TYPE:
814 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
815 if (t == char_type_node
816 || t == signed_char_type_node
817 || t == unsigned_char_type_node)
818 {
819 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
820 unsigned HOST_WIDE_INT len;
821
822 if (!TYPE_SIZE_UNIT (type)
823 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
824 len = max;
825 else
826 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
827
828 if (len < max)
829 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
830 else
831 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
832 }
833 else
834 ret = SPCT_HAS_ARRAY;
835 break;
836
837 case UNION_TYPE:
838 case QUAL_UNION_TYPE:
839 case RECORD_TYPE:
840 ret = SPCT_HAS_AGGREGATE;
841 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
842 if (TREE_CODE (t) == FIELD_DECL)
843 ret |= stack_protect_classify_type (TREE_TYPE (t));
844 break;
845
846 default:
847 break;
848 }
849
850 return ret;
851 }
852
853 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
854 part of the local stack frame. Remember if we ever return nonzero for
855 any variable in this function. The return value is the phase number in
856 which the variable should be allocated. */
857
858 static int
859 stack_protect_decl_phase (tree decl)
860 {
861 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
862 int ret = 0;
863
864 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
865 has_short_buffer = true;
866
867 if (flag_stack_protect == 2)
868 {
869 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
870 && !(bits & SPCT_HAS_AGGREGATE))
871 ret = 1;
872 else if (bits & SPCT_HAS_ARRAY)
873 ret = 2;
874 }
875 else
876 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
877
878 if (ret)
879 has_protected_decls = true;
880
881 return ret;
882 }
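/* For example, with the default --param ssp-buffer-size=8:

       char buf[64];  -> SPCT_HAS_LARGE_CHAR_ARRAY; phase 1.
       char buf[4];   -> SPCT_HAS_SMALL_CHAR_ARRAY; phase 0,
                         or phase 1 under -fstack-protector-all.
       int num[16];   -> SPCT_HAS_ARRAY only; phase 0,
                         or phase 2 under -fstack-protector-all.  */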
883
884 /* Two helper routines that check for phase 1 and phase 2. These are used
885 as callbacks for expand_stack_vars. */
886
887 static bool
888 stack_protect_decl_phase_1 (tree decl)
889 {
890 return stack_protect_decl_phase (decl) == 1;
891 }
892
893 static bool
894 stack_protect_decl_phase_2 (tree decl)
895 {
896 return stack_protect_decl_phase (decl) == 2;
897 }
898
899 /* Ensure that variables in different stack protection phases conflict
900 so that they are not merged and share the same stack slot. */
901
902 static void
903 add_stack_protection_conflicts (void)
904 {
905 size_t i, j, n = stack_vars_num;
906 unsigned char *phase;
907
908 phase = XNEWVEC (unsigned char, n);
909 for (i = 0; i < n; ++i)
910 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
911
912 for (i = 0; i < n; ++i)
913 {
914 unsigned char ph_i = phase[i];
915 for (j = 0; j < i; ++j)
916 if (ph_i != phase[j])
917 add_stack_var_conflict (i, j);
918 }
919
920 XDELETEVEC (phase);
921 }
922
923 /* Create a decl for the guard at the top of the stack frame. */
924
925 static void
926 create_stack_guard (void)
927 {
928 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
929 TREE_THIS_VOLATILE (guard) = 1;
930 TREE_USED (guard) = 1;
931 expand_one_stack_var (guard);
932 cfun->stack_protect_guard = guard;
933 }
934
935 /* Expand all variables used in the function. */
936
937 static void
938 expand_used_vars (void)
939 {
940 tree t, outer_block = DECL_INITIAL (current_function_decl);
941
942 /* Compute the phase of the stack frame for this function. */
943 {
944 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
945 int off = STARTING_FRAME_OFFSET % align;
946 frame_phase = off ? align - off : 0;
947 }
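  /* E.g. on a hypothetical target with STARTING_FRAME_OFFSET == 4 and a
     16-byte preferred boundary, off == 4 and frame_phase == 12, so that
     (frame_offset + frame_phase) % 16 == 0 for the initial frame_offset.  */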
948
949 /* Set TREE_USED on all variables in the unexpanded_var_list. */
950 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
951 TREE_USED (TREE_VALUE (t)) = 1;
952
953 /* Clear TREE_USED on all variables associated with a block scope. */
954 clear_tree_used (outer_block);
955
956 /* Initialize local stack smashing state. */
957 has_protected_decls = false;
958 has_short_buffer = false;
959
960 /* At this point all variables on the unexpanded_var_list with TREE_USED
961 set are not associated with any block scope. Lay them out. */
962 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
963 {
964 tree var = TREE_VALUE (t);
965 bool expand_now = false;
966
967 /* We didn't set a block for static or extern because it's hard
968 to tell the difference between a global variable (re)declared
969 in a local scope, and one that's really declared there to
970 begin with. And it doesn't really matter much, since we're
971 not giving them stack space. Expand them now. */
972 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
973 expand_now = true;
974
975 /* Any variable that could have been hoisted into an SSA_NAME
976 will have been propagated anywhere the optimizers chose,
977 i.e. not confined to their original block. Allocate them
978 as if they were defined in the outermost scope. */
979 else if (is_gimple_reg (var))
980 expand_now = true;
981
982 /* If the variable is not associated with any block, then it
983 was created by the optimizers, and could be live anywhere
984 in the function. */
985 else if (TREE_USED (var))
986 expand_now = true;
987
988 /* Finally, mark all variables on the list as used. We'll use
989 this in a moment when we expand those associated with scopes. */
990 TREE_USED (var) = 1;
991
992 if (expand_now)
993 expand_one_var (var, true);
994 }
995 cfun->unexpanded_var_list = NULL_TREE;
996
997 /* At this point, all variables within the block tree with TREE_USED
998 set are actually used by the optimized function. Lay them out. */
999 expand_used_vars_for_block (outer_block, true);
1000
1001 if (stack_vars_num > 0)
1002 {
1003 /* Due to the way alias sets work, no variables with non-conflicting
1004 alias sets may be assigned the same address. Add conflicts to
1005 reflect this. */
1006 add_alias_set_conflicts ();
1007
1008 /* If stack protection is enabled, we don't share space between
1009 vulnerable data and non-vulnerable data. */
1010 if (flag_stack_protect)
1011 add_stack_protection_conflicts ();
1012
1013 /* Now that we have collected all stack variables, and have computed a
1014 minimal interference graph, attempt to save some stack space. */
1015 partition_stack_vars ();
1016 if (dump_file)
1017 dump_stack_var_partition ();
1018 }
1019
1020 /* There are several conditions under which we should create a
1021 stack guard: protect-all, alloca used, protected decls present. */
1022 if (flag_stack_protect == 2
1023 || (flag_stack_protect
1024 && (current_function_calls_alloca || has_protected_decls)))
1025 create_stack_guard ();
1026
1027 /* Assign rtl to each variable based on these partitions. */
1028 if (stack_vars_num > 0)
1029 {
1030 /* Reorder decls to be protected by iterating over the variables
1031 array multiple times, and allocating out of each phase in turn. */
1032 /* ??? We could probably integrate this into the qsort we did
1033 earlier, such that we naturally see these variables first,
1034 and thus naturally allocate things in the right order. */
1035 if (has_protected_decls)
1036 {
1037 /* Phase 1 contains only character arrays. */
1038 expand_stack_vars (stack_protect_decl_phase_1);
1039
1040 /* Phase 2 contains other kinds of arrays. */
1041 if (flag_stack_protect == 2)
1042 expand_stack_vars (stack_protect_decl_phase_2);
1043 }
1044
1045 expand_stack_vars (NULL);
1046
1047 /* Free up stack variable graph data. */
1048 XDELETEVEC (stack_vars);
1049 XDELETEVEC (stack_vars_sorted);
1050 XDELETEVEC (stack_vars_conflict);
1051 stack_vars = NULL;
1052 stack_vars_alloc = stack_vars_num = 0;
1053 stack_vars_conflict = NULL;
1054 stack_vars_conflict_alloc = 0;
1055 }
1056
1057 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1058 if (STACK_ALIGNMENT_NEEDED)
1059 {
1060 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1061 if (!FRAME_GROWS_DOWNWARD)
1062 frame_offset += align - 1;
1063 frame_offset &= -align;
1064 }
1065 }
1066
1067
1068 /* If we need to produce a detailed dump, print the tree representation
1069 for STMT to the dump file. SINCE is the last RTX after which the RTL
1070 generated for STMT should have been appended. */
1071
1072 static void
1073 maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
1074 {
1075 if (dump_file && (dump_flags & TDF_DETAILS))
1076 {
1077 fprintf (dump_file, "\n;; ");
1078 print_generic_expr (dump_file, stmt, TDF_SLIM);
1079 fprintf (dump_file, "\n");
1080
1081 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1082 }
1083 }
1084
1085 /* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR.
1086 Returns a new basic block if we've terminated the current basic
1087 block and created a new one. */
1088
1089 static basic_block
1090 expand_gimple_cond_expr (basic_block bb, tree stmt)
1091 {
1092 basic_block new_bb, dest;
1093 edge new_edge;
1094 edge true_edge;
1095 edge false_edge;
1096 tree pred = COND_EXPR_COND (stmt);
1097 tree then_exp = COND_EXPR_THEN (stmt);
1098 tree else_exp = COND_EXPR_ELSE (stmt);
1099 rtx last2, last;
1100
1101 last2 = last = get_last_insn ();
1102
1103 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1104 if (EXPR_LOCUS (stmt))
1105 {
1106 emit_line_note (*(EXPR_LOCUS (stmt)));
1107 record_block_change (TREE_BLOCK (stmt));
1108 }
1109
1110 /* These flags have no purpose in RTL land. */
1111 true_edge->flags &= ~EDGE_TRUE_VALUE;
1112 false_edge->flags &= ~EDGE_FALSE_VALUE;
1113
1114 /* We can either have a pure conditional jump with one fallthru edge or
1115 a two-way jump that needs to be decomposed into two basic blocks. */
1116 if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
1117 {
1118 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
1119 add_reg_br_prob_note (last, true_edge->probability);
1120 maybe_dump_rtl_for_tree_stmt (stmt, last);
1121 if (EXPR_LOCUS (then_exp))
1122 emit_line_note (*(EXPR_LOCUS (then_exp)));
1123 return NULL;
1124 }
1125 if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
1126 {
1127 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
1128 add_reg_br_prob_note (last, false_edge->probability);
1129 maybe_dump_rtl_for_tree_stmt (stmt, last);
1130 if (EXPR_LOCUS (else_exp))
1131 emit_line_note (*(EXPR_LOCUS (else_exp)));
1132 return NULL;
1133 }
1134 gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
1135 && TREE_CODE (else_exp) == GOTO_EXPR);
1136
1137 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
1138 add_reg_br_prob_note (last, true_edge->probability);
1139 last = get_last_insn ();
1140 expand_expr (else_exp, const0_rtx, VOIDmode, 0);
1141
1142 BB_END (bb) = last;
1143 if (BARRIER_P (BB_END (bb)))
1144 BB_END (bb) = PREV_INSN (BB_END (bb));
1145 update_bb_for_insn (bb);
1146
1147 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1148 dest = false_edge->dest;
1149 redirect_edge_succ (false_edge, new_bb);
1150 false_edge->flags |= EDGE_FALLTHRU;
1151 new_bb->count = false_edge->count;
1152 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1153 new_edge = make_edge (new_bb, dest, 0);
1154 new_edge->probability = REG_BR_PROB_BASE;
1155 new_edge->count = new_bb->count;
1156 if (BARRIER_P (BB_END (new_bb)))
1157 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1158 update_bb_for_insn (new_bb);
1159
1160 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1161
1162 if (EXPR_LOCUS (else_exp))
1163 emit_line_note (*(EXPR_LOCUS (else_exp)));
1164
1165 return new_bb;
1166 }
1167
1168 /* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR
1169 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1170 generated a tail call (something that might be denied by the ABI
1171 rules governing the call; see calls.c).
1172
1173 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1174 can still reach the rest of BB. The case here is __builtin_sqrt,
1175 where the NaN result goes through the external function (with a
1176 tailcall) and the normal result happens via a sqrt instruction. */
1177
1178 static basic_block
1179 expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
1180 {
1181 rtx last2, last;
1182 edge e;
1183 edge_iterator ei;
1184 int probability;
1185 gcov_type count;
1186
1187 last2 = last = get_last_insn ();
1188
1189 expand_expr_stmt (stmt);
1190
1191 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1192 if (CALL_P (last) && SIBLING_CALL_P (last))
1193 goto found;
1194
1195 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1196
1197 *can_fallthru = true;
1198 return NULL;
1199
1200 found:
1201 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1202 Any instructions emitted here are about to be deleted. */
1203 do_pending_stack_adjust ();
1204
1205 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1206 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1207 EH or abnormal edges, we shouldn't have created a tail call in
1208 the first place. So it seems to me we should just be removing
1209 all edges here, or redirecting the existing fallthru edge to
1210 the exit block. */
1211
1212 probability = 0;
1213 count = 0;
1214
1215 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1216 {
1217 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1218 {
1219 if (e->dest != EXIT_BLOCK_PTR)
1220 {
1221 e->dest->count -= e->count;
1222 e->dest->frequency -= EDGE_FREQUENCY (e);
1223 if (e->dest->count < 0)
1224 e->dest->count = 0;
1225 if (e->dest->frequency < 0)
1226 e->dest->frequency = 0;
1227 }
1228 count += e->count;
1229 probability += e->probability;
1230 remove_edge (e);
1231 }
1232 else
1233 ei_next (&ei);
1234 }
1235
1236 /* This is somewhat ugly: the call_expr expander often emits instructions
1237 after the sibcall (to perform the function return). These confuse the
1238 find_many_sub_basic_blocks code, so we need to get rid of these. */
1239 last = NEXT_INSN (last);
1240 gcc_assert (BARRIER_P (last));
1241
1242 *can_fallthru = false;
1243 while (NEXT_INSN (last))
1244 {
1245 /* For instance, an sqrt builtin expander may expand an if with a
1246 sibcall in the THEN arm and a label for the ELSE arm. */
1247 if (LABEL_P (NEXT_INSN (last)))
1248 {
1249 *can_fallthru = true;
1250 break;
1251 }
1252 delete_insn (NEXT_INSN (last));
1253 }
1254
1255 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1256 e->probability += probability;
1257 e->count += count;
1258 BB_END (bb) = last;
1259 update_bb_for_insn (bb);
1260
1261 if (NEXT_INSN (last))
1262 {
1263 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1264
1265 last = BB_END (bb);
1266 if (BARRIER_P (last))
1267 BB_END (bb) = PREV_INSN (last);
1268 }
1269
1270 maybe_dump_rtl_for_tree_stmt (stmt, last2);
1271
1272 return bb;
1273 }
1274
1275 /* Expand basic block BB from GIMPLE trees to RTL. */
1276
1277 static basic_block
1278 expand_gimple_basic_block (basic_block bb)
1279 {
1280 block_stmt_iterator bsi = bsi_start (bb);
1281 tree stmt = NULL;
1282 rtx note, last;
1283 edge e;
1284 edge_iterator ei;
1285
1286 if (dump_file)
1287 {
1288 fprintf (dump_file,
1289 "\n;; Generating RTL for tree basic block %d\n",
1290 bb->index);
1291 }
1292
1293 init_rtl_bb_info (bb);
1294 bb->flags |= BB_RTL;
1295
1296 if (!bsi_end_p (bsi))
1297 stmt = bsi_stmt (bsi);
1298
1299 if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
1300 {
1301 last = get_last_insn ();
1302
1303 expand_expr_stmt (stmt);
1304
1305 /* Java emits line number notes at the top of labels.
1306 ??? Make this go away once line number notes are obsoleted. */
1307 BB_HEAD (bb) = NEXT_INSN (last);
1308 if (NOTE_P (BB_HEAD (bb)))
1309 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
1310 bsi_next (&bsi);
1311 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1312
1313 maybe_dump_rtl_for_tree_stmt (stmt, last);
1314 }
1315 else
1316 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1317
1318 NOTE_BASIC_BLOCK (note) = bb;
1319
1320 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1321 {
1322 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1323 e->flags &= ~EDGE_EXECUTABLE;
1324
1325 /* At the moment not all abnormal edges match the RTL representation.
1326 It is safe to remove them here as find_many_sub_basic_blocks will
1327 rediscover them. In the future we should get this fixed properly. */
1328 if (e->flags & EDGE_ABNORMAL)
1329 remove_edge (e);
1330 else
1331 ei_next (&ei);
1332 }
1333
1334 for (; !bsi_end_p (bsi); bsi_next (&bsi))
1335 {
1336 tree stmt = bsi_stmt (bsi);
1337 basic_block new_bb;
1338
1339 if (!stmt)
1340 continue;
1341
1342 /* Expand this statement, then evaluate the resulting RTL and
1343 fix up the CFG accordingly. */
1344 if (TREE_CODE (stmt) == COND_EXPR)
1345 {
1346 new_bb = expand_gimple_cond_expr (bb, stmt);
1347 if (new_bb)
1348 return new_bb;
1349 }
1350 else
1351 {
1352 tree call = get_call_expr_in (stmt);
1353 if (call && CALL_EXPR_TAILCALL (call))
1354 {
1355 bool can_fallthru;
1356 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1357 if (new_bb)
1358 {
1359 if (can_fallthru)
1360 bb = new_bb;
1361 else
1362 return new_bb;
1363 }
1364 }
1365 else
1366 {
1367 last = get_last_insn ();
1368 expand_expr_stmt (stmt);
1369 maybe_dump_rtl_for_tree_stmt (stmt, last);
1370 }
1371 }
1372 }
1373
1374 do_pending_stack_adjust ();
1375
1376 /* Find the block tail. The last insn in the block is the insn
1377 before a barrier and/or table jump insn. */
1378 last = get_last_insn ();
1379 if (BARRIER_P (last))
1380 last = PREV_INSN (last);
1381 if (JUMP_TABLE_DATA_P (last))
1382 last = PREV_INSN (PREV_INSN (last));
1383 BB_END (bb) = last;
1384
1385 update_bb_for_insn (bb);
1386
1387 return bb;
1388 }
1389
1390
1391 /* Create a basic block for initialization code. */
1392
1393 static basic_block
1394 construct_init_block (void)
1395 {
1396 basic_block init_block, first_block;
1397 edge e = NULL;
1398 int flags;
1399
1400 /* Multiple entry points not supported yet. */
1401 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
1402 init_rtl_bb_info (ENTRY_BLOCK_PTR);
1403 init_rtl_bb_info (EXIT_BLOCK_PTR);
1404 ENTRY_BLOCK_PTR->flags |= BB_RTL;
1405 EXIT_BLOCK_PTR->flags |= BB_RTL;
1406
1407 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
1408
1409 /* When the entry edge points to the first basic block, we don't need
1410 a jump; otherwise we have to jump to the proper target. */
1411 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
1412 {
1413 tree label = tree_block_label (e->dest);
1414
1415 emit_jump (label_rtx (label));
1416 flags = 0;
1417 }
1418 else
1419 flags = EDGE_FALLTHRU;
1420
1421 init_block = create_basic_block (NEXT_INSN (get_insns ()),
1422 get_last_insn (),
1423 ENTRY_BLOCK_PTR);
1424 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
1425 init_block->count = ENTRY_BLOCK_PTR->count;
1426 if (e)
1427 {
1428 first_block = e->dest;
1429 redirect_edge_succ (e, init_block);
1430 e = make_edge (init_block, first_block, flags);
1431 }
1432 else
1433 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1434 e->probability = REG_BR_PROB_BASE;
1435 e->count = ENTRY_BLOCK_PTR->count;
1436
1437 update_bb_for_insn (init_block);
1438 return init_block;
1439 }
1440
1441
1442 /* Create a block containing landing pads and similar stuff. */
1443
1444 static void
1445 construct_exit_block (void)
1446 {
1447 rtx head = get_last_insn ();
1448 rtx end;
1449 basic_block exit_block;
1450 edge e, e2;
1451 unsigned ix;
1452 edge_iterator ei;
1453
1454 /* Make sure the locus is set to the end of the function, so that
1455 epilogue line numbers and warnings are set properly. */
1456 #ifdef USE_MAPPED_LOCATION
1457 if (cfun->function_end_locus != UNKNOWN_LOCATION)
1458 #else
1459 if (cfun->function_end_locus.file)
1460 #endif
1461 input_location = cfun->function_end_locus;
1462
1463 /* The following insns belong to the top scope. */
1464 record_block_change (DECL_INITIAL (current_function_decl));
1465
1466 /* Generate rtl for function exit. */
1467 expand_function_end ();
1468
1469 end = get_last_insn ();
1470 if (head == end)
1471 return;
1472 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
1473 head = NEXT_INSN (head);
1474 exit_block = create_basic_block (NEXT_INSN (head), end,
1475 EXIT_BLOCK_PTR->prev_bb);
1476 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
1477 exit_block->count = EXIT_BLOCK_PTR->count;
1478
1479 ix = 0;
1480 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
1481 {
1482 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
1483 if (!(e->flags & EDGE_ABNORMAL))
1484 redirect_edge_succ (e, exit_block);
1485 else
1486 ix++;
1487 }
1488
1489 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1490 e->probability = REG_BR_PROB_BASE;
1491 e->count = EXIT_BLOCK_PTR->count;
1492 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
1493 if (e2 != e)
1494 {
1495 e->count -= e2->count;
1496 exit_block->count -= e2->count;
1497 exit_block->frequency -= EDGE_FREQUENCY (e2);
1498 }
1499 if (e->count < 0)
1500 e->count = 0;
1501 if (exit_block->count < 0)
1502 exit_block->count = 0;
1503 if (exit_block->frequency < 0)
1504 exit_block->frequency = 0;
1505 update_bb_for_insn (exit_block);
1506 }
1507
1508 /* Helper function for discover_nonconstant_array_refs.
1509 Look for ARRAY_REF nodes with non-constant indexes and mark them
1510 addressable. */
1511
1512 static tree
1513 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
1514 void *data ATTRIBUTE_UNUSED)
1515 {
1516 tree t = *tp;
1517
1518 if (IS_TYPE_OR_DECL_P (t))
1519 *walk_subtrees = 0;
1520 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1521 {
1522 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1523 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
1524 && (!TREE_OPERAND (t, 2)
1525 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1526 || (TREE_CODE (t) == COMPONENT_REF
1527 && (!TREE_OPERAND (t,2)
1528 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
1529 || TREE_CODE (t) == BIT_FIELD_REF
1530 || TREE_CODE (t) == REALPART_EXPR
1531 || TREE_CODE (t) == IMAGPART_EXPR
1532 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1533 || TREE_CODE (t) == NOP_EXPR
1534 || TREE_CODE (t) == CONVERT_EXPR)
1535 t = TREE_OPERAND (t, 0);
1536
1537 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1538 {
1539 t = get_base_address (t);
1540 if (t && DECL_P (t))
1541 TREE_ADDRESSABLE (t) = 1;
1542 }
1543
1544 *walk_subtrees = 0;
1545 }
1546
1547 return NULL_TREE;
1548 }
1549
1550 /* RTL expansion is not able to compile array references with variable
1551 offsets for arrays stored in a single register. Discover such
1552 expressions and mark the variables addressable to avoid this
1553 scenario. */
1554
1555 static void
1556 discover_nonconstant_array_refs (void)
1557 {
1558 basic_block bb;
1559 block_stmt_iterator bsi;
1560
1561 FOR_EACH_BB (bb)
1562 {
1563 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1564 walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
1565 NULL , NULL);
1566 }
1567 }
1568
1569 /* Translate the intermediate representation contained in the CFG
1570 from GIMPLE trees to RTL.
1571
1572 We do conversion per basic block and preserve/update the tree CFG.
1573 This implies we have to do some magic as the CFG can simultaneously
1574 consist of basic blocks containing RTL and GIMPLE trees. This can
1575 confuse the CFG hooks, so be careful not to manipulate the CFG during
1576 the expansion. */
1577
1578 static unsigned int
1579 tree_expand_cfg (void)
1580 {
1581 basic_block bb, init_block;
1582 sbitmap blocks;
1583 edge_iterator ei;
1584 edge e;
1585
1586 /* Some backends want to know that we are expanding to RTL. */
1587 currently_expanding_to_rtl = 1;
1588
1589 /* Prepare the rtl middle end to start recording block changes. */
1590 reset_block_changes ();
1591
1592 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
1593 discover_nonconstant_array_refs ();
1594
1595 /* Expand the variables recorded during gimple lowering. */
1596 expand_used_vars ();
1597
1598 /* Honor stack protection warnings. */
1599 if (warn_stack_protect)
1600 {
1601 if (current_function_calls_alloca)
1602 warning (0, "not protecting local variables: variable length buffer");
1603 if (has_short_buffer && !cfun->stack_protect_guard)
1604 warning (0, "not protecting function: no buffer at least %d bytes long",
1605 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
1606 }
1607
1608 /* Set up parameters and prepare for return, for the function. */
1609 expand_function_start (current_function_decl);
1610
1611 /* If this function is `main', emit a call to `__main'
1612 to run global initializers, etc. */
1613 if (DECL_NAME (current_function_decl)
1614 && MAIN_NAME_P (DECL_NAME (current_function_decl))
1615 && DECL_FILE_SCOPE_P (current_function_decl))
1616 expand_main_function ();
1617
1618 /* Initialize the stack_protect_guard field. This must happen after the
1619 call to __main (if any) so that the external decl is initialized. */
1620 if (cfun->stack_protect_guard)
1621 stack_protect_prologue ();
1622
1623 /* Register rtl specific functions for cfg. */
1624 rtl_register_cfg_hooks ();
1625
1626 init_block = construct_init_block ();
1627
1628 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
1629 remaining edges in expand_gimple_basic_block. */
1630 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1631 e->flags &= ~EDGE_EXECUTABLE;
1632
1633 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
1634 bb = expand_gimple_basic_block (bb);
1635
1636 construct_exit_block ();
1637
1638 /* We're done expanding trees to RTL. */
1639 currently_expanding_to_rtl = 0;
1640
1641 /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
1642 EH regions. */
1643 convert_from_eh_region_ranges ();
1644
1645 rebuild_jump_labels (get_insns ());
1646 find_exception_handler_labels ();
1647
1648 blocks = sbitmap_alloc (last_basic_block);
1649 sbitmap_ones (blocks);
1650 find_many_sub_basic_blocks (blocks);
1651 purge_all_dead_edges ();
1652 sbitmap_free (blocks);
1653
1654 compact_blocks ();
1655 #ifdef ENABLE_CHECKING
1656 verify_flow_info ();
1657 #endif
1658
1659 /* There's no need to defer outputting this function any more; we
1660 know we want to output it. */
1661 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1662
1663 /* Now that we're done expanding trees to RTL, we shouldn't have any
1664 more CONCATs anywhere. */
1665 generating_concat_p = 0;
1666
1667 finalize_block_changes ();
1668
1669 if (dump_file)
1670 {
1671 fprintf (dump_file,
1672 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
1673 /* And the pass manager will dump RTL for us. */
1674 }
1675
1676 /* If we're emitting a nested function, make sure its parent gets
1677 emitted as well. Doing otherwise confuses debug info. */
1678 {
1679 tree parent;
1680 for (parent = DECL_CONTEXT (current_function_decl);
1681 parent != NULL_TREE;
1682 parent = get_containing_scope (parent))
1683 if (TREE_CODE (parent) == FUNCTION_DECL)
1684 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1685 }
1686
1687 /* We are now committed to emitting code for this function. Do any
1688 preparation, such as emitting abstract debug info for the inline
1689 function before it gets mangled by optimization. */
1690 if (cgraph_function_possibly_inlined_p (current_function_decl))
1691 (*debug_hooks->outlining_inline_function) (current_function_decl);
1692
1693 TREE_ASM_WRITTEN (current_function_decl) = 1;
1694
1695 /* After expanding, the return labels are no longer needed. */
1696 return_label = NULL;
1697 naked_return_label = NULL;
1698 return 0;
1699 }
1700
1701 struct tree_opt_pass pass_expand =
1702 {
1703 "expand", /* name */
1704 NULL, /* gate */
1705 tree_expand_cfg, /* execute */
1706 NULL, /* sub */
1707 NULL, /* next */
1708 0, /* static_pass_number */
1709 TV_EXPAND, /* tv_id */
1710 /* ??? If TER is enabled, we actually receive GENERIC. */
1711 PROP_gimple_leh | PROP_cfg, /* properties_required */
1712 PROP_rtl, /* properties_provided */
1713 PROP_trees, /* properties_destroyed */
1714 0, /* todo_flags_start */
1715 TODO_dump_func, /* todo_flags_finish */
1716 'r' /* letter */
1717 };