/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"


/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
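/* For example, with STARTING_FRAME_OFFSET == 4 and an 8-byte preferred
   boundary, expand_used_vars below computes frame_phase = 8 - (4 % 8) = 4.  */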
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;

  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */
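/* For example, if FRAME_GROWS_DOWNWARD, frame_phase is 0 and frame_offset
   is -16, a request for 12 bytes at 8-byte alignment computes
   (-16 - 12) & -8 = -32, returns -32, and leaves frame_offset at -32.  */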

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}


/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
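      /* offset & -offset isolates the lowest set bit, i.e. the largest
         power of two dividing the offset: e.g. an offset of 24 yields 8,
         meaning 8-byte alignment, before the conversion to bits below.  */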
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
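
/* So at -O0 a plain int or a 16-byte struct is allocated immediately,
   while a 64-byte array is still deferred; and with -fstack-protector
   every stack variable is deferred regardless of size.  */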

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */
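/* For example, with -fstack-protector-all (flag_stack_protect == 2) a
   "char buf[64]" is phase 1, an "int v[16]" is phase 2, and a plain scalar
   is phase 0; with plain -fstack-protector only character arrays of at
   least --param ssp-buffer-size bytes get phase 1.  */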

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

ZD
1495/* Maps the blocks that do not contain tree labels to rtx labels. */
1496
1497static struct pointer_map_t *lab_rtx_for_bb;
1498
a9b77cd1
ZD
1499/* Returns the label_rtx expression for a label starting basic block BB. */
1500
1501static rtx
726a989a 1502label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1503{
726a989a
RB
1504 gimple_stmt_iterator gsi;
1505 tree lab;
1506 gimple lab_stmt;
8b11009b 1507 void **elt;
a9b77cd1
ZD
1508
1509 if (bb->flags & BB_RTL)
1510 return block_label (bb);
1511
8b11009b
ZD
1512 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1513 if (elt)
ae50c0cb 1514 return (rtx) *elt;
8b11009b
ZD
1515
1516 /* Find the tree label if it is present. */
b8698a0f 1517
726a989a 1518 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1519 {
726a989a
RB
1520 lab_stmt = gsi_stmt (gsi);
1521 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1522 break;
1523
726a989a 1524 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1525 if (DECL_NONLOCAL (lab))
1526 break;
1527
1528 return label_rtx (lab);
1529 }
1530
8b11009b
ZD
1531 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1532 *elt = gen_label_rtx ();
ae50c0cb 1533 return (rtx) *elt;
a9b77cd1
ZD
1534}
1535
726a989a 1536
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

28ed065e
MM
1737/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1738 statement STMT. */
1739
1740static void
1741expand_call_stmt (gimple stmt)
1742{
1743 tree exp;
1744 tree lhs = gimple_call_lhs (stmt);
28ed065e 1745 size_t i;
e23817b3
RG
1746 bool builtin_p;
1747 tree decl;
28ed065e
MM
1748
1749 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1750
1751 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
1752 decl = gimple_call_fndecl (stmt);
1753 builtin_p = decl && DECL_BUILT_IN (decl);
1754
28ed065e
MM
1755 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1756 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1757
1758 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
1759 {
1760 tree arg = gimple_call_arg (stmt, i);
1761 gimple def;
 1762	      /* We TER addresses into the arguments of builtin functions, so we
 1763		 have a chance to infer more correct alignment information.  See PR39954.  */
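      /* A hypothetical example: if arg_1 = &local.field and the callee is
	 a builtin such as memcpy, substituting the ADDR_EXPR below lets
	 the expander see the underlying decl and hence its alignment.  */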
1764 if (builtin_p
1765 && TREE_CODE (arg) == SSA_NAME
1766 && (def = get_gimple_for_ssa_name (arg))
1767 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1768 arg = gimple_assign_rhs1 (def);
1769 CALL_EXPR_ARG (exp, i) = arg;
1770 }
28ed065e 1771
93f28ca7 1772 if (gimple_has_side_effects (stmt))
28ed065e
MM
1773 TREE_SIDE_EFFECTS (exp) = 1;
1774
93f28ca7 1775 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
1776 TREE_NOTHROW (exp) = 1;
1777
1778 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1779 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1780 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1781 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1782 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1783 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1784 TREE_BLOCK (exp) = gimple_block (stmt);
1785
28ed065e
MM
1786 if (lhs)
1787 expand_assignment (lhs, exp, false);
1788 else
1789 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1790}
1791
1792/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1793 STMT that doesn't require special handling for outgoing edges. That
1794 is no tailcalls and no GIMPLE_COND. */
1795
1796static void
1797expand_gimple_stmt_1 (gimple stmt)
1798{
1799 tree op0;
1800 switch (gimple_code (stmt))
1801 {
1802 case GIMPLE_GOTO:
1803 op0 = gimple_goto_dest (stmt);
1804 if (TREE_CODE (op0) == LABEL_DECL)
1805 expand_goto (op0);
1806 else
1807 expand_computed_goto (op0);
1808 break;
1809 case GIMPLE_LABEL:
1810 expand_label (gimple_label_label (stmt));
1811 break;
1812 case GIMPLE_NOP:
1813 case GIMPLE_PREDICT:
1814 break;
28ed065e
MM
1815 case GIMPLE_SWITCH:
1816 expand_case (stmt);
1817 break;
1818 case GIMPLE_ASM:
1819 expand_asm_stmt (stmt);
1820 break;
1821 case GIMPLE_CALL:
1822 expand_call_stmt (stmt);
1823 break;
1824
1825 case GIMPLE_RETURN:
1826 op0 = gimple_return_retval (stmt);
1827
1828 if (op0 && op0 != error_mark_node)
1829 {
1830 tree result = DECL_RESULT (current_function_decl);
1831
1832 /* If we are not returning the current function's RESULT_DECL,
1833 build an assignment to it. */
1834 if (op0 != result)
1835 {
1836 /* I believe that a function's RESULT_DECL is unique. */
1837 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1838
 1839	      /* ??? We'd like to simply use expand_assignment here,
1840 but this fails if the value is of BLKmode but the return
1841 decl is a register. expand_return has special handling
1842 for this combination, which eventually should move
1843 to common code. See comments there. Until then, let's
1844 build a modify expression :-/ */
1845 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1846 result, op0);
1847 }
1848 }
1849 if (!op0)
1850 expand_null_return ();
1851 else
1852 expand_return (op0);
1853 break;
1854
1855 case GIMPLE_ASSIGN:
1856 {
1857 tree lhs = gimple_assign_lhs (stmt);
1858
1859 /* Tree expand used to fiddle with |= and &= of two bitfield
 1860	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
1861 of binary assigns must be a gimple reg. */
1862
1863 if (TREE_CODE (lhs) != SSA_NAME
1864 || get_gimple_rhs_class (gimple_expr_code (stmt))
1865 == GIMPLE_SINGLE_RHS)
1866 {
1867 tree rhs = gimple_assign_rhs1 (stmt);
1868 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1869 == GIMPLE_SINGLE_RHS);
1870 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1871 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1872 expand_assignment (lhs, rhs,
1873 gimple_assign_nontemporal_move_p (stmt));
1874 }
1875 else
1876 {
1877 rtx target, temp;
1878 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1879 struct separate_ops ops;
1880 bool promoted = false;
1881
1882 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1883 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1884 promoted = true;
1885
1886 ops.code = gimple_assign_rhs_code (stmt);
1887 ops.type = TREE_TYPE (lhs);
1888 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1889 {
1890 case GIMPLE_BINARY_RHS:
1891 ops.op1 = gimple_assign_rhs2 (stmt);
1892 /* Fallthru */
1893 case GIMPLE_UNARY_RHS:
1894 ops.op0 = gimple_assign_rhs1 (stmt);
1895 break;
1896 default:
1897 gcc_unreachable ();
1898 }
1899 ops.location = gimple_location (stmt);
1900
1901 /* If we want to use a nontemporal store, force the value to
1902 register first. If we store into a promoted register,
1903 don't directly expand to target. */
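	    /* In the promoted case the store instead goes through
	       convert_move further below, so the value is re-extended
	       into the wider underlying register.  */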
1904 temp = nontemporal || promoted ? NULL_RTX : target;
1905 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1906 EXPAND_NORMAL);
1907
1908 if (temp == target)
1909 ;
1910 else if (promoted)
1911 {
4e18a7d4 1912 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
1913 /* If TEMP is a VOIDmode constant, use convert_modes to make
1914 sure that we properly convert it. */
1915 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1916 {
1917 temp = convert_modes (GET_MODE (target),
1918 TYPE_MODE (ops.type),
4e18a7d4 1919 temp, unsignedp);
28ed065e 1920 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 1921 GET_MODE (target), temp, unsignedp);
28ed065e
MM
1922 }
1923
4e18a7d4 1924 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
1925 }
1926 else if (nontemporal && emit_storent_insn (target, temp))
1927 ;
1928 else
1929 {
1930 temp = force_operand (temp, target);
1931 if (temp != target)
1932 emit_move_insn (target, temp);
1933 }
1934 }
1935 }
1936 break;
1937
1938 default:
1939 gcc_unreachable ();
1940 }
1941}
1942
1943/* Expand one gimple statement STMT and return the last RTL instruction
1944 before any of the newly generated ones.
1945
1946 In addition to generating the necessary RTL instructions this also
1947 sets REG_EH_REGION notes if necessary and sets the current source
1948 location for diagnostics. */
1949
1950static rtx
1951expand_gimple_stmt (gimple stmt)
1952{
1d65f45c 1953 int lp_nr = 0;
28ed065e
MM
1954 rtx last = NULL;
1955 location_t saved_location = input_location;
1956
1957 last = get_last_insn ();
1958
1959 /* If this is an expression of some kind and it has an associated line
1960 number, then emit the line number before expanding the expression.
1961
1962 We need to save and restore the file and line information so that
1963 errors discovered during expansion are emitted with the right
 1964	     information.  It would be better if the diagnostic routines
1965 used the file/line information embedded in the tree nodes rather
1966 than globals. */
1967 gcc_assert (cfun);
1968
1969 if (gimple_has_location (stmt))
1970 {
1971 input_location = gimple_location (stmt);
1972 set_curr_insn_source_location (input_location);
1973
1974 /* Record where the insns produced belong. */
1975 set_curr_insn_block (gimple_block (stmt));
1976 }
1977
1978 expand_gimple_stmt_1 (stmt);
1979 /* Free any temporaries used to evaluate this statement. */
1980 free_temp_slots ();
1981
1982 input_location = saved_location;
1983
1984 /* Mark all insns that may trap. */
1d65f45c
RH
1985 lp_nr = lookup_stmt_eh_lp (stmt);
1986 if (lp_nr)
28ed065e
MM
1987 {
1988 rtx insn;
1989 for (insn = next_real_insn (last); insn;
1990 insn = next_real_insn (insn))
1991 {
1992 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1993 /* If we want exceptions for non-call insns, any
1994 may_trap_p instruction may throw. */
1995 && GET_CODE (PATTERN (insn)) != CLOBBER
1996 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
1997 && insn_could_throw_p (insn))
1998 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
1999 }
2000 }
2001
2002 return last;
2003}
2004
726a989a 2005/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2006 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2007 generated a tail call (something that might be denied by the ABI
cea49550
RH
2008 rules governing the call; see calls.c).
2009
2010 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2011 can still reach the rest of BB. The case here is __builtin_sqrt,
2012 where the NaN result goes through the external function (with a
2013 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2014
2015static basic_block
726a989a 2016expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2017{
b7211528 2018 rtx last2, last;
224e770b 2019 edge e;
628f6a4e 2020 edge_iterator ei;
224e770b
RH
2021 int probability;
2022 gcov_type count;
80c7a9eb 2023
28ed065e 2024 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2025
2026 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2027 if (CALL_P (last) && SIBLING_CALL_P (last))
2028 goto found;
80c7a9eb 2029
726a989a 2030 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2031
cea49550 2032 *can_fallthru = true;
224e770b 2033 return NULL;
80c7a9eb 2034
224e770b
RH
2035 found:
2036 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2037 Any instructions emitted here are about to be deleted. */
2038 do_pending_stack_adjust ();
2039
2040 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2041 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2042 EH or abnormal edges, we shouldn't have created a tail call in
2043 the first place. So it seems to me we should just be removing
2044 all edges here, or redirecting the existing fallthru edge to
2045 the exit block. */
2046
224e770b
RH
2047 probability = 0;
2048 count = 0;
224e770b 2049
628f6a4e
BE
2050 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2051 {
224e770b
RH
2052 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2053 {
2054 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2055 {
224e770b
RH
2056 e->dest->count -= e->count;
2057 e->dest->frequency -= EDGE_FREQUENCY (e);
2058 if (e->dest->count < 0)
c22cacf3 2059 e->dest->count = 0;
224e770b 2060 if (e->dest->frequency < 0)
c22cacf3 2061 e->dest->frequency = 0;
80c7a9eb 2062 }
224e770b
RH
2063 count += e->count;
2064 probability += e->probability;
2065 remove_edge (e);
80c7a9eb 2066 }
628f6a4e
BE
2067 else
2068 ei_next (&ei);
80c7a9eb
RH
2069 }
2070
224e770b
RH
2071 /* This is somewhat ugly: the call_expr expander often emits instructions
2072 after the sibcall (to perform the function return). These confuse the
12eff7b7 2073	     find_many_sub_basic_blocks code, so we need to get rid of them.  */
224e770b 2074 last = NEXT_INSN (last);
341c100f 2075 gcc_assert (BARRIER_P (last));
cea49550
RH
2076
2077 *can_fallthru = false;
224e770b
RH
2078 while (NEXT_INSN (last))
2079 {
 2080	      /* For instance, an sqrt builtin expander may expand an if with the
 2081		 sibcall in the `then' arm and a label for the `else' arm.  */
2082 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2083 {
2084 *can_fallthru = true;
2085 break;
2086 }
224e770b
RH
2087 delete_insn (NEXT_INSN (last));
2088 }
2089
2090 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2091 e->probability += probability;
2092 e->count += count;
2093 BB_END (bb) = last;
2094 update_bb_for_insn (bb);
2095
2096 if (NEXT_INSN (last))
2097 {
2098 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2099
2100 last = BB_END (bb);
2101 if (BARRIER_P (last))
2102 BB_END (bb) = PREV_INSN (last);
2103 }
2104
726a989a 2105 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2106
224e770b 2107 return bb;
80c7a9eb
RH
2108}
2109
b5b8b0ac
AO
2110/* Return the difference between the floor and the truncated result of
2111 a signed division by OP1 with remainder MOD. */
2112static rtx
2113floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2114{
2115 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
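  /* For example, -7 / 2 truncates to -3 with remainder -1, while the
     floor is -4; since op1 / mod = 2 / -1 < 0, the adjustment is -1.  */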
2116 return gen_rtx_IF_THEN_ELSE
2117 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2118 gen_rtx_IF_THEN_ELSE
2119 (mode, gen_rtx_LT (BImode,
2120 gen_rtx_DIV (mode, op1, mod),
2121 const0_rtx),
2122 constm1_rtx, const0_rtx),
2123 const0_rtx);
2124}
2125
2126/* Return the difference between the ceil and the truncated result of
2127 a signed division by OP1 with remainder MOD. */
2128static rtx
2129ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2130{
2131 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
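  /* For example, 7 / 2 truncates to 3 with remainder 1, while the
     ceiling is 4; since op1 / mod = 2 / 1 > 0, the adjustment is 1.  */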
2132 return gen_rtx_IF_THEN_ELSE
2133 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2134 gen_rtx_IF_THEN_ELSE
2135 (mode, gen_rtx_GT (BImode,
2136 gen_rtx_DIV (mode, op1, mod),
2137 const0_rtx),
2138 const1_rtx, const0_rtx),
2139 const0_rtx);
2140}
2141
2142/* Return the difference between the ceil and the truncated result of
2143 an unsigned division by OP1 with remainder MOD. */
2144static rtx
2145ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2146{
2147 /* (mod != 0 ? 1 : 0) */
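  /* For example, 7u / 2u truncates to 3 with remainder 1; the ceiling
     is 4, so any nonzero remainder contributes an adjustment of 1.  */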
2148 return gen_rtx_IF_THEN_ELSE
2149 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2150 const1_rtx, const0_rtx);
2151}
2152
2153/* Return the difference between the rounded and the truncated result
2154 of a signed division by OP1 with remainder MOD. Halfway cases are
2155 rounded away from zero, rather than to the nearest even number. */
2156static rtx
2157round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2158{
2159 /* (abs (mod) >= abs (op1) - abs (mod)
2160 ? (op1 / mod > 0 ? 1 : -1)
2161 : 0) */
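  /* For example, 7 / 2 truncates to 3 with remainder 1; abs (1) >=
     abs (2) - abs (1), so this halfway case rounds away from zero and
     the adjustment is 1, giving 4.  */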
2162 return gen_rtx_IF_THEN_ELSE
2163 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2164 gen_rtx_MINUS (mode,
2165 gen_rtx_ABS (mode, op1),
2166 gen_rtx_ABS (mode, mod))),
2167 gen_rtx_IF_THEN_ELSE
2168 (mode, gen_rtx_GT (BImode,
2169 gen_rtx_DIV (mode, op1, mod),
2170 const0_rtx),
2171 const1_rtx, constm1_rtx),
2172 const0_rtx);
2173}
2174
2175/* Return the difference between the rounded and the truncated result
 2176	   of an unsigned division by OP1 with remainder MOD.  Halfway cases
2177 are rounded away from zero, rather than to the nearest even
2178 number. */
2179static rtx
2180round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2181{
2182 /* (mod >= op1 - mod ? 1 : 0) */
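  /* For example, 7u / 2u truncates to 3 with remainder 1; since
     1 >= 2 - 1, the adjustment is 1, giving the rounded result 4.  */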
2183 return gen_rtx_IF_THEN_ELSE
2184 (mode, gen_rtx_GE (BImode, mod,
2185 gen_rtx_MINUS (mode, op1, mod)),
2186 const1_rtx, const0_rtx);
2187}
2188
dda2da58
AO
2189/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2190 any rtl. */
2191
2192static rtx
2193convert_debug_memory_address (enum machine_mode mode, rtx x)
2194{
2195 enum machine_mode xmode = GET_MODE (x);
2196
2197#ifndef POINTERS_EXTEND_UNSIGNED
2198 gcc_assert (mode == Pmode);
2199 gcc_assert (xmode == mode || xmode == VOIDmode);
2200#else
2201 gcc_assert (mode == Pmode || mode == ptr_mode);
2202
2203 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2204 return x;
2205
2206 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2207 x = simplify_gen_subreg (mode, x, xmode,
2208 subreg_lowpart_offset
2209 (mode, xmode));
2210 else if (POINTERS_EXTEND_UNSIGNED > 0)
2211 x = gen_rtx_ZERO_EXTEND (mode, x);
2212 else if (!POINTERS_EXTEND_UNSIGNED)
2213 x = gen_rtx_SIGN_EXTEND (mode, x);
2214 else
2215 gcc_unreachable ();
2216#endif /* POINTERS_EXTEND_UNSIGNED */
2217
2218 return x;
2219}
2220
b5b8b0ac
AO
2221/* Return an RTX equivalent to the value of the tree expression
2222 EXP. */
2223
2224static rtx
2225expand_debug_expr (tree exp)
2226{
2227 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2228 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2229 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2230 addr_space_t as;
d4ebfa65 2231 enum machine_mode address_mode;
b5b8b0ac
AO
2232
2233 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2234 {
2235 case tcc_expression:
2236 switch (TREE_CODE (exp))
2237 {
2238 case COND_EXPR:
7ece48b1 2239 case DOT_PROD_EXPR:
b5b8b0ac
AO
2240 goto ternary;
2241
2242 case TRUTH_ANDIF_EXPR:
2243 case TRUTH_ORIF_EXPR:
2244 case TRUTH_AND_EXPR:
2245 case TRUTH_OR_EXPR:
2246 case TRUTH_XOR_EXPR:
2247 goto binary;
2248
2249 case TRUTH_NOT_EXPR:
2250 goto unary;
2251
2252 default:
2253 break;
2254 }
2255 break;
2256
2257 ternary:
2258 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2259 if (!op2)
2260 return NULL_RTX;
2261 /* Fall through. */
2262
2263 binary:
2264 case tcc_binary:
2265 case tcc_comparison:
2266 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2267 if (!op1)
2268 return NULL_RTX;
2269 /* Fall through. */
2270
2271 unary:
2272 case tcc_unary:
2273 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2274 if (!op0)
2275 return NULL_RTX;
2276 break;
2277
2278 case tcc_type:
2279 case tcc_statement:
2280 gcc_unreachable ();
2281
2282 case tcc_constant:
2283 case tcc_exceptional:
2284 case tcc_declaration:
2285 case tcc_reference:
2286 case tcc_vl_exp:
2287 break;
2288 }
2289
2290 switch (TREE_CODE (exp))
2291 {
2292 case STRING_CST:
2293 if (!lookup_constant_def (exp))
2294 {
e1b243a8
JJ
2295 if (strlen (TREE_STRING_POINTER (exp)) + 1
2296 != (size_t) TREE_STRING_LENGTH (exp))
2297 return NULL_RTX;
b5b8b0ac
AO
2298 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2299 op0 = gen_rtx_MEM (BLKmode, op0);
2300 set_mem_attributes (op0, exp, 0);
2301 return op0;
2302 }
2303 /* Fall through... */
2304
2305 case INTEGER_CST:
2306 case REAL_CST:
2307 case FIXED_CST:
2308 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2309 return op0;
2310
2311 case COMPLEX_CST:
2312 gcc_assert (COMPLEX_MODE_P (mode));
2313 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2314 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2315 return gen_rtx_CONCAT (mode, op0, op1);
2316
0ca5af51
AO
2317 case DEBUG_EXPR_DECL:
2318 op0 = DECL_RTL_IF_SET (exp);
2319
2320 if (op0)
2321 return op0;
2322
2323 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2324 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2325 SET_DECL_RTL (exp, op0);
2326
2327 return op0;
2328
b5b8b0ac
AO
2329 case VAR_DECL:
2330 case PARM_DECL:
2331 case FUNCTION_DECL:
2332 case LABEL_DECL:
2333 case CONST_DECL:
2334 case RESULT_DECL:
2335 op0 = DECL_RTL_IF_SET (exp);
2336
2337 /* This decl was probably optimized away. */
2338 if (!op0)
e1b243a8
JJ
2339 {
2340 if (TREE_CODE (exp) != VAR_DECL
2341 || DECL_EXTERNAL (exp)
2342 || !TREE_STATIC (exp)
2343 || !DECL_NAME (exp)
0fba566c
JJ
2344 || DECL_HARD_REGISTER (exp)
2345 || mode == VOIDmode)
e1b243a8
JJ
2346 return NULL;
2347
b1aa0655 2348 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2349 if (!MEM_P (op0)
2350 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2351 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2352 return NULL;
2353 }
2354 else
2355 op0 = copy_rtx (op0);
b5b8b0ac 2356
06796564
JJ
2357 if (GET_MODE (op0) == BLKmode
 2358	  /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode
2359 below would ICE. While it is likely a FE bug,
2360 try to be robust here. See PR43166. */
132b4e82
JJ
2361 || mode == BLKmode
2362 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2363 {
2364 gcc_assert (MEM_P (op0));
2365 op0 = adjust_address_nv (op0, mode, 0);
2366 return op0;
2367 }
2368
2369 /* Fall through. */
2370
2371 adjust_mode:
2372 case PAREN_EXPR:
2373 case NOP_EXPR:
2374 case CONVERT_EXPR:
2375 {
2376 enum machine_mode inner_mode = GET_MODE (op0);
2377
2378 if (mode == inner_mode)
2379 return op0;
2380
2381 if (inner_mode == VOIDmode)
2382 {
2a8e30fb
MM
2383 if (TREE_CODE (exp) == SSA_NAME)
2384 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2385 else
2386 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2387 if (mode == inner_mode)
2388 return op0;
2389 }
2390
2391 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2392 {
2393 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2394 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2395 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2396 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2397 else
2398 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2399 }
2400 else if (FLOAT_MODE_P (mode))
2401 {
2a8e30fb 2402 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2403 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2404 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2405 else
2406 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2407 }
2408 else if (FLOAT_MODE_P (inner_mode))
2409 {
2410 if (unsignedp)
2411 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2412 else
2413 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2414 }
2415 else if (CONSTANT_P (op0)
2416 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2417 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2418 subreg_lowpart_offset (mode,
2419 inner_mode));
2420 else if (unsignedp)
2421 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2422 else
2423 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2424
2425 return op0;
2426 }
2427
2428 case INDIRECT_REF:
2429 case ALIGN_INDIRECT_REF:
2430 case MISALIGNED_INDIRECT_REF:
2431 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2432 if (!op0)
2433 return NULL;
2434
09e881c9 2435 if (POINTER_TYPE_P (TREE_TYPE (exp)))
4e25ca6b
EB
2436 {
2437 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2438 address_mode = targetm.addr_space.address_mode (as);
2439 }
09e881c9 2440 else
4e25ca6b
EB
2441 {
2442 as = ADDR_SPACE_GENERIC;
2443 address_mode = Pmode;
2444 }
b5b8b0ac
AO
2445
2446 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2447 {
2448 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
d4ebfa65 2449 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
b5b8b0ac
AO
2450 }
2451
2452 op0 = gen_rtx_MEM (mode, op0);
2453
2454 set_mem_attributes (op0, exp, 0);
09e881c9 2455 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2456
2457 return op0;
2458
2459 case TARGET_MEM_REF:
2460 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2461 return NULL;
2462
2463 op0 = expand_debug_expr
4e25ca6b 2464 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2465 if (!op0)
2466 return NULL;
2467
09e881c9 2468 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
b5b8b0ac
AO
2469
2470 op0 = gen_rtx_MEM (mode, op0);
2471
2472 set_mem_attributes (op0, exp, 0);
09e881c9 2473 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2474
2475 return op0;
2476
2477 case ARRAY_REF:
2478 case ARRAY_RANGE_REF:
2479 case COMPONENT_REF:
2480 case BIT_FIELD_REF:
2481 case REALPART_EXPR:
2482 case IMAGPART_EXPR:
2483 case VIEW_CONVERT_EXPR:
2484 {
2485 enum machine_mode mode1;
2486 HOST_WIDE_INT bitsize, bitpos;
2487 tree offset;
2488 int volatilep = 0;
2489 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2490 &mode1, &unsignedp, &volatilep, false);
2491 rtx orig_op0;
2492
4f2a9af8
JJ
2493 if (bitsize == 0)
2494 return NULL;
2495
b5b8b0ac
AO
2496 orig_op0 = op0 = expand_debug_expr (tem);
2497
2498 if (!op0)
2499 return NULL;
2500
2501 if (offset)
2502 {
dda2da58
AO
2503 enum machine_mode addrmode, offmode;
2504
aa847cc8
JJ
2505 if (!MEM_P (op0))
2506 return NULL;
b5b8b0ac 2507
dda2da58
AO
2508 op0 = XEXP (op0, 0);
2509 addrmode = GET_MODE (op0);
2510 if (addrmode == VOIDmode)
2511 addrmode = Pmode;
2512
b5b8b0ac
AO
2513 op1 = expand_debug_expr (offset);
2514 if (!op1)
2515 return NULL;
2516
dda2da58
AO
2517 offmode = GET_MODE (op1);
2518 if (offmode == VOIDmode)
2519 offmode = TYPE_MODE (TREE_TYPE (offset));
2520
2521 if (addrmode != offmode)
2522 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2523 subreg_lowpart_offset (addrmode,
2524 offmode));
2525
2526 /* Don't use offset_address here, we don't need a
2527 recognizable address, and we don't want to generate
2528 code. */
2529 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2530 }
2531
2532 if (MEM_P (op0))
2533 {
4f2a9af8
JJ
2534 if (mode1 == VOIDmode)
2535 /* Bitfield. */
2536 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2537 if (bitpos >= BITS_PER_UNIT)
2538 {
2539 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2540 bitpos %= BITS_PER_UNIT;
2541 }
2542 else if (bitpos < 0)
2543 {
4f2a9af8
JJ
2544 HOST_WIDE_INT units
2545 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2546 op0 = adjust_address_nv (op0, mode1, units);
2547 bitpos += units * BITS_PER_UNIT;
2548 }
2549 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2550 op0 = adjust_address_nv (op0, mode, 0);
2551 else if (GET_MODE (op0) != mode1)
2552 op0 = adjust_address_nv (op0, mode1, 0);
2553 else
2554 op0 = copy_rtx (op0);
2555 if (op0 == orig_op0)
2556 op0 = shallow_copy_rtx (op0);
2557 set_mem_attributes (op0, exp, 0);
2558 }
2559
2560 if (bitpos == 0 && mode == GET_MODE (op0))
2561 return op0;
2562
2d3fc6aa
JJ
2563 if (bitpos < 0)
2564 return NULL;
2565
88c04a5d
JJ
2566 if (GET_MODE (op0) == BLKmode)
2567 return NULL;
2568
b5b8b0ac
AO
2569 if ((bitpos % BITS_PER_UNIT) == 0
2570 && bitsize == GET_MODE_BITSIZE (mode1))
2571 {
2572 enum machine_mode opmode = GET_MODE (op0);
2573
b5b8b0ac
AO
2574 if (opmode == VOIDmode)
2575 opmode = mode1;
2576
2577 /* This condition may hold if we're expanding the address
2578 right past the end of an array that turned out not to
2579 be addressable (i.e., the address was only computed in
2580 debug stmts). The gen_subreg below would rightfully
2581 crash, and the address doesn't really exist, so just
2582 drop it. */
2583 if (bitpos >= GET_MODE_BITSIZE (opmode))
2584 return NULL;
2585
7d5d39bb
JJ
2586 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2587 return simplify_gen_subreg (mode, op0, opmode,
2588 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
2589 }
2590
2591 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2592 && TYPE_UNSIGNED (TREE_TYPE (exp))
2593 ? SIGN_EXTRACT
2594 : ZERO_EXTRACT, mode,
2595 GET_MODE (op0) != VOIDmode
2596 ? GET_MODE (op0) : mode1,
2597 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2598 }
2599
b5b8b0ac
AO
2600 case ABS_EXPR:
2601 return gen_rtx_ABS (mode, op0);
2602
2603 case NEGATE_EXPR:
2604 return gen_rtx_NEG (mode, op0);
2605
2606 case BIT_NOT_EXPR:
2607 return gen_rtx_NOT (mode, op0);
2608
2609 case FLOAT_EXPR:
2610 if (unsignedp)
2611 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2612 else
2613 return gen_rtx_FLOAT (mode, op0);
2614
2615 case FIX_TRUNC_EXPR:
2616 if (unsignedp)
2617 return gen_rtx_UNSIGNED_FIX (mode, op0);
2618 else
2619 return gen_rtx_FIX (mode, op0);
2620
2621 case POINTER_PLUS_EXPR:
576319a7
DD
2622 /* For the rare target where pointers are not the same size as
2623 size_t, we need to check for mis-matched modes and correct
2624 the addend. */
2625 if (op0 && op1
2626 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2627 && GET_MODE (op0) != GET_MODE (op1))
2628 {
2629 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2630 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2631 else
2632 /* We always sign-extend, regardless of the signedness of
2633 the operand, because the operand is always unsigned
2634 here even if the original C expression is signed. */
2635 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2636 }
2637 /* Fall through. */
b5b8b0ac
AO
2638 case PLUS_EXPR:
2639 return gen_rtx_PLUS (mode, op0, op1);
2640
2641 case MINUS_EXPR:
2642 return gen_rtx_MINUS (mode, op0, op1);
2643
2644 case MULT_EXPR:
2645 return gen_rtx_MULT (mode, op0, op1);
2646
2647 case RDIV_EXPR:
2648 case TRUNC_DIV_EXPR:
2649 case EXACT_DIV_EXPR:
2650 if (unsignedp)
2651 return gen_rtx_UDIV (mode, op0, op1);
2652 else
2653 return gen_rtx_DIV (mode, op0, op1);
2654
2655 case TRUNC_MOD_EXPR:
2656 if (unsignedp)
2657 return gen_rtx_UMOD (mode, op0, op1);
2658 else
2659 return gen_rtx_MOD (mode, op0, op1);
2660
2661 case FLOOR_DIV_EXPR:
2662 if (unsignedp)
2663 return gen_rtx_UDIV (mode, op0, op1);
2664 else
2665 {
2666 rtx div = gen_rtx_DIV (mode, op0, op1);
2667 rtx mod = gen_rtx_MOD (mode, op0, op1);
2668 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2669 return gen_rtx_PLUS (mode, div, adj);
2670 }
2671
2672 case FLOOR_MOD_EXPR:
2673 if (unsignedp)
2674 return gen_rtx_UMOD (mode, op0, op1);
2675 else
2676 {
2677 rtx mod = gen_rtx_MOD (mode, op0, op1);
2678 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2679 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2680 return gen_rtx_PLUS (mode, mod, adj);
2681 }
2682
2683 case CEIL_DIV_EXPR:
2684 if (unsignedp)
2685 {
2686 rtx div = gen_rtx_UDIV (mode, op0, op1);
2687 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2688 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2689 return gen_rtx_PLUS (mode, div, adj);
2690 }
2691 else
2692 {
2693 rtx div = gen_rtx_DIV (mode, op0, op1);
2694 rtx mod = gen_rtx_MOD (mode, op0, op1);
2695 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2696 return gen_rtx_PLUS (mode, div, adj);
2697 }
2698
2699 case CEIL_MOD_EXPR:
2700 if (unsignedp)
2701 {
2702 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2703 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2704 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2705 return gen_rtx_PLUS (mode, mod, adj);
2706 }
2707 else
2708 {
2709 rtx mod = gen_rtx_MOD (mode, op0, op1);
2710 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2711 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2712 return gen_rtx_PLUS (mode, mod, adj);
2713 }
2714
2715 case ROUND_DIV_EXPR:
2716 if (unsignedp)
2717 {
2718 rtx div = gen_rtx_UDIV (mode, op0, op1);
2719 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2720 rtx adj = round_udiv_adjust (mode, mod, op1);
2721 return gen_rtx_PLUS (mode, div, adj);
2722 }
2723 else
2724 {
2725 rtx div = gen_rtx_DIV (mode, op0, op1);
2726 rtx mod = gen_rtx_MOD (mode, op0, op1);
2727 rtx adj = round_sdiv_adjust (mode, mod, op1);
2728 return gen_rtx_PLUS (mode, div, adj);
2729 }
2730
2731 case ROUND_MOD_EXPR:
2732 if (unsignedp)
2733 {
2734 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2735 rtx adj = round_udiv_adjust (mode, mod, op1);
2736 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2737 return gen_rtx_PLUS (mode, mod, adj);
2738 }
2739 else
2740 {
2741 rtx mod = gen_rtx_MOD (mode, op0, op1);
2742 rtx adj = round_sdiv_adjust (mode, mod, op1);
2743 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2744 return gen_rtx_PLUS (mode, mod, adj);
2745 }
2746
2747 case LSHIFT_EXPR:
2748 return gen_rtx_ASHIFT (mode, op0, op1);
2749
2750 case RSHIFT_EXPR:
2751 if (unsignedp)
2752 return gen_rtx_LSHIFTRT (mode, op0, op1);
2753 else
2754 return gen_rtx_ASHIFTRT (mode, op0, op1);
2755
2756 case LROTATE_EXPR:
2757 return gen_rtx_ROTATE (mode, op0, op1);
2758
2759 case RROTATE_EXPR:
2760 return gen_rtx_ROTATERT (mode, op0, op1);
2761
2762 case MIN_EXPR:
2763 if (unsignedp)
2764 return gen_rtx_UMIN (mode, op0, op1);
2765 else
2766 return gen_rtx_SMIN (mode, op0, op1);
2767
2768 case MAX_EXPR:
2769 if (unsignedp)
2770 return gen_rtx_UMAX (mode, op0, op1);
2771 else
2772 return gen_rtx_SMAX (mode, op0, op1);
2773
2774 case BIT_AND_EXPR:
2775 case TRUTH_AND_EXPR:
2776 return gen_rtx_AND (mode, op0, op1);
2777
2778 case BIT_IOR_EXPR:
2779 case TRUTH_OR_EXPR:
2780 return gen_rtx_IOR (mode, op0, op1);
2781
2782 case BIT_XOR_EXPR:
2783 case TRUTH_XOR_EXPR:
2784 return gen_rtx_XOR (mode, op0, op1);
2785
2786 case TRUTH_ANDIF_EXPR:
2787 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2788
2789 case TRUTH_ORIF_EXPR:
2790 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2791
2792 case TRUTH_NOT_EXPR:
2793 return gen_rtx_EQ (mode, op0, const0_rtx);
2794
2795 case LT_EXPR:
2796 if (unsignedp)
2797 return gen_rtx_LTU (mode, op0, op1);
2798 else
2799 return gen_rtx_LT (mode, op0, op1);
2800
2801 case LE_EXPR:
2802 if (unsignedp)
2803 return gen_rtx_LEU (mode, op0, op1);
2804 else
2805 return gen_rtx_LE (mode, op0, op1);
2806
2807 case GT_EXPR:
2808 if (unsignedp)
2809 return gen_rtx_GTU (mode, op0, op1);
2810 else
2811 return gen_rtx_GT (mode, op0, op1);
2812
2813 case GE_EXPR:
2814 if (unsignedp)
2815 return gen_rtx_GEU (mode, op0, op1);
2816 else
2817 return gen_rtx_GE (mode, op0, op1);
2818
2819 case EQ_EXPR:
2820 return gen_rtx_EQ (mode, op0, op1);
2821
2822 case NE_EXPR:
2823 return gen_rtx_NE (mode, op0, op1);
2824
2825 case UNORDERED_EXPR:
2826 return gen_rtx_UNORDERED (mode, op0, op1);
2827
2828 case ORDERED_EXPR:
2829 return gen_rtx_ORDERED (mode, op0, op1);
2830
2831 case UNLT_EXPR:
2832 return gen_rtx_UNLT (mode, op0, op1);
2833
2834 case UNLE_EXPR:
2835 return gen_rtx_UNLE (mode, op0, op1);
2836
2837 case UNGT_EXPR:
2838 return gen_rtx_UNGT (mode, op0, op1);
2839
2840 case UNGE_EXPR:
2841 return gen_rtx_UNGE (mode, op0, op1);
2842
2843 case UNEQ_EXPR:
2844 return gen_rtx_UNEQ (mode, op0, op1);
2845
2846 case LTGT_EXPR:
2847 return gen_rtx_LTGT (mode, op0, op1);
2848
2849 case COND_EXPR:
2850 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2851
2852 case COMPLEX_EXPR:
2853 gcc_assert (COMPLEX_MODE_P (mode));
2854 if (GET_MODE (op0) == VOIDmode)
2855 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2856 if (GET_MODE (op1) == VOIDmode)
2857 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2858 return gen_rtx_CONCAT (mode, op0, op1);
2859
d02a5a4b
JJ
2860 case CONJ_EXPR:
2861 if (GET_CODE (op0) == CONCAT)
2862 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2863 gen_rtx_NEG (GET_MODE_INNER (mode),
2864 XEXP (op0, 1)));
2865 else
2866 {
2867 enum machine_mode imode = GET_MODE_INNER (mode);
2868 rtx re, im;
2869
2870 if (MEM_P (op0))
2871 {
2872 re = adjust_address_nv (op0, imode, 0);
2873 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2874 }
2875 else
2876 {
2877 enum machine_mode ifmode = int_mode_for_mode (mode);
2878 enum machine_mode ihmode = int_mode_for_mode (imode);
2879 rtx halfsize;
2880 if (ifmode == BLKmode || ihmode == BLKmode)
2881 return NULL;
2882 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2883 re = op0;
2884 if (mode != ifmode)
2885 re = gen_rtx_SUBREG (ifmode, re, 0);
2886 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2887 if (imode != ihmode)
2888 re = gen_rtx_SUBREG (imode, re, 0);
2889 im = copy_rtx (op0);
2890 if (mode != ifmode)
2891 im = gen_rtx_SUBREG (ifmode, im, 0);
2892 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2893 if (imode != ihmode)
2894 im = gen_rtx_SUBREG (imode, im, 0);
2895 }
2896 im = gen_rtx_NEG (imode, im);
2897 return gen_rtx_CONCAT (mode, re, im);
2898 }
2899
b5b8b0ac
AO
2900 case ADDR_EXPR:
2901 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2902 if (!op0 || !MEM_P (op0))
2903 return NULL;
2904
dda2da58
AO
2905 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2906
2907 return op0;
b5b8b0ac
AO
2908
2909 case VECTOR_CST:
2910 exp = build_constructor_from_list (TREE_TYPE (exp),
2911 TREE_VECTOR_CST_ELTS (exp));
2912 /* Fall through. */
2913
2914 case CONSTRUCTOR:
2915 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2916 {
2917 unsigned i;
2918 tree val;
2919
2920 op0 = gen_rtx_CONCATN
2921 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2922
2923 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2924 {
2925 op1 = expand_debug_expr (val);
2926 if (!op1)
2927 return NULL;
2928 XVECEXP (op0, 0, i) = op1;
2929 }
2930
2931 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2932 {
2933 op1 = expand_debug_expr
2934 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2935
2936 if (!op1)
2937 return NULL;
2938
2939 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2940 XVECEXP (op0, 0, i) = op1;
2941 }
2942
2943 return op0;
2944 }
2945 else
2946 goto flag_unsupported;
2947
2948 case CALL_EXPR:
2949 /* ??? Maybe handle some builtins? */
2950 return NULL;
2951
2952 case SSA_NAME:
2953 {
2a8e30fb
MM
2954 gimple g = get_gimple_for_ssa_name (exp);
2955 if (g)
2956 {
2957 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2958 if (!op0)
2959 return NULL;
2960 }
2961 else
2962 {
2963 int part = var_to_partition (SA.map, exp);
b5b8b0ac 2964
2a8e30fb
MM
2965 if (part == NO_PARTITION)
2966 return NULL;
b5b8b0ac 2967
2a8e30fb 2968 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 2969
2a8e30fb
MM
2970 op0 = SA.partition_to_pseudo[part];
2971 }
b5b8b0ac
AO
2972 goto adjust_mode;
2973 }
2974
2975 case ERROR_MARK:
2976 return NULL;
2977
7ece48b1
JJ
 2978    /* Vector stuff.  For most of these tree codes we don't have corresponding rtl codes.  */
2979 case REALIGN_LOAD_EXPR:
2980 case REDUC_MAX_EXPR:
2981 case REDUC_MIN_EXPR:
2982 case REDUC_PLUS_EXPR:
2983 case VEC_COND_EXPR:
2984 case VEC_EXTRACT_EVEN_EXPR:
2985 case VEC_EXTRACT_ODD_EXPR:
2986 case VEC_INTERLEAVE_HIGH_EXPR:
2987 case VEC_INTERLEAVE_LOW_EXPR:
2988 case VEC_LSHIFT_EXPR:
2989 case VEC_PACK_FIX_TRUNC_EXPR:
2990 case VEC_PACK_SAT_EXPR:
2991 case VEC_PACK_TRUNC_EXPR:
2992 case VEC_RSHIFT_EXPR:
2993 case VEC_UNPACK_FLOAT_HI_EXPR:
2994 case VEC_UNPACK_FLOAT_LO_EXPR:
2995 case VEC_UNPACK_HI_EXPR:
2996 case VEC_UNPACK_LO_EXPR:
2997 case VEC_WIDEN_MULT_HI_EXPR:
2998 case VEC_WIDEN_MULT_LO_EXPR:
2999 return NULL;
3000
3001 /* Misc codes. */
3002 case ADDR_SPACE_CONVERT_EXPR:
3003 case FIXED_CONVERT_EXPR:
3004 case OBJ_TYPE_REF:
3005 case WITH_SIZE_EXPR:
3006 return NULL;
3007
3008 case DOT_PROD_EXPR:
3009 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3010 && SCALAR_INT_MODE_P (mode))
3011 {
3012 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3013 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3014 else
3015 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3016 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3017 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3018 else
3019 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3020 op0 = gen_rtx_MULT (mode, op0, op1);
3021 return gen_rtx_PLUS (mode, op0, op2);
3022 }
3023 return NULL;
3024
3025 case WIDEN_MULT_EXPR:
3026 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3027 && SCALAR_INT_MODE_P (mode))
3028 {
5b58b39b 3029 enum machine_mode inner_mode = GET_MODE (op0);
7ece48b1 3030 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3031 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3032 else
5b58b39b 3033 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3034 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3035 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3036 else
5b58b39b 3037 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
7ece48b1
JJ
3038 return gen_rtx_MULT (mode, op0, op1);
3039 }
3040 return NULL;
3041
3042 case WIDEN_SUM_EXPR:
3043 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3044 && SCALAR_INT_MODE_P (mode))
3045 {
3046 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3047 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3048 else
3049 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3050 return gen_rtx_PLUS (mode, op0, op1);
3051 }
3052 return NULL;
3053
b5b8b0ac
AO
3054 default:
3055 flag_unsupported:
3056#ifdef ENABLE_CHECKING
3057 debug_tree (exp);
3058 gcc_unreachable ();
3059#else
3060 return NULL;
3061#endif
3062 }
3063}
3064
3065/* Expand the _LOCs in debug insns. We run this after expanding all
3066 regular insns, so that any variables referenced in the function
3067 will have their DECL_RTLs set. */
3068
3069static void
3070expand_debug_locations (void)
3071{
3072 rtx insn;
3073 rtx last = get_last_insn ();
3074 int save_strict_alias = flag_strict_aliasing;
3075
3076 /* New alias sets while setting up memory attributes cause
 3077	     -fcompare-debug failures, even though they don't bring about any
3078 codegen changes. */
3079 flag_strict_aliasing = 0;
3080
3081 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3082 if (DEBUG_INSN_P (insn))
3083 {
3084 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3085 rtx val;
3086 enum machine_mode mode;
3087
3088 if (value == NULL_TREE)
3089 val = NULL_RTX;
3090 else
3091 {
3092 val = expand_debug_expr (value);
3093 gcc_assert (last == get_last_insn ());
3094 }
3095
3096 if (!val)
3097 val = gen_rtx_UNKNOWN_VAR_LOC ();
3098 else
3099 {
3100 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3101
3102 gcc_assert (mode == GET_MODE (val)
3103 || (GET_MODE (val) == VOIDmode
3104 && (CONST_INT_P (val)
3105 || GET_CODE (val) == CONST_FIXED
3106 || GET_CODE (val) == CONST_DOUBLE
3107 || GET_CODE (val) == LABEL_REF)));
3108 }
3109
3110 INSN_VAR_LOCATION_LOC (insn) = val;
3111 }
3112
3113 flag_strict_aliasing = save_strict_alias;
3114}
3115
242229bb
JH
3116/* Expand basic block BB from GIMPLE trees to RTL. */
3117
3118static basic_block
10d22567 3119expand_gimple_basic_block (basic_block bb)
242229bb 3120{
726a989a
RB
3121 gimple_stmt_iterator gsi;
3122 gimple_seq stmts;
3123 gimple stmt = NULL;
242229bb
JH
3124 rtx note, last;
3125 edge e;
628f6a4e 3126 edge_iterator ei;
8b11009b 3127 void **elt;
242229bb
JH
3128
3129 if (dump_file)
726a989a
RB
3130 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3131 bb->index);
3132
3133 /* Note that since we are now transitioning from GIMPLE to RTL, we
3134 cannot use the gsi_*_bb() routines because they expect the basic
3135 block to be in GIMPLE, instead of RTL. Therefore, we need to
3136 access the BB sequence directly. */
3137 stmts = bb_seq (bb);
3138 bb->il.gimple = NULL;
bf08ebeb 3139 rtl_profile_for_bb (bb);
5e2d947c
JH
3140 init_rtl_bb_info (bb);
3141 bb->flags |= BB_RTL;
3142
a9b77cd1
ZD
 3143	  /* Remove the RETURN_EXPR if we may fall through to the exit
3144 instead. */
726a989a
RB
3145 gsi = gsi_last (stmts);
3146 if (!gsi_end_p (gsi)
3147 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3148 {
726a989a 3149 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3150
3151 gcc_assert (single_succ_p (bb));
3152 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3153
3154 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3155 && !gimple_return_retval (ret_stmt))
a9b77cd1 3156 {
726a989a 3157 gsi_remove (&gsi, false);
a9b77cd1
ZD
3158 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3159 }
3160 }
3161
726a989a
RB
3162 gsi = gsi_start (stmts);
3163 if (!gsi_end_p (gsi))
8b11009b 3164 {
726a989a
RB
3165 stmt = gsi_stmt (gsi);
3166 if (gimple_code (stmt) != GIMPLE_LABEL)
3167 stmt = NULL;
8b11009b 3168 }
242229bb 3169
8b11009b
ZD
3170 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3171
3172 if (stmt || elt)
242229bb
JH
3173 {
3174 last = get_last_insn ();
3175
8b11009b
ZD
3176 if (stmt)
3177 {
28ed065e 3178 expand_gimple_stmt (stmt);
726a989a 3179 gsi_next (&gsi);
8b11009b
ZD
3180 }
3181
3182 if (elt)
ae50c0cb 3183 emit_label ((rtx) *elt);
242229bb 3184
caf93cb0 3185	      /* Java emits line number notes at the top of labels.
c22cacf3 3186 ??? Make this go away once line number notes are obsoleted. */
242229bb 3187 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3188 if (NOTE_P (BB_HEAD (bb)))
242229bb 3189 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3190 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3191
726a989a 3192 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3193 }
3194 else
3195 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3196
3197 NOTE_BASIC_BLOCK (note) = bb;
3198
726a989a 3199 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3200 {
cea49550 3201 basic_block new_bb;
242229bb 3202
b5b8b0ac 3203 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3204
3205 /* If this statement is a non-debug one, and we generate debug
3206 insns, then this one might be the last real use of a TERed
3207 SSA_NAME, but where there are still some debug uses further
3208 down. Expanding the current SSA name in such further debug
3209 uses by their RHS might lead to wrong debug info, as coalescing
3210 might make the operands of such RHS be placed into the same
3211 pseudo as something else. Like so:
3212 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3213 use(a_1);
3214 a_2 = ...
3215 #DEBUG ... => a_1
3216 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3217	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3218 the write to a_2 would actually have clobbered the place which
3219 formerly held a_0.
3220
3221 So, instead of that, we recognize the situation, and generate
3222 debug temporaries at the last real use of TERed SSA names:
3223 a_1 = a_0 + 1;
3224 #DEBUG #D1 => a_1
3225 use(a_1);
3226 a_2 = ...
3227 #DEBUG ... => #D1
3228 */
3229 if (MAY_HAVE_DEBUG_INSNS
3230 && SA.values
3231 && !is_gimple_debug (stmt))
3232 {
3233 ssa_op_iter iter;
3234 tree op;
3235 gimple def;
3236
3237 location_t sloc = get_curr_insn_source_location ();
3238 tree sblock = get_curr_insn_block ();
3239
3240 /* Look for SSA names that have their last use here (TERed
3241 names always have only one real use). */
3242 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3243 if ((def = get_gimple_for_ssa_name (op)))
3244 {
3245 imm_use_iterator imm_iter;
3246 use_operand_p use_p;
3247 bool have_debug_uses = false;
3248
3249 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3250 {
3251 if (gimple_debug_bind_p (USE_STMT (use_p)))
3252 {
3253 have_debug_uses = true;
3254 break;
3255 }
3256 }
3257
3258 if (have_debug_uses)
3259 {
 3260		      /* OP is a TERed SSA name, with DEF its defining
3261 statement, and where OP is used in further debug
3262 instructions. Generate a debug temporary, and
3263 replace all uses of OP in debug insns with that
3264 temporary. */
3265 gimple debugstmt;
3266 tree value = gimple_assign_rhs_to_tree (def);
3267 tree vexpr = make_node (DEBUG_EXPR_DECL);
3268 rtx val;
3269 enum machine_mode mode;
3270
3271 set_curr_insn_source_location (gimple_location (def));
3272 set_curr_insn_block (gimple_block (def));
3273
3274 DECL_ARTIFICIAL (vexpr) = 1;
3275 TREE_TYPE (vexpr) = TREE_TYPE (value);
3276 if (DECL_P (value))
3277 mode = DECL_MODE (value);
3278 else
3279 mode = TYPE_MODE (TREE_TYPE (value));
3280 DECL_MODE (vexpr) = mode;
3281
3282 val = gen_rtx_VAR_LOCATION
3283 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3284
3285 val = emit_debug_insn (val);
3286
3287 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3288 {
3289 if (!gimple_debug_bind_p (debugstmt))
3290 continue;
3291
3292 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3293 SET_USE (use_p, vexpr);
3294
3295 update_stmt (debugstmt);
3296 }
3297 }
3298 }
3299 set_curr_insn_source_location (sloc);
3300 set_curr_insn_block (sblock);
3301 }
3302
a5883ba0 3303 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3304
242229bb
JH
3305 /* Expand this statement, then evaluate the resulting RTL and
3306 fixup the CFG accordingly. */
726a989a 3307 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3308 {
726a989a 3309 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3310 if (new_bb)
3311 return new_bb;
3312 }
b5b8b0ac
AO
3313 else if (gimple_debug_bind_p (stmt))
3314 {
3315 location_t sloc = get_curr_insn_source_location ();
3316 tree sblock = get_curr_insn_block ();
3317 gimple_stmt_iterator nsi = gsi;
3318
3319 for (;;)
3320 {
3321 tree var = gimple_debug_bind_get_var (stmt);
3322 tree value;
3323 rtx val;
3324 enum machine_mode mode;
3325
3326 if (gimple_debug_bind_has_value_p (stmt))
3327 value = gimple_debug_bind_get_value (stmt);
3328 else
3329 value = NULL_TREE;
3330
3331 last = get_last_insn ();
3332
3333 set_curr_insn_source_location (gimple_location (stmt));
3334 set_curr_insn_block (gimple_block (stmt));
3335
3336 if (DECL_P (var))
3337 mode = DECL_MODE (var);
3338 else
3339 mode = TYPE_MODE (TREE_TYPE (var));
3340
3341 val = gen_rtx_VAR_LOCATION
3342 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3343
3344 val = emit_debug_insn (val);
3345
3346 if (dump_file && (dump_flags & TDF_DETAILS))
3347 {
3348 /* We can't dump the insn with a TREE where an RTX
3349 is expected. */
3350 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3351 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3352 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3353 }
3354
2a8e30fb
MM
3355 /* In order not to generate too many debug temporaries,
3356 we delink all uses of debug statements we already expanded.
3357 Therefore debug statements between definition and real
3358 use of TERed SSA names will continue to use the SSA name,
3359 and not be replaced with debug temps. */
3360 delink_stmt_imm_use (stmt);
3361
b5b8b0ac
AO
3362 gsi = nsi;
3363 gsi_next (&nsi);
3364 if (gsi_end_p (nsi))
3365 break;
3366 stmt = gsi_stmt (nsi);
3367 if (!gimple_debug_bind_p (stmt))
3368 break;
3369 }
3370
3371 set_curr_insn_source_location (sloc);
3372 set_curr_insn_block (sblock);
3373 }
80c7a9eb 3374 else
242229bb 3375 {
726a989a 3376 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
3377 {
3378 bool can_fallthru;
3379 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3380 if (new_bb)
3381 {
3382 if (can_fallthru)
3383 bb = new_bb;
3384 else
3385 return new_bb;
3386 }
3387 }
4d7a65ea 3388 else
b7211528 3389 {
4e3825db 3390 def_operand_p def_p;
4e3825db
MM
3391 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3392
3393 if (def_p != NULL)
3394 {
3395 /* Ignore this stmt if it is in the list of
3396 replaceable expressions. */
3397 if (SA.values
b8698a0f 3398 && bitmap_bit_p (SA.values,
e97809c6 3399 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
3400 continue;
3401 }
28ed065e 3402 last = expand_gimple_stmt (stmt);
726a989a 3403 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3404 }
242229bb
JH
3405 }
3406 }
3407
a5883ba0
MM
3408 currently_expanding_gimple_stmt = NULL;
3409
7241571e 3410 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
3411 FOR_EACH_EDGE (e, ei, bb->succs)
3412 {
7241571e
JJ
3413 if (e->goto_locus && e->goto_block)
3414 {
3415 set_curr_insn_source_location (e->goto_locus);
3416 set_curr_insn_block (e->goto_block);
3417 e->goto_locus = curr_insn_locator ();
3418 }
3419 e->goto_block = NULL;
3420 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3421 {
3422 emit_jump (label_rtx_for_bb (e->dest));
3423 e->flags &= ~EDGE_FALLTHRU;
3424 }
a9b77cd1
ZD
3425 }
3426
ae761c45
AH
 3427	  /* Expanded RTL can create a jump in the last instruction of a block.
 3428	     This might later be assumed to be a jump to the successor and break edge insertion.
 3429	     We need to insert a dummy move to prevent this.  PR41440.  */
3430 if (single_succ_p (bb)
3431 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3432 && (last = get_last_insn ())
3433 && JUMP_P (last))
3434 {
3435 rtx dummy = gen_reg_rtx (SImode);
3436 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3437 }
3438
242229bb
JH
3439 do_pending_stack_adjust ();
3440
3f117656 3441 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
3442 before a barrier and/or table jump insn. */
3443 last = get_last_insn ();
4b4bf941 3444 if (BARRIER_P (last))
242229bb
JH
3445 last = PREV_INSN (last);
3446 if (JUMP_TABLE_DATA_P (last))
3447 last = PREV_INSN (PREV_INSN (last));
3448 BB_END (bb) = last;
caf93cb0 3449
242229bb 3450 update_bb_for_insn (bb);
80c7a9eb 3451
242229bb
JH
3452 return bb;
3453}
3454
3455
3456/* Create a basic block for initialization code. */
3457
3458static basic_block
3459construct_init_block (void)
3460{
3461 basic_block init_block, first_block;
fd44f634
JH
3462 edge e = NULL;
3463 int flags;
275a4187 3464
fd44f634
JH
3465 /* Multiple entry points not supported yet. */
3466 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
3467 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3468 init_rtl_bb_info (EXIT_BLOCK_PTR);
3469 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3470 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 3471
fd44f634 3472 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 3473
fd44f634
JH
 3474	  /* When the entry edge points to the first basic block, we don't need a jump;
 3475	     otherwise we have to jump to the proper target.  */
3476 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3477 {
726a989a 3478 tree label = gimple_block_label (e->dest);
fd44f634
JH
3479
3480 emit_jump (label_rtx (label));
3481 flags = 0;
275a4187 3482 }
fd44f634
JH
3483 else
3484 flags = EDGE_FALLTHRU;
242229bb
JH
3485
3486 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3487 get_last_insn (),
3488 ENTRY_BLOCK_PTR);
3489 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3490 init_block->count = ENTRY_BLOCK_PTR->count;
3491 if (e)
3492 {
3493 first_block = e->dest;
3494 redirect_edge_succ (e, init_block);
fd44f634 3495 e = make_edge (init_block, first_block, flags);
242229bb
JH
3496 }
3497 else
3498 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3499 e->probability = REG_BR_PROB_BASE;
3500 e->count = ENTRY_BLOCK_PTR->count;
3501
3502 update_bb_for_insn (init_block);
3503 return init_block;
3504}
3505
55e092c4
JH
3506/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3507 found in the block tree. */
3508
3509static void
3510set_block_levels (tree block, int level)
3511{
3512 while (block)
3513 {
3514 BLOCK_NUMBER (block) = level;
3515 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3516 block = BLOCK_CHAIN (block);
3517 }
3518}
242229bb
JH
3519
3520/* Create a block containing landing pads and similar stuff. */
3521
3522static void
3523construct_exit_block (void)
3524{
3525 rtx head = get_last_insn ();
3526 rtx end;
3527 basic_block exit_block;
628f6a4e
BE
3528 edge e, e2;
3529 unsigned ix;
3530 edge_iterator ei;
071a42f9 3531 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 3532
bf08ebeb
JH
3533 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3534
caf93cb0 3535 /* Make sure the locus is set to the end of the function, so that
242229bb 3536 epilogue line numbers and warnings are set properly. */
6773e15f 3537 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
3538 input_location = cfun->function_end_locus;
3539
3540 /* The following insns belong to the top scope. */
55e092c4 3541 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 3542
242229bb
JH
3543 /* Generate rtl for function exit. */
3544 expand_function_end ();
3545
3546 end = get_last_insn ();
3547 if (head == end)
3548 return;
071a42f9
JH
 3549	  /* While emitting the function end we could move the end of the last basic block.
3550 */
3551 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 3552 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 3553 head = NEXT_INSN (head);
80c7a9eb
RH
3554 exit_block = create_basic_block (NEXT_INSN (head), end,
3555 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
3556 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3557 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
3558
3559 ix = 0;
3560 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 3561 {
8fb790fd 3562 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 3563 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
3564 redirect_edge_succ (e, exit_block);
3565 else
3566 ix++;
242229bb 3567 }
628f6a4e 3568
242229bb
JH
3569 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3570 e->probability = REG_BR_PROB_BASE;
3571 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 3572 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
3573 if (e2 != e)
3574 {
c22cacf3 3575 e->count -= e2->count;
242229bb
JH
3576 exit_block->count -= e2->count;
3577 exit_block->frequency -= EDGE_FREQUENCY (e2);
3578 }
3579 if (e->count < 0)
3580 e->count = 0;
3581 if (exit_block->count < 0)
3582 exit_block->count = 0;
3583 if (exit_block->frequency < 0)
3584 exit_block->frequency = 0;
3585 update_bb_for_insn (exit_block);
3586}
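
/* Descriptive note on the loops above: only non-abnormal predecessor
   edges of EXIT_BLOCK_PTR are redirected to the new exit block; abnormal
   edges keep pointing at EXIT_BLOCK_PTR, and their counts and frequencies
   are subtracted from the new block so the profile stays consistent.  */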

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
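
/* A hypothetical example of the construct handled above:

       int
       f (int i)
       {
         int v[2] = { 1, 2 };
         return v[i];
       }

   Absent other uses, v is small enough to live in a single register, but
   the index i is not a compile-time constant, so the walk above marks v
   as TREE_ADDRESSABLE and it is given a memory home instead.  */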

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update the incoming stack
     boundary.  It may set the incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check whether it is OK
     to perform sibcall optimization, since sibcall optimization will
     only align the outgoing stack to the incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align the
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions, since the callgraph doesn't collect incoming stack
     alignment in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV notes if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
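
/* Rough illustration under stated assumptions (target supports stack
   realignment, e.g. SUPPORTS_STACK_ALIGNMENT holds): a function such as

       void
       g (void)
       {
         int buf[8] __attribute__ ((aligned (64)));
         use (buf);
       }

   may need its stack realigned beyond the incoming boundary.  When the
   realigned function must still address its incoming arguments, the
   target supplies a dynamic realign argument pointer (DRAP) register via
   TARGET_GET_DRAP_RTX, and the code above redirects
   crtl->args.internal_arg_pointer to it.  */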

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do the conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
                                            sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
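
  /* For illustration (a hedged example, not from the original source):
     with -Wstack-protector and -fstack-protector, a function such as

         void h (int n) { char *p = __builtin_alloca (n); consume (p); }

     has a variable-length buffer, so the first warning above fires; a
     function whose only local arrays are all shorter than the
     ssp-buffer-size parameter gets no guard and triggers the second
     warning instead.  */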

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here to make this available, but that would mean
         selecting one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }
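
  /* At this point each out-of-SSA partition is expected to have an RTL
     home: typically a pseudo register or, for partitions backed by
     variables living in memory, a MEM.  The assertion above checks that
     no partition was left without one.  */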

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}
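
/* Roughly, gimple_expand_cfg above proceeds as follows: leave SSA form
   (rewrite_out_of_ssa), assign RTL homes to variables and SSA partitions
   (expand_used_vars, expand_function_start), expand each GIMPLE basic
   block to RTL insns, build the special init and exit blocks, commit
   pending edge insertions, let find_many_sub_basic_blocks rebuild the
   finer-grained RTL CFG, and finally handle stack alignment and DRAP.  */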

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect                  /* todo_flags_finish */
 }
};