gcc/cfgexpand.c
/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy with the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
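
/* For illustration (values here are hypothetical): with a power-of-two
   ALIGN, the "+= align - 1" / "&= -align" pair above rounds an offset up
   to the next multiple of ALIGN.  E.g. for align == 8 and an offset of 13,
   13 + 7 == 20 and 20 & -8 == 16, the next 8-byte boundary.  In the
   FRAME_GROWS_DOWNWARD case only the mask is applied, which rounds the
   (negative) offset toward more negative values instead.  */
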
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea < sizeb)
    return -1;
  if (sizea > sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}

/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
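
/* For illustration (hypothetical sizes): given four mutually
   non-conflicting objects of 32, 24, 8 and 8 bytes, the largest object
   becomes the representative; the 24-byte object is unioned in at offset
   0 and one 8-byte object at offset 24, at which point the 32 bytes are
   exhausted and the remaining 8-byte object is left to start its own
   partition.  Members of a partition may overlap precisely because they
   never conflict, i.e. are never simultaneously live.  */
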
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in a register or on the stack,
         we conservatively assume it will be on the stack even if VAR is
         eventually put into a register after the RA pass.  For non-automatic
         variables, which won't be on the stack, we collect alignment of
         the type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

1f6d3a08 1385/* Expand all variables used in the function. */
727a31fa
RH
1386
1387static void
1388expand_used_vars (void)
1389{
c021f10b
NF
1390 tree var, outer_block = DECL_INITIAL (current_function_decl);
1391 VEC(tree,heap) *maybe_local_decls = NULL;
4e3825db 1392 unsigned i;
c021f10b 1393 unsigned len;
727a31fa 1394
1f6d3a08
RH
1395 /* Compute the phase of the stack frame for this function. */
1396 {
1397 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1398 int off = STARTING_FRAME_OFFSET % align;
1399 frame_phase = off ? align - off : 0;
1400 }
727a31fa 1401
ff28a94d 1402 init_vars_expansion ();
7d69de61 1403
4e3825db
MM
1404 for (i = 0; i < SA.map->num_partitions; i++)
1405 {
1406 tree var = partition_to_var (SA.map, i);
1407
1408 gcc_assert (is_gimple_reg (var));
1409 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1410 expand_one_var (var, true, true);
1411 else
1412 {
1413 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1414 contain the default def (representing the parm or result itself)
1415 we don't do anything here. But those which don't contain the
1416 default def (representing a temporary based on the parm/result)
1417 we need to allocate space just like for normal VAR_DECLs. */
1418 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1419 {
1420 expand_one_var (var, true, true);
1421 gcc_assert (SA.partition_to_pseudo[i]);
1422 }
1423 }
1424 }
1425
cb91fab0 1426 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1427 set are not associated with any block scope. Lay them out. */
c021f10b
NF
1428
1429 len = VEC_length (tree, cfun->local_decls);
1430 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1431 {
1f6d3a08
RH
1432 bool expand_now = false;
1433
4e3825db
MM
1434 /* Expanded above already. */
1435 if (is_gimple_reg (var))
eb7adebc
MM
1436 {
1437 TREE_USED (var) = 0;
3adcf52c 1438 goto next;
eb7adebc 1439 }
1f6d3a08
RH
1440 /* We didn't set a block for static or extern because it's hard
1441 to tell the difference between a global variable (re)declared
1442 in a local scope, and one that's really declared there to
1443 begin with. And it doesn't really matter much, since we're
1444 not giving them stack space. Expand them now. */
4e3825db 1445 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1446 expand_now = true;
1447
1448 /* If the variable is not associated with any block, then it
1449 was created by the optimizers, and could be live anywhere
1450 in the function. */
1451 else if (TREE_USED (var))
1452 expand_now = true;
1453
1454 /* Finally, mark all variables on the list as used. We'll use
1455 this in a moment when we expand those associated with scopes. */
1456 TREE_USED (var) = 1;
1457
1458 if (expand_now)
3adcf52c
JM
1459 expand_one_var (var, true, true);
1460
1461 next:
1462 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1463 {
3adcf52c
JM
1464 rtx rtl = DECL_RTL_IF_SET (var);
1465
1466 /* Keep artificial non-ignored vars in cfun->local_decls
1467 chain until instantiate_decls. */
1468 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1469 add_local_decl (cfun, var);
6c6366f6 1470 else if (rtl == NULL_RTX)
c021f10b
NF
1471 /* If rtl isn't set yet, which can happen e.g. with
1472 -fstack-protector, retry before returning from this
1473 function. */
1474 VEC_safe_push (tree, heap, maybe_local_decls, var);
802e9f8e 1475 }
1f6d3a08 1476 }
1f6d3a08 1477
c021f10b
NF
1478 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1479
1480 +-----------------+-----------------+
1481 | ...processed... | ...duplicates...|
1482 +-----------------+-----------------+
1483 ^
1484 +-- LEN points here.
1485
1486 We just want the duplicates, as those are the artificial
1487 non-ignored vars that we want to keep until instantiate_decls.
1488 Move them down and truncate the array. */
1489 if (!VEC_empty (tree, cfun->local_decls))
1490 VEC_block_remove (tree, cfun->local_decls, 0, len);
1491
1f6d3a08
RH
1492 /* At this point, all variables within the block tree with TREE_USED
1493 set are actually used by the optimized function. Lay them out. */
1494 expand_used_vars_for_block (outer_block, true);
1495
1496 if (stack_vars_num > 0)
1497 {
1498 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1499 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1500 reflect this. */
1501 add_alias_set_conflicts ();
1502
c22cacf3 1503 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1504 vulnerable data and non-vulnerable data. */
1505 if (flag_stack_protect)
1506 add_stack_protection_conflicts ();
1507
c22cacf3 1508 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1509 minimal interference graph, attempt to save some stack space. */
1510 partition_stack_vars ();
1511 if (dump_file)
1512 dump_stack_var_partition ();
7d69de61
RH
1513 }
1514
1515 /* There are several conditions under which we should create a
1516 stack guard: protect-all, alloca used, protected decls present. */
1517 if (flag_stack_protect == 2
1518 || (flag_stack_protect
e3b5732b 1519 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1520 create_stack_guard ();
1f6d3a08 1521
7d69de61
RH
1522 /* Assign rtl to each variable based on these partitions. */
1523 if (stack_vars_num > 0)
1524 {
1525 /* Reorder decls to be protected by iterating over the variables
1526 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1527 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1528 earlier, such that we naturally see these variables first,
1529 and thus naturally allocate things in the right order. */
1530 if (has_protected_decls)
1531 {
1532 /* Phase 1 contains only character arrays. */
1533 expand_stack_vars (stack_protect_decl_phase_1);
1534
1535 /* Phase 2 contains other kinds of arrays. */
1536 if (flag_stack_protect == 2)
1537 expand_stack_vars (stack_protect_decl_phase_2);
1538 }
1539
1540 expand_stack_vars (NULL);
1f6d3a08 1541
ff28a94d 1542 fini_vars_expansion ();
1f6d3a08
RH
1543 }
1544
6c6366f6
JJ
1545 /* If there were any artificial non-ignored vars without rtl
1546 found earlier, see if deferred stack allocation hasn't assigned
1547 rtl to them. */
c021f10b 1548 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
6c6366f6 1549 {
6c6366f6
JJ
1550 rtx rtl = DECL_RTL_IF_SET (var);
1551
6c6366f6
JJ
1552 /* Keep artificial non-ignored vars in cfun->local_decls
1553 chain until instantiate_decls. */
1554 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1555 add_local_decl (cfun, var);
6c6366f6 1556 }
c021f10b 1557 VEC_free (tree, heap, maybe_local_decls);
6c6366f6 1558
1f6d3a08
RH
1559 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1560 if (STACK_ALIGNMENT_NEEDED)
1561 {
1562 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1563 if (!FRAME_GROWS_DOWNWARD)
1564 frame_offset += align - 1;
1565 frame_offset &= -align;
1566 }
727a31fa
RH
1567}
1568
1569
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

726a989a 1629
529ff441
MM
1630/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1631 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1632 possibly clean up the CFG and instruction sequence. LAST is the
1633 last instruction before the just emitted jump sequence. */
529ff441
MM
1634
1635static void
315adeda 1636maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1637{
1638 /* Special case: when jumpif decides that the condition is
1639 trivial it emits an unconditional jump (and the necessary
1640 barrier). But we still have two edges, the fallthru one is
1641 wrong. purge_dead_edges would clean this up later. Unfortunately
1642 we have to insert insns (and split edges) before
1643 find_many_sub_basic_blocks and hence before purge_dead_edges.
1644 But splitting edges might create new blocks which depend on the
1645 fact that if there are two edges there's no barrier. So the
1646 barrier would get lost and verify_flow_info would ICE. Instead
1647 of auditing all edge splitters to care for the barrier (which
1648 normally isn't there in a cleaned CFG), fix it here. */
1649 if (BARRIER_P (get_last_insn ()))
1650 {
529ff441
MM
1651 rtx insn;
1652 remove_edge (e);
1653 /* Now, we have a single successor block, if we have insns to
1654 insert on the remaining edge we potentially will insert
1655 it at the end of this block (if the dest block isn't feasible)
1656 in order to avoid splitting the edge. This insertion will take
1657 place in front of the last jump. But we might have emitted
1658 multiple jumps (conditional and one unconditional) to the
1659 same destination. Inserting in front of the last one then
1660 is a problem. See PR 40021. We fix this by deleting all
1661 jumps except the last unconditional one. */
1662 insn = PREV_INSN (get_last_insn ());
1663 /* Make sure we have an unconditional jump. Otherwise we're
1664 confused. */
1665 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1666 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1667 {
1668 insn = PREV_INSN (insn);
1669 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1670 {
8a269cb7 1671 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1672 {
1673 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1674 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1675 }
1676 delete_insn (NEXT_INSN (insn));
1677 }
529ff441
MM
1678 }
1679 }
1680}
1681
726a989a 1682/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1683 Returns a new basic block if we've terminated the current basic
1684 block and created a new one. */
1685
1686static basic_block
726a989a 1687expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1688{
1689 basic_block new_bb, dest;
1690 edge new_edge;
1691 edge true_edge;
1692 edge false_edge;
b7211528 1693 rtx last2, last;
28ed065e
MM
1694 enum tree_code code;
1695 tree op0, op1;
1696
1697 code = gimple_cond_code (stmt);
1698 op0 = gimple_cond_lhs (stmt);
1699 op1 = gimple_cond_rhs (stmt);
1700 /* We're sometimes presented with such code:
1701 D.123_1 = x < y;
1702 if (D.123_1 != 0)
1703 ...
1704 This would expand to two comparisons which then later might
1705 be cleaned up by combine. But some pattern matchers like if-conversion
1706 work better when there's only one compare, so make up for this
 1707	     here as a special exception if TER would have made the same change.  */
1708 if (gimple_cond_single_var_p (stmt)
1709 && SA.values
1710 && TREE_CODE (op0) == SSA_NAME
1711 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1712 {
1713 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1714 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1715 {
e83f4b68
MM
1716 enum tree_code code2 = gimple_assign_rhs_code (second);
1717 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1718 {
1719 code = code2;
1720 op0 = gimple_assign_rhs1 (second);
1721 op1 = gimple_assign_rhs2 (second);
1722 }
1723 /* If jumps are cheap turn some more codes into
1724 jumpy sequences. */
1725 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1726 {
1727 if ((code2 == BIT_AND_EXPR
1728 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1729 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1730 || code2 == TRUTH_AND_EXPR)
1731 {
1732 code = TRUTH_ANDIF_EXPR;
1733 op0 = gimple_assign_rhs1 (second);
1734 op1 = gimple_assign_rhs2 (second);
1735 }
1736 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1737 {
1738 code = TRUTH_ORIF_EXPR;
1739 op0 = gimple_assign_rhs1 (second);
1740 op1 = gimple_assign_rhs2 (second);
1741 }
1742 }
28ed065e
MM
1743 }
1744 }
b7211528
SB
1745
1746 last2 = last = get_last_insn ();
80c7a9eb
RH
1747
1748 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
c82fee88
EB
1749 set_curr_insn_source_location (gimple_location (stmt));
1750 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1751
1752 /* These flags have no purpose in RTL land. */
1753 true_edge->flags &= ~EDGE_TRUE_VALUE;
1754 false_edge->flags &= ~EDGE_FALSE_VALUE;
1755
1756 /* We can either have a pure conditional jump with one fallthru edge or
1757 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1758 if (false_edge->dest == bb->next_bb)
80c7a9eb 1759 {
40e90eac
JJ
1760 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1761 true_edge->probability);
726a989a 1762 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1763 if (true_edge->goto_locus)
7241571e
JJ
1764 {
1765 set_curr_insn_source_location (true_edge->goto_locus);
1766 set_curr_insn_block (true_edge->goto_block);
1767 true_edge->goto_locus = curr_insn_locator ();
1768 }
1769 true_edge->goto_block = NULL;
a9b77cd1 1770 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1771 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1772 return NULL;
1773 }
a9b77cd1 1774 if (true_edge->dest == bb->next_bb)
80c7a9eb 1775 {
40e90eac
JJ
1776 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1777 false_edge->probability);
726a989a 1778 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1779 if (false_edge->goto_locus)
7241571e
JJ
1780 {
1781 set_curr_insn_source_location (false_edge->goto_locus);
1782 set_curr_insn_block (false_edge->goto_block);
1783 false_edge->goto_locus = curr_insn_locator ();
1784 }
1785 false_edge->goto_block = NULL;
a9b77cd1 1786 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1787 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1788 return NULL;
1789 }
80c7a9eb 1790
40e90eac
JJ
1791 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1792 true_edge->probability);
80c7a9eb 1793 last = get_last_insn ();
7241571e
JJ
1794 if (false_edge->goto_locus)
1795 {
1796 set_curr_insn_source_location (false_edge->goto_locus);
1797 set_curr_insn_block (false_edge->goto_block);
1798 false_edge->goto_locus = curr_insn_locator ();
1799 }
1800 false_edge->goto_block = NULL;
a9b77cd1 1801 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1802
1803 BB_END (bb) = last;
1804 if (BARRIER_P (BB_END (bb)))
1805 BB_END (bb) = PREV_INSN (BB_END (bb));
1806 update_bb_for_insn (bb);
1807
1808 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1809 dest = false_edge->dest;
1810 redirect_edge_succ (false_edge, new_bb);
1811 false_edge->flags |= EDGE_FALLTHRU;
1812 new_bb->count = false_edge->count;
1813 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1814 new_edge = make_edge (new_bb, dest, 0);
1815 new_edge->probability = REG_BR_PROB_BASE;
1816 new_edge->count = new_bb->count;
1817 if (BARRIER_P (BB_END (new_bb)))
1818 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1819 update_bb_for_insn (new_bb);
1820
726a989a 1821 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1822
7787b4aa
JJ
1823 if (true_edge->goto_locus)
1824 {
1825 set_curr_insn_source_location (true_edge->goto_locus);
1826 set_curr_insn_block (true_edge->goto_block);
1827 true_edge->goto_locus = curr_insn_locator ();
1828 }
1829 true_edge->goto_block = NULL;
1830
80c7a9eb
RH
1831 return new_bb;
1832}
1833
28ed065e
MM
1834/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1835 statement STMT. */
1836
1837static void
1838expand_call_stmt (gimple stmt)
1839{
1840 tree exp;
1841 tree lhs = gimple_call_lhs (stmt);
28ed065e 1842 size_t i;
e23817b3
RG
1843 bool builtin_p;
1844 tree decl;
28ed065e
MM
1845
1846 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1847
1848 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
1849 decl = gimple_call_fndecl (stmt);
1850 builtin_p = decl && DECL_BUILT_IN (decl);
1851
28ed065e
MM
1852 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1853 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1854
1855 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
1856 {
1857 tree arg = gimple_call_arg (stmt, i);
1858 gimple def;
 1859	      /* TER substitutes addresses into arguments of builtin functions so we
 1860	         have a chance to infer more correct alignment information.  See PR39954.  */
1861 if (builtin_p
1862 && TREE_CODE (arg) == SSA_NAME
1863 && (def = get_gimple_for_ssa_name (arg))
1864 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1865 arg = gimple_assign_rhs1 (def);
1866 CALL_EXPR_ARG (exp, i) = arg;
1867 }
28ed065e 1868
93f28ca7 1869 if (gimple_has_side_effects (stmt))
28ed065e
MM
1870 TREE_SIDE_EFFECTS (exp) = 1;
1871
93f28ca7 1872 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
1873 TREE_NOTHROW (exp) = 1;
1874
1875 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1876 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1877 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1878 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1879 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1880 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1881 TREE_BLOCK (exp) = gimple_block (stmt);
1882
28ed065e
MM
1883 if (lhs)
1884 expand_assignment (lhs, exp, false);
1885 else
1886 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1887}
1888
1889/* A subroutine of expand_gimple_stmt, expanding one gimple statement
1890 STMT that doesn't require special handling for outgoing edges. That
1891 is no tailcalls and no GIMPLE_COND. */
1892
1893static void
1894expand_gimple_stmt_1 (gimple stmt)
1895{
1896 tree op0;
c82fee88
EB
1897
1898 set_curr_insn_source_location (gimple_location (stmt));
1899 set_curr_insn_block (gimple_block (stmt));
1900
28ed065e
MM
1901 switch (gimple_code (stmt))
1902 {
1903 case GIMPLE_GOTO:
1904 op0 = gimple_goto_dest (stmt);
1905 if (TREE_CODE (op0) == LABEL_DECL)
1906 expand_goto (op0);
1907 else
1908 expand_computed_goto (op0);
1909 break;
1910 case GIMPLE_LABEL:
1911 expand_label (gimple_label_label (stmt));
1912 break;
1913 case GIMPLE_NOP:
1914 case GIMPLE_PREDICT:
1915 break;
28ed065e
MM
1916 case GIMPLE_SWITCH:
1917 expand_case (stmt);
1918 break;
1919 case GIMPLE_ASM:
1920 expand_asm_stmt (stmt);
1921 break;
1922 case GIMPLE_CALL:
1923 expand_call_stmt (stmt);
1924 break;
1925
1926 case GIMPLE_RETURN:
1927 op0 = gimple_return_retval (stmt);
1928
1929 if (op0 && op0 != error_mark_node)
1930 {
1931 tree result = DECL_RESULT (current_function_decl);
1932
1933 /* If we are not returning the current function's RESULT_DECL,
1934 build an assignment to it. */
1935 if (op0 != result)
1936 {
1937 /* I believe that a function's RESULT_DECL is unique. */
1938 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1939
 1940	      /* ??? We'd like to simply use expand_assignment here,
1941 but this fails if the value is of BLKmode but the return
1942 decl is a register. expand_return has special handling
1943 for this combination, which eventually should move
1944 to common code. See comments there. Until then, let's
1945 build a modify expression :-/ */
1946 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1947 result, op0);
1948 }
1949 }
1950 if (!op0)
1951 expand_null_return ();
1952 else
1953 expand_return (op0);
1954 break;
1955
1956 case GIMPLE_ASSIGN:
1957 {
1958 tree lhs = gimple_assign_lhs (stmt);
1959
1960 /* Tree expand used to fiddle with |= and &= of two bitfield
1961 COMPONENT_REFs here. This can't happen with gimple, the LHS
1962 of binary assigns must be a gimple reg. */
1963
1964 if (TREE_CODE (lhs) != SSA_NAME
1965 || get_gimple_rhs_class (gimple_expr_code (stmt))
1966 == GIMPLE_SINGLE_RHS)
1967 {
1968 tree rhs = gimple_assign_rhs1 (stmt);
1969 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1970 == GIMPLE_SINGLE_RHS);
1971 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1972 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1973 expand_assignment (lhs, rhs,
1974 gimple_assign_nontemporal_move_p (stmt));
1975 }
1976 else
1977 {
1978 rtx target, temp;
1979 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1980 struct separate_ops ops;
1981 bool promoted = false;
1982
1983 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1984 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1985 promoted = true;
1986
1987 ops.code = gimple_assign_rhs_code (stmt);
1988 ops.type = TREE_TYPE (lhs);
1989 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1990 {
0354c0c7
BS
1991 case GIMPLE_TERNARY_RHS:
1992 ops.op2 = gimple_assign_rhs3 (stmt);
1993 /* Fallthru */
28ed065e
MM
1994 case GIMPLE_BINARY_RHS:
1995 ops.op1 = gimple_assign_rhs2 (stmt);
1996 /* Fallthru */
1997 case GIMPLE_UNARY_RHS:
1998 ops.op0 = gimple_assign_rhs1 (stmt);
1999 break;
2000 default:
2001 gcc_unreachable ();
2002 }
2003 ops.location = gimple_location (stmt);
2004
2005 /* If we want to use a nontemporal store, force the value to
2006 register first. If we store into a promoted register,
2007 don't directly expand to target. */
2008 temp = nontemporal || promoted ? NULL_RTX : target;
2009 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2010 EXPAND_NORMAL);
2011
2012 if (temp == target)
2013 ;
2014 else if (promoted)
2015 {
4e18a7d4 2016 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2017 /* If TEMP is a VOIDmode constant, use convert_modes to make
2018 sure that we properly convert it. */
2019 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2020 {
2021 temp = convert_modes (GET_MODE (target),
2022 TYPE_MODE (ops.type),
4e18a7d4 2023 temp, unsignedp);
28ed065e 2024 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2025 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2026 }
2027
4e18a7d4 2028 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2029 }
2030 else if (nontemporal && emit_storent_insn (target, temp))
2031 ;
2032 else
2033 {
2034 temp = force_operand (temp, target);
2035 if (temp != target)
2036 emit_move_insn (target, temp);
2037 }
2038 }
2039 }
2040 break;
2041
2042 default:
2043 gcc_unreachable ();
2044 }
2045}
2046
2047/* Expand one gimple statement STMT and return the last RTL instruction
2048 before any of the newly generated ones.
2049
2050 In addition to generating the necessary RTL instructions this also
2051 sets REG_EH_REGION notes if necessary and sets the current source
2052 location for diagnostics. */
2053
2054static rtx
2055expand_gimple_stmt (gimple stmt)
2056{
28ed065e 2057 location_t saved_location = input_location;
c82fee88
EB
2058 rtx last = get_last_insn ();
2059 int lp_nr;
28ed065e 2060
28ed065e
MM
2061 gcc_assert (cfun);
2062
c82fee88
EB
2063 /* We need to save and restore the current source location so that errors
2064 discovered during expansion are emitted with the right location. But
2065 it would be better if the diagnostic routines used the source location
2066 embedded in the tree nodes rather than globals. */
28ed065e 2067 if (gimple_has_location (stmt))
c82fee88 2068 input_location = gimple_location (stmt);
28ed065e
MM
2069
2070 expand_gimple_stmt_1 (stmt);
c82fee88 2071
28ed065e
MM
2072 /* Free any temporaries used to evaluate this statement. */
2073 free_temp_slots ();
2074
2075 input_location = saved_location;
2076
2077 /* Mark all insns that may trap. */
1d65f45c
RH
2078 lp_nr = lookup_stmt_eh_lp (stmt);
2079 if (lp_nr)
28ed065e
MM
2080 {
2081 rtx insn;
2082 for (insn = next_real_insn (last); insn;
2083 insn = next_real_insn (insn))
2084 {
2085 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2086 /* If we want exceptions for non-call insns, any
2087 may_trap_p instruction may throw. */
2088 && GET_CODE (PATTERN (insn)) != CLOBBER
2089 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2090 && insn_could_throw_p (insn))
2091 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2092 }
2093 }
2094
2095 return last;
2096}
2097
726a989a 2098/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2099 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2100 generated a tail call (something that might be denied by the ABI
cea49550
RH
2101 rules governing the call; see calls.c).
2102
2103 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2104 can still reach the rest of BB. The case here is __builtin_sqrt,
2105 where the NaN result goes through the external function (with a
2106 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2107
2108static basic_block
726a989a 2109expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2110{
b7211528 2111 rtx last2, last;
224e770b 2112 edge e;
628f6a4e 2113 edge_iterator ei;
224e770b
RH
2114 int probability;
2115 gcov_type count;
80c7a9eb 2116
28ed065e 2117 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2118
2119 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2120 if (CALL_P (last) && SIBLING_CALL_P (last))
2121 goto found;
80c7a9eb 2122
726a989a 2123 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2124
cea49550 2125 *can_fallthru = true;
224e770b 2126 return NULL;
80c7a9eb 2127
224e770b
RH
2128 found:
2129 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2130 Any instructions emitted here are about to be deleted. */
2131 do_pending_stack_adjust ();
2132
2133 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2134 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2135 EH or abnormal edges, we shouldn't have created a tail call in
2136 the first place. So it seems to me we should just be removing
2137 all edges here, or redirecting the existing fallthru edge to
2138 the exit block. */
2139
224e770b
RH
2140 probability = 0;
2141 count = 0;
224e770b 2142
628f6a4e
BE
2143 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2144 {
224e770b
RH
2145 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2146 {
2147 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2148 {
224e770b
RH
2149 e->dest->count -= e->count;
2150 e->dest->frequency -= EDGE_FREQUENCY (e);
2151 if (e->dest->count < 0)
c22cacf3 2152 e->dest->count = 0;
224e770b 2153 if (e->dest->frequency < 0)
c22cacf3 2154 e->dest->frequency = 0;
80c7a9eb 2155 }
224e770b
RH
2156 count += e->count;
2157 probability += e->probability;
2158 remove_edge (e);
80c7a9eb 2159 }
628f6a4e
BE
2160 else
2161 ei_next (&ei);
80c7a9eb
RH
2162 }
2163
224e770b
RH
2164 /* This is somewhat ugly: the call_expr expander often emits instructions
2165 after the sibcall (to perform the function return). These confuse the
12eff7b7 2166     find_many_sub_basic_blocks code, so we need to get rid of them.  */
224e770b 2167 last = NEXT_INSN (last);
341c100f 2168 gcc_assert (BARRIER_P (last));
cea49550
RH
2169
2170 *can_fallthru = false;
224e770b
RH
2171 while (NEXT_INSN (last))
2172 {
 2173	      /* For instance, a sqrt builtin expander expands an if with a
 2174	         sibcall in the then-arm and a label for the else-arm.  */
2175 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2176 {
2177 *can_fallthru = true;
2178 break;
2179 }
224e770b
RH
2180 delete_insn (NEXT_INSN (last));
2181 }
2182
2183 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2184 e->probability += probability;
2185 e->count += count;
2186 BB_END (bb) = last;
2187 update_bb_for_insn (bb);
2188
2189 if (NEXT_INSN (last))
2190 {
2191 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2192
2193 last = BB_END (bb);
2194 if (BARRIER_P (last))
2195 BB_END (bb) = PREV_INSN (last);
2196 }
2197
726a989a 2198 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2199
224e770b 2200 return bb;
80c7a9eb
RH
2201}
2202
b5b8b0ac
AO
2203/* Return the difference between the floor and the truncated result of
2204 a signed division by OP1 with remainder MOD. */
2205static rtx
2206floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2207{
2208 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2209 return gen_rtx_IF_THEN_ELSE
2210 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2211 gen_rtx_IF_THEN_ELSE
2212 (mode, gen_rtx_LT (BImode,
2213 gen_rtx_DIV (mode, op1, mod),
2214 const0_rtx),
2215 constm1_rtx, const0_rtx),
2216 const0_rtx);
2217}
2218
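/* Editorial sketch (not part of this pass): the same floor adjustment
   computed on plain C integers under C99 truncating division, for readers
   who want a concrete check.  The helper name floor_div_sketch is
   hypothetical and deliberately kept out of the build.  For op0 = -7,
   op1 = 2 the truncated quotient is -3 with remainder -1; op1 / mod is
   negative, so the adjustment is -1 and the result is -4 == floor (-3.5).  */
#if 0
static int
floor_div_sketch (int op0, int op1)
{
  int quot = op0 / op1;   /* quotient truncated towards zero */
  int mod = op0 % op1;    /* remainder, carries the sign of op0 */
  /* Mirrors floor_sdiv_adjust: -1 iff there is a remainder and the
     operands have opposite signs.  */
  int adj = (mod != 0 && op1 / mod < 0) ? -1 : 0;
  return quot + adj;      /* floor_div_sketch (-7, 2) == -4 */
}
#endif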
2219/* Return the difference between the ceil and the truncated result of
2220 a signed division by OP1 with remainder MOD. */
2221static rtx
2222ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2223{
2224 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2225 return gen_rtx_IF_THEN_ELSE
2226 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2227 gen_rtx_IF_THEN_ELSE
2228 (mode, gen_rtx_GT (BImode,
2229 gen_rtx_DIV (mode, op1, mod),
2230 const0_rtx),
2231 const1_rtx, const0_rtx),
2232 const0_rtx);
2233}
2234
2235/* Return the difference between the ceil and the truncated result of
2236 an unsigned division by OP1 with remainder MOD. */
2237static rtx
2238ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2239{
2240 /* (mod != 0 ? 1 : 0) */
2241 return gen_rtx_IF_THEN_ELSE
2242 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2243 const1_rtx, const0_rtx);
2244}
2245
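/* Editorial worked example for the ceiling adjustments above: for an
   unsigned 7 / 2 the truncated quotient is 3 with remainder 1; the
   remainder is non-zero, so ceil_udiv_adjust adds 1 and the result is 4.
   For a signed -7 / 2 the remainder is -1 and op1 / mod is negative, so
   ceil_sdiv_adjust adds nothing and the truncated -3 is already
   ceil (-3.5).  */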
2246/* Return the difference between the rounded and the truncated result
2247 of a signed division by OP1 with remainder MOD. Halfway cases are
2248 rounded away from zero, rather than to the nearest even number. */
2249static rtx
2250round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2251{
2252 /* (abs (mod) >= abs (op1) - abs (mod)
2253 ? (op1 / mod > 0 ? 1 : -1)
2254 : 0) */
2255 return gen_rtx_IF_THEN_ELSE
2256 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2257 gen_rtx_MINUS (mode,
2258 gen_rtx_ABS (mode, op1),
2259 gen_rtx_ABS (mode, mod))),
2260 gen_rtx_IF_THEN_ELSE
2261 (mode, gen_rtx_GT (BImode,
2262 gen_rtx_DIV (mode, op1, mod),
2263 const0_rtx),
2264 const1_rtx, constm1_rtx),
2265 const0_rtx);
2266}
2267
2268/* Return the difference between the rounded and the truncated result
 2269	   of an unsigned division by OP1 with remainder MOD.  Halfway cases
2270 are rounded away from zero, rather than to the nearest even
2271 number. */
2272static rtx
2273round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2274{
2275 /* (mod >= op1 - mod ? 1 : 0) */
2276 return gen_rtx_IF_THEN_ELSE
2277 (mode, gen_rtx_GE (BImode, mod,
2278 gen_rtx_MINUS (mode, op1, mod)),
2279 const1_rtx, const0_rtx);
2280}
2281
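/* Editorial worked example for the rounding adjustments above: for a
   signed -7 / 2, abs (mod) = 1 equals abs (op1) - abs (mod) = 1, so the
   halfway test holds; op1 / mod is negative, so round_sdiv_adjust adds -1
   and the truncated -3 becomes -4, i.e. -3.5 rounded away from zero.  For
   an unsigned 7 / 2, mod (1) >= op1 - mod (1), so round_udiv_adjust adds 1
   and the result is 4.  */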
dda2da58
AO
 2282/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2283 any rtl. */
2284
2285static rtx
f61c6f34
JJ
2286convert_debug_memory_address (enum machine_mode mode, rtx x,
2287 addr_space_t as)
dda2da58
AO
2288{
2289 enum machine_mode xmode = GET_MODE (x);
2290
2291#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2292 gcc_assert (mode == Pmode
2293 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2294 gcc_assert (xmode == mode || xmode == VOIDmode);
2295#else
f61c6f34
JJ
2296 rtx temp;
2297 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2298 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2299
2300 gcc_assert (mode == address_mode || mode == pointer_mode);
dda2da58
AO
2301
2302 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2303 return x;
2304
2305 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2306 x = simplify_gen_subreg (mode, x, xmode,
2307 subreg_lowpart_offset
2308 (mode, xmode));
2309 else if (POINTERS_EXTEND_UNSIGNED > 0)
2310 x = gen_rtx_ZERO_EXTEND (mode, x);
2311 else if (!POINTERS_EXTEND_UNSIGNED)
2312 x = gen_rtx_SIGN_EXTEND (mode, x);
2313 else
f61c6f34
JJ
2314 {
2315 switch (GET_CODE (x))
2316 {
2317 case SUBREG:
2318 if ((SUBREG_PROMOTED_VAR_P (x)
2319 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2320 || (GET_CODE (SUBREG_REG (x)) == PLUS
2321 && REG_P (XEXP (SUBREG_REG (x), 0))
2322 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2323 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2324 && GET_MODE (SUBREG_REG (x)) == mode)
2325 return SUBREG_REG (x);
2326 break;
2327 case LABEL_REF:
2328 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2329 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2330 return temp;
2331 case SYMBOL_REF:
2332 temp = shallow_copy_rtx (x);
2333 PUT_MODE (temp, mode);
2334 return temp;
2335 case CONST:
2336 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2337 if (temp)
2338 temp = gen_rtx_CONST (mode, temp);
2339 return temp;
2340 case PLUS:
2341 case MINUS:
2342 if (CONST_INT_P (XEXP (x, 1)))
2343 {
2344 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2345 if (temp)
2346 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2347 }
2348 break;
2349 default:
2350 break;
2351 }
 2352	  /* Don't know how to express ptr_extend as an operation in debug info.  */
2353 return NULL;
2354 }
dda2da58
AO
2355#endif /* POINTERS_EXTEND_UNSIGNED */
2356
2357 return x;
2358}
2359
b5b8b0ac
AO
2360/* Return an RTX equivalent to the value of the tree expression
2361 EXP. */
2362
2363static rtx
2364expand_debug_expr (tree exp)
2365{
2366 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2367 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2368 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2369 addr_space_t as;
b5b8b0ac
AO
2370
2371 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2372 {
2373 case tcc_expression:
2374 switch (TREE_CODE (exp))
2375 {
2376 case COND_EXPR:
7ece48b1 2377 case DOT_PROD_EXPR:
0354c0c7
BS
2378 case WIDEN_MULT_PLUS_EXPR:
2379 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2380 case FMA_EXPR:
b5b8b0ac
AO
2381 goto ternary;
2382
2383 case TRUTH_ANDIF_EXPR:
2384 case TRUTH_ORIF_EXPR:
2385 case TRUTH_AND_EXPR:
2386 case TRUTH_OR_EXPR:
2387 case TRUTH_XOR_EXPR:
2388 goto binary;
2389
2390 case TRUTH_NOT_EXPR:
2391 goto unary;
2392
2393 default:
2394 break;
2395 }
2396 break;
2397
2398 ternary:
2399 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2400 if (!op2)
2401 return NULL_RTX;
2402 /* Fall through. */
2403
2404 binary:
2405 case tcc_binary:
2406 case tcc_comparison:
2407 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2408 if (!op1)
2409 return NULL_RTX;
2410 /* Fall through. */
2411
2412 unary:
2413 case tcc_unary:
2414 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2415 if (!op0)
2416 return NULL_RTX;
2417 break;
2418
2419 case tcc_type:
2420 case tcc_statement:
2421 gcc_unreachable ();
2422
2423 case tcc_constant:
2424 case tcc_exceptional:
2425 case tcc_declaration:
2426 case tcc_reference:
2427 case tcc_vl_exp:
2428 break;
2429 }
2430
2431 switch (TREE_CODE (exp))
2432 {
2433 case STRING_CST:
2434 if (!lookup_constant_def (exp))
2435 {
e1b243a8
JJ
2436 if (strlen (TREE_STRING_POINTER (exp)) + 1
2437 != (size_t) TREE_STRING_LENGTH (exp))
2438 return NULL_RTX;
b5b8b0ac
AO
2439 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2440 op0 = gen_rtx_MEM (BLKmode, op0);
2441 set_mem_attributes (op0, exp, 0);
2442 return op0;
2443 }
2444 /* Fall through... */
2445
2446 case INTEGER_CST:
2447 case REAL_CST:
2448 case FIXED_CST:
2449 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2450 return op0;
2451
2452 case COMPLEX_CST:
2453 gcc_assert (COMPLEX_MODE_P (mode));
2454 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2455 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2456 return gen_rtx_CONCAT (mode, op0, op1);
2457
0ca5af51
AO
2458 case DEBUG_EXPR_DECL:
2459 op0 = DECL_RTL_IF_SET (exp);
2460
2461 if (op0)
2462 return op0;
2463
2464 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2465 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2466 SET_DECL_RTL (exp, op0);
2467
2468 return op0;
2469
b5b8b0ac
AO
2470 case VAR_DECL:
2471 case PARM_DECL:
2472 case FUNCTION_DECL:
2473 case LABEL_DECL:
2474 case CONST_DECL:
2475 case RESULT_DECL:
2476 op0 = DECL_RTL_IF_SET (exp);
2477
2478 /* This decl was probably optimized away. */
2479 if (!op0)
e1b243a8
JJ
2480 {
2481 if (TREE_CODE (exp) != VAR_DECL
2482 || DECL_EXTERNAL (exp)
2483 || !TREE_STATIC (exp)
2484 || !DECL_NAME (exp)
0fba566c
JJ
2485 || DECL_HARD_REGISTER (exp)
2486 || mode == VOIDmode)
e1b243a8
JJ
2487 return NULL;
2488
b1aa0655 2489 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2490 if (!MEM_P (op0)
2491 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2492 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2493 return NULL;
2494 }
2495 else
2496 op0 = copy_rtx (op0);
b5b8b0ac 2497
06796564
JJ
2498 if (GET_MODE (op0) == BLKmode
 2499	      /* If op0 is not BLKmode, but mode is, adjust_mode
2500 below would ICE. While it is likely a FE bug,
2501 try to be robust here. See PR43166. */
132b4e82
JJ
2502 || mode == BLKmode
2503 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2504 {
2505 gcc_assert (MEM_P (op0));
2506 op0 = adjust_address_nv (op0, mode, 0);
2507 return op0;
2508 }
2509
2510 /* Fall through. */
2511
2512 adjust_mode:
2513 case PAREN_EXPR:
2514 case NOP_EXPR:
2515 case CONVERT_EXPR:
2516 {
2517 enum machine_mode inner_mode = GET_MODE (op0);
2518
2519 if (mode == inner_mode)
2520 return op0;
2521
2522 if (inner_mode == VOIDmode)
2523 {
2a8e30fb
MM
2524 if (TREE_CODE (exp) == SSA_NAME)
2525 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2526 else
2527 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2528 if (mode == inner_mode)
2529 return op0;
2530 }
2531
2532 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2533 {
2534 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2535 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2536 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2537 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2538 else
2539 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2540 }
2541 else if (FLOAT_MODE_P (mode))
2542 {
2a8e30fb 2543 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2544 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2545 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2546 else
2547 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2548 }
2549 else if (FLOAT_MODE_P (inner_mode))
2550 {
2551 if (unsignedp)
2552 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2553 else
2554 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2555 }
2556 else if (CONSTANT_P (op0)
2557 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2558 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2559 subreg_lowpart_offset (mode,
2560 inner_mode));
1b47fe3f
JJ
2561 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2562 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2563 : unsignedp)
b5b8b0ac
AO
2564 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2565 else
2566 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2567
2568 return op0;
2569 }
2570
70f34814 2571 case MEM_REF:
71f3a3f5
JJ
2572 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2573 {
2574 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2575 TREE_OPERAND (exp, 0),
2576 TREE_OPERAND (exp, 1));
2577 if (newexp)
2578 return expand_debug_expr (newexp);
2579 }
2580 /* FALLTHROUGH */
b5b8b0ac 2581 case INDIRECT_REF:
b5b8b0ac
AO
2582 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2583 if (!op0)
2584 return NULL;
2585
cb115041
JJ
2586 if (TREE_CODE (exp) == MEM_REF)
2587 {
583ac69c
JJ
2588 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2589 || (GET_CODE (op0) == PLUS
2590 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2591 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2592 Instead just use get_inner_reference. */
2593 goto component_ref;
2594
cb115041
JJ
2595 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2596 if (!op1 || !CONST_INT_P (op1))
2597 return NULL;
2598
2599 op0 = plus_constant (op0, INTVAL (op1));
2600 }
2601
09e881c9 2602 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2603 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2604 else
75421dcd 2605 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2606
f61c6f34
JJ
2607 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2608 op0, as);
2609 if (op0 == NULL_RTX)
2610 return NULL;
b5b8b0ac 2611
f61c6f34 2612 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2613 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2614 if (TREE_CODE (exp) == MEM_REF
2615 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2616 set_mem_expr (op0, NULL_TREE);
09e881c9 2617 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2618
2619 return op0;
2620
2621 case TARGET_MEM_REF:
4d948885
RG
2622 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2623 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2624 return NULL;
2625
2626 op0 = expand_debug_expr
4e25ca6b 2627 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2628 if (!op0)
2629 return NULL;
2630
f61c6f34
JJ
2631 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2632 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2633 else
2634 as = ADDR_SPACE_GENERIC;
2635
2636 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2637 op0, as);
2638 if (op0 == NULL_RTX)
2639 return NULL;
b5b8b0ac
AO
2640
2641 op0 = gen_rtx_MEM (mode, op0);
2642
2643 set_mem_attributes (op0, exp, 0);
09e881c9 2644 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2645
2646 return op0;
2647
583ac69c 2648 component_ref:
b5b8b0ac
AO
2649 case ARRAY_REF:
2650 case ARRAY_RANGE_REF:
2651 case COMPONENT_REF:
2652 case BIT_FIELD_REF:
2653 case REALPART_EXPR:
2654 case IMAGPART_EXPR:
2655 case VIEW_CONVERT_EXPR:
2656 {
2657 enum machine_mode mode1;
2658 HOST_WIDE_INT bitsize, bitpos;
2659 tree offset;
2660 int volatilep = 0;
2661 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2662 &mode1, &unsignedp, &volatilep, false);
2663 rtx orig_op0;
2664
4f2a9af8
JJ
2665 if (bitsize == 0)
2666 return NULL;
2667
b5b8b0ac
AO
2668 orig_op0 = op0 = expand_debug_expr (tem);
2669
2670 if (!op0)
2671 return NULL;
2672
2673 if (offset)
2674 {
dda2da58
AO
2675 enum machine_mode addrmode, offmode;
2676
aa847cc8
JJ
2677 if (!MEM_P (op0))
2678 return NULL;
b5b8b0ac 2679
dda2da58
AO
2680 op0 = XEXP (op0, 0);
2681 addrmode = GET_MODE (op0);
2682 if (addrmode == VOIDmode)
2683 addrmode = Pmode;
2684
b5b8b0ac
AO
2685 op1 = expand_debug_expr (offset);
2686 if (!op1)
2687 return NULL;
2688
dda2da58
AO
2689 offmode = GET_MODE (op1);
2690 if (offmode == VOIDmode)
2691 offmode = TYPE_MODE (TREE_TYPE (offset));
2692
2693 if (addrmode != offmode)
2694 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2695 subreg_lowpart_offset (addrmode,
2696 offmode));
2697
2698 /* Don't use offset_address here, we don't need a
2699 recognizable address, and we don't want to generate
2700 code. */
2701 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
b5b8b0ac
AO
2702 }
2703
2704 if (MEM_P (op0))
2705 {
4f2a9af8
JJ
2706 if (mode1 == VOIDmode)
2707 /* Bitfield. */
2708 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2709 if (bitpos >= BITS_PER_UNIT)
2710 {
2711 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2712 bitpos %= BITS_PER_UNIT;
2713 }
2714 else if (bitpos < 0)
2715 {
4f2a9af8
JJ
2716 HOST_WIDE_INT units
2717 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2718 op0 = adjust_address_nv (op0, mode1, units);
2719 bitpos += units * BITS_PER_UNIT;
2720 }
2721 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2722 op0 = adjust_address_nv (op0, mode, 0);
2723 else if (GET_MODE (op0) != mode1)
2724 op0 = adjust_address_nv (op0, mode1, 0);
2725 else
2726 op0 = copy_rtx (op0);
2727 if (op0 == orig_op0)
2728 op0 = shallow_copy_rtx (op0);
2729 set_mem_attributes (op0, exp, 0);
2730 }
2731
2732 if (bitpos == 0 && mode == GET_MODE (op0))
2733 return op0;
2734
2d3fc6aa
JJ
2735 if (bitpos < 0)
2736 return NULL;
2737
88c04a5d
JJ
2738 if (GET_MODE (op0) == BLKmode)
2739 return NULL;
2740
b5b8b0ac
AO
2741 if ((bitpos % BITS_PER_UNIT) == 0
2742 && bitsize == GET_MODE_BITSIZE (mode1))
2743 {
2744 enum machine_mode opmode = GET_MODE (op0);
2745
b5b8b0ac 2746 if (opmode == VOIDmode)
9712cba0 2747 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
2748
2749 /* This condition may hold if we're expanding the address
2750 right past the end of an array that turned out not to
2751 be addressable (i.e., the address was only computed in
2752 debug stmts). The gen_subreg below would rightfully
2753 crash, and the address doesn't really exist, so just
2754 drop it. */
2755 if (bitpos >= GET_MODE_BITSIZE (opmode))
2756 return NULL;
2757
7d5d39bb
JJ
2758 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2759 return simplify_gen_subreg (mode, op0, opmode,
2760 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
2761 }
2762
2763 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2764 && TYPE_UNSIGNED (TREE_TYPE (exp))
2765 ? SIGN_EXTRACT
2766 : ZERO_EXTRACT, mode,
2767 GET_MODE (op0) != VOIDmode
9712cba0
JJ
2768 ? GET_MODE (op0)
2769 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
2770 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2771 }
2772
b5b8b0ac
AO
2773 case ABS_EXPR:
2774 return gen_rtx_ABS (mode, op0);
2775
2776 case NEGATE_EXPR:
2777 return gen_rtx_NEG (mode, op0);
2778
2779 case BIT_NOT_EXPR:
2780 return gen_rtx_NOT (mode, op0);
2781
2782 case FLOAT_EXPR:
2783 if (unsignedp)
2784 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2785 else
2786 return gen_rtx_FLOAT (mode, op0);
2787
2788 case FIX_TRUNC_EXPR:
2789 if (unsignedp)
2790 return gen_rtx_UNSIGNED_FIX (mode, op0);
2791 else
2792 return gen_rtx_FIX (mode, op0);
2793
2794 case POINTER_PLUS_EXPR:
576319a7
DD
2795 /* For the rare target where pointers are not the same size as
2796 size_t, we need to check for mis-matched modes and correct
2797 the addend. */
2798 if (op0 && op1
2799 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2800 && GET_MODE (op0) != GET_MODE (op1))
2801 {
2802 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2803 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2804 else
2805 /* We always sign-extend, regardless of the signedness of
2806 the operand, because the operand is always unsigned
2807 here even if the original C expression is signed. */
2808 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2809 }
2810 /* Fall through. */
b5b8b0ac
AO
2811 case PLUS_EXPR:
2812 return gen_rtx_PLUS (mode, op0, op1);
2813
2814 case MINUS_EXPR:
2815 return gen_rtx_MINUS (mode, op0, op1);
2816
2817 case MULT_EXPR:
2818 return gen_rtx_MULT (mode, op0, op1);
2819
2820 case RDIV_EXPR:
2821 case TRUNC_DIV_EXPR:
2822 case EXACT_DIV_EXPR:
2823 if (unsignedp)
2824 return gen_rtx_UDIV (mode, op0, op1);
2825 else
2826 return gen_rtx_DIV (mode, op0, op1);
2827
2828 case TRUNC_MOD_EXPR:
2829 if (unsignedp)
2830 return gen_rtx_UMOD (mode, op0, op1);
2831 else
2832 return gen_rtx_MOD (mode, op0, op1);
2833
2834 case FLOOR_DIV_EXPR:
2835 if (unsignedp)
2836 return gen_rtx_UDIV (mode, op0, op1);
2837 else
2838 {
2839 rtx div = gen_rtx_DIV (mode, op0, op1);
2840 rtx mod = gen_rtx_MOD (mode, op0, op1);
2841 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2842 return gen_rtx_PLUS (mode, div, adj);
2843 }
2844
2845 case FLOOR_MOD_EXPR:
2846 if (unsignedp)
2847 return gen_rtx_UMOD (mode, op0, op1);
2848 else
2849 {
2850 rtx mod = gen_rtx_MOD (mode, op0, op1);
2851 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2852 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2853 return gen_rtx_PLUS (mode, mod, adj);
2854 }
2855
2856 case CEIL_DIV_EXPR:
2857 if (unsignedp)
2858 {
2859 rtx div = gen_rtx_UDIV (mode, op0, op1);
2860 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2861 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2862 return gen_rtx_PLUS (mode, div, adj);
2863 }
2864 else
2865 {
2866 rtx div = gen_rtx_DIV (mode, op0, op1);
2867 rtx mod = gen_rtx_MOD (mode, op0, op1);
2868 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2869 return gen_rtx_PLUS (mode, div, adj);
2870 }
2871
2872 case CEIL_MOD_EXPR:
2873 if (unsignedp)
2874 {
2875 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2876 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2877 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2878 return gen_rtx_PLUS (mode, mod, adj);
2879 }
2880 else
2881 {
2882 rtx mod = gen_rtx_MOD (mode, op0, op1);
2883 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2884 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2885 return gen_rtx_PLUS (mode, mod, adj);
2886 }
2887
2888 case ROUND_DIV_EXPR:
2889 if (unsignedp)
2890 {
2891 rtx div = gen_rtx_UDIV (mode, op0, op1);
2892 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2893 rtx adj = round_udiv_adjust (mode, mod, op1);
2894 return gen_rtx_PLUS (mode, div, adj);
2895 }
2896 else
2897 {
2898 rtx div = gen_rtx_DIV (mode, op0, op1);
2899 rtx mod = gen_rtx_MOD (mode, op0, op1);
2900 rtx adj = round_sdiv_adjust (mode, mod, op1);
2901 return gen_rtx_PLUS (mode, div, adj);
2902 }
2903
2904 case ROUND_MOD_EXPR:
2905 if (unsignedp)
2906 {
2907 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2908 rtx adj = round_udiv_adjust (mode, mod, op1);
2909 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2910 return gen_rtx_PLUS (mode, mod, adj);
2911 }
2912 else
2913 {
2914 rtx mod = gen_rtx_MOD (mode, op0, op1);
2915 rtx adj = round_sdiv_adjust (mode, mod, op1);
2916 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2917 return gen_rtx_PLUS (mode, mod, adj);
2918 }
2919
2920 case LSHIFT_EXPR:
2921 return gen_rtx_ASHIFT (mode, op0, op1);
2922
2923 case RSHIFT_EXPR:
2924 if (unsignedp)
2925 return gen_rtx_LSHIFTRT (mode, op0, op1);
2926 else
2927 return gen_rtx_ASHIFTRT (mode, op0, op1);
2928
2929 case LROTATE_EXPR:
2930 return gen_rtx_ROTATE (mode, op0, op1);
2931
2932 case RROTATE_EXPR:
2933 return gen_rtx_ROTATERT (mode, op0, op1);
2934
2935 case MIN_EXPR:
2936 if (unsignedp)
2937 return gen_rtx_UMIN (mode, op0, op1);
2938 else
2939 return gen_rtx_SMIN (mode, op0, op1);
2940
2941 case MAX_EXPR:
2942 if (unsignedp)
2943 return gen_rtx_UMAX (mode, op0, op1);
2944 else
2945 return gen_rtx_SMAX (mode, op0, op1);
2946
2947 case BIT_AND_EXPR:
2948 case TRUTH_AND_EXPR:
2949 return gen_rtx_AND (mode, op0, op1);
2950
2951 case BIT_IOR_EXPR:
2952 case TRUTH_OR_EXPR:
2953 return gen_rtx_IOR (mode, op0, op1);
2954
2955 case BIT_XOR_EXPR:
2956 case TRUTH_XOR_EXPR:
2957 return gen_rtx_XOR (mode, op0, op1);
2958
2959 case TRUTH_ANDIF_EXPR:
2960 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2961
2962 case TRUTH_ORIF_EXPR:
2963 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2964
2965 case TRUTH_NOT_EXPR:
2966 return gen_rtx_EQ (mode, op0, const0_rtx);
2967
2968 case LT_EXPR:
2969 if (unsignedp)
2970 return gen_rtx_LTU (mode, op0, op1);
2971 else
2972 return gen_rtx_LT (mode, op0, op1);
2973
2974 case LE_EXPR:
2975 if (unsignedp)
2976 return gen_rtx_LEU (mode, op0, op1);
2977 else
2978 return gen_rtx_LE (mode, op0, op1);
2979
2980 case GT_EXPR:
2981 if (unsignedp)
2982 return gen_rtx_GTU (mode, op0, op1);
2983 else
2984 return gen_rtx_GT (mode, op0, op1);
2985
2986 case GE_EXPR:
2987 if (unsignedp)
2988 return gen_rtx_GEU (mode, op0, op1);
2989 else
2990 return gen_rtx_GE (mode, op0, op1);
2991
2992 case EQ_EXPR:
2993 return gen_rtx_EQ (mode, op0, op1);
2994
2995 case NE_EXPR:
2996 return gen_rtx_NE (mode, op0, op1);
2997
2998 case UNORDERED_EXPR:
2999 return gen_rtx_UNORDERED (mode, op0, op1);
3000
3001 case ORDERED_EXPR:
3002 return gen_rtx_ORDERED (mode, op0, op1);
3003
3004 case UNLT_EXPR:
3005 return gen_rtx_UNLT (mode, op0, op1);
3006
3007 case UNLE_EXPR:
3008 return gen_rtx_UNLE (mode, op0, op1);
3009
3010 case UNGT_EXPR:
3011 return gen_rtx_UNGT (mode, op0, op1);
3012
3013 case UNGE_EXPR:
3014 return gen_rtx_UNGE (mode, op0, op1);
3015
3016 case UNEQ_EXPR:
3017 return gen_rtx_UNEQ (mode, op0, op1);
3018
3019 case LTGT_EXPR:
3020 return gen_rtx_LTGT (mode, op0, op1);
3021
3022 case COND_EXPR:
3023 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3024
3025 case COMPLEX_EXPR:
3026 gcc_assert (COMPLEX_MODE_P (mode));
3027 if (GET_MODE (op0) == VOIDmode)
3028 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3029 if (GET_MODE (op1) == VOIDmode)
3030 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3031 return gen_rtx_CONCAT (mode, op0, op1);
3032
d02a5a4b
JJ
3033 case CONJ_EXPR:
3034 if (GET_CODE (op0) == CONCAT)
3035 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3036 gen_rtx_NEG (GET_MODE_INNER (mode),
3037 XEXP (op0, 1)));
3038 else
3039 {
3040 enum machine_mode imode = GET_MODE_INNER (mode);
3041 rtx re, im;
3042
3043 if (MEM_P (op0))
3044 {
3045 re = adjust_address_nv (op0, imode, 0);
3046 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3047 }
3048 else
3049 {
3050 enum machine_mode ifmode = int_mode_for_mode (mode);
3051 enum machine_mode ihmode = int_mode_for_mode (imode);
3052 rtx halfsize;
3053 if (ifmode == BLKmode || ihmode == BLKmode)
3054 return NULL;
3055 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3056 re = op0;
3057 if (mode != ifmode)
3058 re = gen_rtx_SUBREG (ifmode, re, 0);
3059 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3060 if (imode != ihmode)
3061 re = gen_rtx_SUBREG (imode, re, 0);
3062 im = copy_rtx (op0);
3063 if (mode != ifmode)
3064 im = gen_rtx_SUBREG (ifmode, im, 0);
3065 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3066 if (imode != ihmode)
3067 im = gen_rtx_SUBREG (imode, im, 0);
3068 }
3069 im = gen_rtx_NEG (imode, im);
3070 return gen_rtx_CONCAT (mode, re, im);
3071 }
3072
b5b8b0ac
AO
3073 case ADDR_EXPR:
3074 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3075 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3076 {
3077 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3078 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3079 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3080 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3081 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3082
3083 if (handled_component_p (TREE_OPERAND (exp, 0)))
3084 {
3085 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3086 tree decl
3087 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3088 &bitoffset, &bitsize, &maxsize);
3089 if ((TREE_CODE (decl) == VAR_DECL
3090 || TREE_CODE (decl) == PARM_DECL
3091 || TREE_CODE (decl) == RESULT_DECL)
3092 && !TREE_ADDRESSABLE (decl)
3093 && (bitoffset % BITS_PER_UNIT) == 0
3094 && bitsize > 0
3095 && bitsize == maxsize)
3096 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3097 bitoffset / BITS_PER_UNIT);
3098 }
3099
3100 return NULL;
3101 }
b5b8b0ac 3102
f61c6f34
JJ
3103 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3104 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3105
3106 return op0;
b5b8b0ac
AO
3107
3108 case VECTOR_CST:
3109 exp = build_constructor_from_list (TREE_TYPE (exp),
3110 TREE_VECTOR_CST_ELTS (exp));
3111 /* Fall through. */
3112
3113 case CONSTRUCTOR:
3114 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3115 {
3116 unsigned i;
3117 tree val;
3118
3119 op0 = gen_rtx_CONCATN
3120 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3121
3122 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3123 {
3124 op1 = expand_debug_expr (val);
3125 if (!op1)
3126 return NULL;
3127 XVECEXP (op0, 0, i) = op1;
3128 }
3129
3130 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3131 {
3132 op1 = expand_debug_expr
e8160c9a 3133 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3134
3135 if (!op1)
3136 return NULL;
3137
3138 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3139 XVECEXP (op0, 0, i) = op1;
3140 }
3141
3142 return op0;
3143 }
3144 else
3145 goto flag_unsupported;
3146
3147 case CALL_EXPR:
3148 /* ??? Maybe handle some builtins? */
3149 return NULL;
3150
3151 case SSA_NAME:
3152 {
2a8e30fb
MM
3153 gimple g = get_gimple_for_ssa_name (exp);
3154 if (g)
3155 {
3156 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3157 if (!op0)
3158 return NULL;
3159 }
3160 else
3161 {
3162 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3163
2a8e30fb 3164 if (part == NO_PARTITION)
a58a8e4b
JJ
3165 {
 3166	      /* If this is a reference to an incoming value of a parameter
3167 that is never used in the code or where the incoming
3168 value is never used in the code, use PARM_DECL's
3169 DECL_RTL if set. */
3170 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3171 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3172 {
3173 rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
3174 if (incoming
3175 && GET_MODE (incoming) != BLKmode
2b80199f
JJ
3176 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3177 || (MEM_P (incoming)
3178 && REG_P (XEXP (incoming, 0))
3179 && HARD_REGISTER_P (XEXP (incoming, 0)))))
a58a8e4b
JJ
3180 {
3181 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3182 ENTRY_VALUE_EXP (op0) = incoming;
3183 goto adjust_mode;
3184 }
3185 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3186 if (!op0)
3187 return NULL;
3188 goto adjust_mode;
3189 }
3190 return NULL;
3191 }
b5b8b0ac 3192
2a8e30fb 3193 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3194
abfea58d 3195 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3196 }
b5b8b0ac
AO
3197 goto adjust_mode;
3198 }
3199
3200 case ERROR_MARK:
3201 return NULL;
3202
7ece48b1
JJ
3203 /* Vector stuff. For most of the codes we don't have rtl codes. */
3204 case REALIGN_LOAD_EXPR:
3205 case REDUC_MAX_EXPR:
3206 case REDUC_MIN_EXPR:
3207 case REDUC_PLUS_EXPR:
3208 case VEC_COND_EXPR:
3209 case VEC_EXTRACT_EVEN_EXPR:
3210 case VEC_EXTRACT_ODD_EXPR:
3211 case VEC_INTERLEAVE_HIGH_EXPR:
3212 case VEC_INTERLEAVE_LOW_EXPR:
3213 case VEC_LSHIFT_EXPR:
3214 case VEC_PACK_FIX_TRUNC_EXPR:
3215 case VEC_PACK_SAT_EXPR:
3216 case VEC_PACK_TRUNC_EXPR:
3217 case VEC_RSHIFT_EXPR:
3218 case VEC_UNPACK_FLOAT_HI_EXPR:
3219 case VEC_UNPACK_FLOAT_LO_EXPR:
3220 case VEC_UNPACK_HI_EXPR:
3221 case VEC_UNPACK_LO_EXPR:
3222 case VEC_WIDEN_MULT_HI_EXPR:
3223 case VEC_WIDEN_MULT_LO_EXPR:
3224 return NULL;
3225
3226 /* Misc codes. */
3227 case ADDR_SPACE_CONVERT_EXPR:
3228 case FIXED_CONVERT_EXPR:
3229 case OBJ_TYPE_REF:
3230 case WITH_SIZE_EXPR:
3231 return NULL;
3232
3233 case DOT_PROD_EXPR:
3234 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3235 && SCALAR_INT_MODE_P (mode))
3236 {
3237 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3238 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3239 else
3240 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3241 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3242 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3243 else
3244 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3245 op0 = gen_rtx_MULT (mode, op0, op1);
3246 return gen_rtx_PLUS (mode, op0, op2);
3247 }
3248 return NULL;
3249
3250 case WIDEN_MULT_EXPR:
0354c0c7
BS
3251 case WIDEN_MULT_PLUS_EXPR:
3252 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3253 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3254 && SCALAR_INT_MODE_P (mode))
3255 {
5b58b39b 3256 enum machine_mode inner_mode = GET_MODE (op0);
7ece48b1 3257 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3258 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3259 else
5b58b39b 3260 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3261 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3262 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3263 else
5b58b39b 3264 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
0354c0c7
BS
3265 op0 = gen_rtx_MULT (mode, op0, op1);
3266 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3267 return op0;
3268 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3269 return gen_rtx_PLUS (mode, op0, op2);
3270 else
3271 return gen_rtx_MINUS (mode, op2, op0);
7ece48b1
JJ
3272 }
3273 return NULL;
3274
3275 case WIDEN_SUM_EXPR:
3276 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3277 && SCALAR_INT_MODE_P (mode))
3278 {
3279 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3280 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3281 else
3282 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3283 return gen_rtx_PLUS (mode, op0, op1);
3284 }
3285 return NULL;
3286
0f59b812
JJ
3287 case FMA_EXPR:
3288 return gen_rtx_FMA (mode, op0, op1, op2);
3289
b5b8b0ac
AO
3290 default:
3291 flag_unsupported:
3292#ifdef ENABLE_CHECKING
3293 debug_tree (exp);
3294 gcc_unreachable ();
3295#else
3296 return NULL;
3297#endif
3298 }
3299}
3300
3301/* Expand the _LOCs in debug insns. We run this after expanding all
3302 regular insns, so that any variables referenced in the function
3303 will have their DECL_RTLs set. */
3304
3305static void
3306expand_debug_locations (void)
3307{
3308 rtx insn;
3309 rtx last = get_last_insn ();
3310 int save_strict_alias = flag_strict_aliasing;
3311
3312 /* New alias sets while setting up memory attributes cause
3313 -fcompare-debug failures, even though it doesn't bring about any
3314 codegen changes. */
3315 flag_strict_aliasing = 0;
3316
3317 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3318 if (DEBUG_INSN_P (insn))
3319 {
3320 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3321 rtx val;
3322 enum machine_mode mode;
3323
3324 if (value == NULL_TREE)
3325 val = NULL_RTX;
3326 else
3327 {
3328 val = expand_debug_expr (value);
3329 gcc_assert (last == get_last_insn ());
3330 }
3331
3332 if (!val)
3333 val = gen_rtx_UNKNOWN_VAR_LOC ();
3334 else
3335 {
3336 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3337
3338 gcc_assert (mode == GET_MODE (val)
3339 || (GET_MODE (val) == VOIDmode
3340 && (CONST_INT_P (val)
3341 || GET_CODE (val) == CONST_FIXED
3342 || GET_CODE (val) == CONST_DOUBLE
3343 || GET_CODE (val) == LABEL_REF)));
3344 }
3345
3346 INSN_VAR_LOCATION_LOC (insn) = val;
3347 }
3348
3349 flag_strict_aliasing = save_strict_alias;
3350}
3351
242229bb
JH
3352/* Expand basic block BB from GIMPLE trees to RTL. */
3353
3354static basic_block
10d22567 3355expand_gimple_basic_block (basic_block bb)
242229bb 3356{
726a989a
RB
3357 gimple_stmt_iterator gsi;
3358 gimple_seq stmts;
3359 gimple stmt = NULL;
242229bb
JH
3360 rtx note, last;
3361 edge e;
628f6a4e 3362 edge_iterator ei;
8b11009b 3363 void **elt;
242229bb
JH
3364
3365 if (dump_file)
726a989a
RB
3366 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3367 bb->index);
3368
3369 /* Note that since we are now transitioning from GIMPLE to RTL, we
3370 cannot use the gsi_*_bb() routines because they expect the basic
3371 block to be in GIMPLE, instead of RTL. Therefore, we need to
3372 access the BB sequence directly. */
3373 stmts = bb_seq (bb);
3374 bb->il.gimple = NULL;
bf08ebeb 3375 rtl_profile_for_bb (bb);
5e2d947c
JH
3376 init_rtl_bb_info (bb);
3377 bb->flags |= BB_RTL;
3378
a9b77cd1
ZD
3379 /* Remove the RETURN_EXPR if we may fall though to the exit
3380 instead. */
726a989a
RB
3381 gsi = gsi_last (stmts);
3382 if (!gsi_end_p (gsi)
3383 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3384 {
726a989a 3385 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3386
3387 gcc_assert (single_succ_p (bb));
3388 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3389
3390 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3391 && !gimple_return_retval (ret_stmt))
a9b77cd1 3392 {
726a989a 3393 gsi_remove (&gsi, false);
a9b77cd1
ZD
3394 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3395 }
3396 }
3397
726a989a
RB
3398 gsi = gsi_start (stmts);
3399 if (!gsi_end_p (gsi))
8b11009b 3400 {
726a989a
RB
3401 stmt = gsi_stmt (gsi);
3402 if (gimple_code (stmt) != GIMPLE_LABEL)
3403 stmt = NULL;
8b11009b 3404 }
242229bb 3405
8b11009b
ZD
3406 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3407
3408 if (stmt || elt)
242229bb
JH
3409 {
3410 last = get_last_insn ();
3411
8b11009b
ZD
3412 if (stmt)
3413 {
28ed065e 3414 expand_gimple_stmt (stmt);
726a989a 3415 gsi_next (&gsi);
8b11009b
ZD
3416 }
3417
3418 if (elt)
ae50c0cb 3419 emit_label ((rtx) *elt);
242229bb 3420
caf93cb0 3421       /* Java emits line number notes at the top of labels.
c22cacf3 3422 ??? Make this go away once line number notes are obsoleted. */
242229bb 3423 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3424 if (NOTE_P (BB_HEAD (bb)))
242229bb 3425 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3426 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3427
726a989a 3428 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3429 }
3430 else
3431 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3432
3433 NOTE_BASIC_BLOCK (note) = bb;
3434
726a989a 3435 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3436 {
cea49550 3437 basic_block new_bb;
242229bb 3438
b5b8b0ac 3439 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3440
3441 /* If this statement is a non-debug one, and we generate debug
3442 insns, then this one might be the last real use of a TERed
3443 SSA_NAME, but where there are still some debug uses further
3444 down. Expanding the current SSA name in such further debug
3445 uses by their RHS might lead to wrong debug info, as coalescing
3446 might make the operands of such RHS be placed into the same
3447 pseudo as something else. Like so:
3448 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3449 use(a_1);
3450 a_2 = ...
3451 #DEBUG ... => a_1
3452 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3453	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3454 the write to a_2 would actually have clobbered the place which
3455 formerly held a_0.
3456
3457 So, instead of that, we recognize the situation, and generate
3458 debug temporaries at the last real use of TERed SSA names:
3459 a_1 = a_0 + 1;
3460 #DEBUG #D1 => a_1
3461 use(a_1);
3462 a_2 = ...
3463 #DEBUG ... => #D1
3464 */
3465 if (MAY_HAVE_DEBUG_INSNS
3466 && SA.values
3467 && !is_gimple_debug (stmt))
3468 {
3469 ssa_op_iter iter;
3470 tree op;
3471 gimple def;
3472
3473 location_t sloc = get_curr_insn_source_location ();
3474 tree sblock = get_curr_insn_block ();
3475
3476 /* Look for SSA names that have their last use here (TERed
3477 names always have only one real use). */
3478 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3479 if ((def = get_gimple_for_ssa_name (op)))
3480 {
3481 imm_use_iterator imm_iter;
3482 use_operand_p use_p;
3483 bool have_debug_uses = false;
3484
3485 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3486 {
3487 if (gimple_debug_bind_p (USE_STMT (use_p)))
3488 {
3489 have_debug_uses = true;
3490 break;
3491 }
3492 }
3493
3494 if (have_debug_uses)
3495 {
3496 /* OP is a TERed SSA name, with DEF it's defining
3497 statement, and where OP is used in further debug
3498 instructions. Generate a debug temporary, and
3499 replace all uses of OP in debug insns with that
3500 temporary. */
3501 gimple debugstmt;
3502 tree value = gimple_assign_rhs_to_tree (def);
3503 tree vexpr = make_node (DEBUG_EXPR_DECL);
3504 rtx val;
3505 enum machine_mode mode;
3506
3507 set_curr_insn_source_location (gimple_location (def));
3508 set_curr_insn_block (gimple_block (def));
3509
3510 DECL_ARTIFICIAL (vexpr) = 1;
3511 TREE_TYPE (vexpr) = TREE_TYPE (value);
3512 if (DECL_P (value))
3513 mode = DECL_MODE (value);
3514 else
3515 mode = TYPE_MODE (TREE_TYPE (value));
3516 DECL_MODE (vexpr) = mode;
3517
3518 val = gen_rtx_VAR_LOCATION
3519 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3520
e8c6bb74 3521 emit_debug_insn (val);
2a8e30fb
MM
3522
3523 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3524 {
3525 if (!gimple_debug_bind_p (debugstmt))
3526 continue;
3527
3528 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3529 SET_USE (use_p, vexpr);
3530
3531 update_stmt (debugstmt);
3532 }
3533 }
3534 }
3535 set_curr_insn_source_location (sloc);
3536 set_curr_insn_block (sblock);
3537 }
3538
a5883ba0 3539 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3540
242229bb
JH
3541 /* Expand this statement, then evaluate the resulting RTL and
3542 fixup the CFG accordingly. */
726a989a 3543 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3544 {
726a989a 3545 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3546 if (new_bb)
3547 return new_bb;
3548 }
b5b8b0ac
AO
3549 else if (gimple_debug_bind_p (stmt))
3550 {
3551 location_t sloc = get_curr_insn_source_location ();
3552 tree sblock = get_curr_insn_block ();
3553 gimple_stmt_iterator nsi = gsi;
3554
3555 for (;;)
3556 {
3557 tree var = gimple_debug_bind_get_var (stmt);
3558 tree value;
3559 rtx val;
3560 enum machine_mode mode;
3561
3562 if (gimple_debug_bind_has_value_p (stmt))
3563 value = gimple_debug_bind_get_value (stmt);
3564 else
3565 value = NULL_TREE;
3566
3567 last = get_last_insn ();
3568
3569 set_curr_insn_source_location (gimple_location (stmt));
3570 set_curr_insn_block (gimple_block (stmt));
3571
3572 if (DECL_P (var))
3573 mode = DECL_MODE (var);
3574 else
3575 mode = TYPE_MODE (TREE_TYPE (var));
3576
3577 val = gen_rtx_VAR_LOCATION
3578 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3579
e16b6fd0 3580 emit_debug_insn (val);
b5b8b0ac
AO
3581
3582 if (dump_file && (dump_flags & TDF_DETAILS))
3583 {
3584 /* We can't dump the insn with a TREE where an RTX
3585 is expected. */
e8c6bb74 3586 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 3587 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 3588 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
3589 }
3590
2a8e30fb
MM
3591 /* In order not to generate too many debug temporaries,
3592 we delink all uses of debug statements we already expanded.
3593 Therefore debug statements between definition and real
3594 use of TERed SSA names will continue to use the SSA name,
3595 and not be replaced with debug temps. */
3596 delink_stmt_imm_use (stmt);
3597
b5b8b0ac
AO
3598 gsi = nsi;
3599 gsi_next (&nsi);
3600 if (gsi_end_p (nsi))
3601 break;
3602 stmt = gsi_stmt (nsi);
3603 if (!gimple_debug_bind_p (stmt))
3604 break;
3605 }
3606
3607 set_curr_insn_source_location (sloc);
3608 set_curr_insn_block (sblock);
3609 }
80c7a9eb 3610 else
242229bb 3611 {
726a989a 3612 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
3613 {
3614 bool can_fallthru;
3615 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3616 if (new_bb)
3617 {
3618 if (can_fallthru)
3619 bb = new_bb;
3620 else
3621 return new_bb;
3622 }
3623 }
4d7a65ea 3624 else
b7211528 3625 {
4e3825db 3626 def_operand_p def_p;
4e3825db
MM
3627 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3628
3629 if (def_p != NULL)
3630 {
3631 /* Ignore this stmt if it is in the list of
3632 replaceable expressions. */
3633 if (SA.values
b8698a0f 3634 && bitmap_bit_p (SA.values,
e97809c6 3635 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
3636 continue;
3637 }
28ed065e 3638 last = expand_gimple_stmt (stmt);
726a989a 3639 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 3640 }
242229bb
JH
3641 }
3642 }
3643
a5883ba0
MM
3644 currently_expanding_gimple_stmt = NULL;
3645
7241571e 3646 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
3647 FOR_EACH_EDGE (e, ei, bb->succs)
3648 {
7241571e
JJ
3649 if (e->goto_locus && e->goto_block)
3650 {
3651 set_curr_insn_source_location (e->goto_locus);
3652 set_curr_insn_block (e->goto_block);
3653 e->goto_locus = curr_insn_locator ();
3654 }
3655 e->goto_block = NULL;
3656 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3657 {
3658 emit_jump (label_rtx_for_bb (e->dest));
3659 e->flags &= ~EDGE_FALLTHRU;
3660 }
a9b77cd1
ZD
3661 }
3662
ae761c45
AH
3663 /* Expanded RTL can create a jump in the last instruction of block.
3664 This later might be assumed to be a jump to successor and break edge insertion.
3665 We need to insert dummy move to prevent this. PR41440. */
3666 if (single_succ_p (bb)
3667 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3668 && (last = get_last_insn ())
3669 && JUMP_P (last))
3670 {
3671 rtx dummy = gen_reg_rtx (SImode);
3672 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3673 }
3674
242229bb
JH
3675 do_pending_stack_adjust ();
3676
3f117656 3677 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
3678 before a barrier and/or table jump insn. */
3679 last = get_last_insn ();
4b4bf941 3680 if (BARRIER_P (last))
242229bb
JH
3681 last = PREV_INSN (last);
3682 if (JUMP_TABLE_DATA_P (last))
3683 last = PREV_INSN (PREV_INSN (last));
3684 BB_END (bb) = last;
caf93cb0 3685
242229bb 3686 update_bb_for_insn (bb);
80c7a9eb 3687
242229bb
JH
3688 return bb;
3689}
3690
3691
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
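
/* As a rough illustration (a hypothetical block tree, not taken from any
   particular function), the numbering assigned above would be:

     { int a;          // level 0
       { int b; }      // level 1
       { int c;        // level 1
         { int d; }    // level 2
       }
     }

   change_scope later compares these BLOCK_NUMBERs to find the common
   parent of two scopes cheaply.  */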

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
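
/* As a minimal sketch of the kind of reference this catches (a
   hypothetical example, assuming a target where the union below fits
   in one non-BLKmode register):

     union { int a[2]; long long l; } u;
     int f (int i) { return u.a[i]; }

   Here u could otherwise be kept in a single register, but u.a[i] uses
   the non-constant index i, so the walk above marks u as
   TREE_ADDRESSABLE and it gets a stack slot instead.  */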

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
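
/* A rough illustration of when DRAP comes into play (a hypothetical
   example, assuming SUPPORTS_STACK_ALIGNMENT and a 16-byte incoming
   stack boundary):

     void g (int n)
     {
       float v[8] __attribute__ ((aligned (32)));
       char *p = __builtin_alloca (n);
       ...
     }

   The 32-byte local raises stack_alignment_estimated above the incoming
   boundary, so the frame must be realigned, and the alloca call sets
   need_drap above; crtl->args.internal_arg_pointer then becomes the
   DRAP register returned by targetm.calls.get_drap_rtx, so incoming
   arguments remain addressable after the realignment.  */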

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx var_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
                                           sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Avoid putting insns before parm_birth_insn.  */
              if (e->src == ENTRY_BLOCK_PTR
                  && single_succ_p (ENTRY_BLOCK_PTR)
                  && parm_birth_insn)
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
                  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};