gcc/cfgexpand.c
/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
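
/* For example, for a gimple assignment "x = a + b" (GIMPLE_BINARY_RHS)
   this returns build2 (PLUS_EXPR, TREE_TYPE (x), a, b), with the
   statement's location and block copied onto the resulting tree.  */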

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

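/* For instance, with FRAME_GROWS_DOWNWARD, frame_phase == 0,
   frame_offset == -20, size == 8 and align == 16, the computation above
   yields new_frame_offset = (-20 - 8) & -16 = -32, so the allocation is
   placed at offset -32 and frame_offset becomes -32.  */
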
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  unsigned j;
                  bitmap_iterator bj;
                  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
                    add_stack_var_conflict (i, j);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference of it
     upward.  But it's conservatively correct, as a variable can never hold
     values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}

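/* In effect this is a forward "may be live" dataflow problem over stack
   partitions: bb->aux accumulates the partitions active at the end of each
   block until a fixed point is reached, and a final pass over each block
   turns simultaneous liveness into conflict edges.  */
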
1f6d3a08 553/* A subroutine of partition_stack_vars. A comparison function for qsort,
3a42502d 554 sorting an array of indices by the properties of the object. */
1f6d3a08
RH
555
556static int
3a42502d 557stack_var_cmp (const void *a, const void *b)
1f6d3a08 558{
3a42502d
RH
559 size_t ia = *(const size_t *)a;
560 size_t ib = *(const size_t *)b;
561 unsigned int aligna = stack_vars[ia].alignb;
562 unsigned int alignb = stack_vars[ib].alignb;
563 HOST_WIDE_INT sizea = stack_vars[ia].size;
564 HOST_WIDE_INT sizeb = stack_vars[ib].size;
565 tree decla = stack_vars[ia].decl;
566 tree declb = stack_vars[ib].decl;
567 bool largea, largeb;
4e3825db 568 unsigned int uida, uidb;
1f6d3a08 569
3a42502d
RH
570 /* Primary compare on "large" alignment. Large comes first. */
571 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
572 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
573 if (largea != largeb)
574 return (int)largeb - (int)largea;
575
576 /* Secondary compare on size, decreasing */
3a42502d 577 if (sizea > sizeb)
6ddfda8a
ER
578 return -1;
579 if (sizea < sizeb)
1f6d3a08 580 return 1;
3a42502d
RH
581
582 /* Tertiary compare on true alignment, decreasing. */
583 if (aligna < alignb)
584 return -1;
585 if (aligna > alignb)
586 return 1;
587
588 /* Final compare on ID for sort stability, increasing.
589 Two SSA names are compared by their version, SSA names come before
590 non-SSA names, and two normal decls are compared by their DECL_UID. */
4e3825db
MM
591 if (TREE_CODE (decla) == SSA_NAME)
592 {
593 if (TREE_CODE (declb) == SSA_NAME)
594 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
595 else
596 return -1;
597 }
598 else if (TREE_CODE (declb) == SSA_NAME)
599 return 1;
600 else
601 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5 602 if (uida < uidb)
79f802f5 603 return 1;
3a42502d
RH
604 if (uida > uidb)
605 return -1;
1f6d3a08
RH
606 return 0;
607}
608
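/* With this ordering, all "large"-alignment objects (those whose required
   alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT) sort to the front, and
   within each group bigger objects precede smaller ones, so the binpacking
   loop in partition_stack_vars considers candidates from largest to
   smallest.  */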

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert(visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
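
/* For example, three non-conflicting locals of 40, 32 and 24 bytes end up
   in one partition: the 40-byte object is the representative, the smaller
   members are placed at the same frame offset by expand_stack_vars, and the
   frame uses 40 bytes for all three instead of 96.  */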

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
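
/* So, for example, at -O0 a 16-byte local in the outermost scope is expanded
   immediately (no deferral), while at -O2, or whenever -fstack-protector is
   in effect, even such small variables go through the STACK_VARS
   partitioning machinery above.  */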

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision has been made.  */
      gcc_assert(!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

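/* E.g. with the default ssp-buffer-size of 8 and -fstack-protector-all
   (flag_stack_protect == 2), "char buf[64];" classifies as a large char
   array and gets phase 1, "int v[16];" gets phase 2, and a plain scalar
   stays in phase 0.  */
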
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

1f6d3a08 1511/* Expand all variables used in the function. */
727a31fa
RH
1512
1513static void
1514expand_used_vars (void)
1515{
c021f10b
NF
1516 tree var, outer_block = DECL_INITIAL (current_function_decl);
1517 VEC(tree,heap) *maybe_local_decls = NULL;
4e3825db 1518 unsigned i;
c021f10b 1519 unsigned len;
727a31fa 1520
1f6d3a08
RH
1521 /* Compute the phase of the stack frame for this function. */
1522 {
1523 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1524 int off = STARTING_FRAME_OFFSET % align;
1525 frame_phase = off ? align - off : 0;
1526 }
727a31fa 1527
ff28a94d 1528 init_vars_expansion ();
7d69de61 1529
4e3825db
MM
1530 for (i = 0; i < SA.map->num_partitions; i++)
1531 {
1532 tree var = partition_to_var (SA.map, i);
1533
1534 gcc_assert (is_gimple_reg (var));
1535 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1536 expand_one_var (var, true, true);
1537 else
1538 {
1539 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1540 contain the default def (representing the parm or result itself)
1541 we don't do anything here. But those which don't contain the
1542 default def (representing a temporary based on the parm/result)
1543 we need to allocate space just like for normal VAR_DECLs. */
1544 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1545 {
1546 expand_one_var (var, true, true);
1547 gcc_assert (SA.partition_to_pseudo[i]);
1548 }
1549 }
1550 }
1551
cb91fab0 1552 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1553 set are not associated with any block scope. Lay them out. */
c021f10b
NF
1554
1555 len = VEC_length (tree, cfun->local_decls);
1556 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1557 {
1f6d3a08
RH
1558 bool expand_now = false;
1559
4e3825db
MM
1560 /* Expanded above already. */
1561 if (is_gimple_reg (var))
eb7adebc
MM
1562 {
1563 TREE_USED (var) = 0;
3adcf52c 1564 goto next;
eb7adebc 1565 }
1f6d3a08
RH
1566 /* We didn't set a block for static or extern because it's hard
1567 to tell the difference between a global variable (re)declared
1568 in a local scope, and one that's really declared there to
1569 begin with. And it doesn't really matter much, since we're
1570 not giving them stack space. Expand them now. */
4e3825db 1571 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1572 expand_now = true;
1573
1574 /* If the variable is not associated with any block, then it
1575 was created by the optimizers, and could be live anywhere
1576 in the function. */
1577 else if (TREE_USED (var))
1578 expand_now = true;
1579
1580 /* Finally, mark all variables on the list as used. We'll use
1581 this in a moment when we expand those associated with scopes. */
1582 TREE_USED (var) = 1;
1583
1584 if (expand_now)
3adcf52c
JM
1585 expand_one_var (var, true, true);
1586
1587 next:
1588 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1589 {
3adcf52c
JM
1590 rtx rtl = DECL_RTL_IF_SET (var);
1591
1592 /* Keep artificial non-ignored vars in cfun->local_decls
1593 chain until instantiate_decls. */
1594 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1595 add_local_decl (cfun, var);
6c6366f6 1596 else if (rtl == NULL_RTX)
c021f10b
NF
1597 /* If rtl isn't set yet, which can happen e.g. with
1598 -fstack-protector, retry before returning from this
1599 function. */
1600 VEC_safe_push (tree, heap, maybe_local_decls, var);
802e9f8e 1601 }
1f6d3a08 1602 }
1f6d3a08 1603
c021f10b
NF
1604 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1605
1606 +-----------------+-----------------+
1607 | ...processed... | ...duplicates...|
1608 +-----------------+-----------------+
1609 ^
1610 +-- LEN points here.
1611
1612 We just want the duplicates, as those are the artificial
1613 non-ignored vars that we want to keep until instantiate_decls.
1614 Move them down and truncate the array. */
1615 if (!VEC_empty (tree, cfun->local_decls))
1616 VEC_block_remove (tree, cfun->local_decls, 0, len);
1617
1f6d3a08
RH
1618 /* At this point, all variables within the block tree with TREE_USED
1619 set are actually used by the optimized function. Lay them out. */
1620 expand_used_vars_for_block (outer_block, true);
1621
1622 if (stack_vars_num > 0)
1623 {
47598145 1624 add_scope_conflicts ();
1f6d3a08 1625 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1626 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1627 reflect this. */
1628 add_alias_set_conflicts ();
1629
c22cacf3 1630 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1631 vulnerable data and non-vulnerable data. */
1632 if (flag_stack_protect)
1633 add_stack_protection_conflicts ();
1634
c22cacf3 1635 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1636 minimal interference graph, attempt to save some stack space. */
1637 partition_stack_vars ();
1638 if (dump_file)
1639 dump_stack_var_partition ();
7d69de61
RH
1640 }
1641
1642 /* There are several conditions under which we should create a
1643 stack guard: protect-all, alloca used, protected decls present. */
1644 if (flag_stack_protect == 2
1645 || (flag_stack_protect
e3b5732b 1646 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1647 create_stack_guard ();
1f6d3a08 1648
7d69de61
RH
1649 /* Assign rtl to each variable based on these partitions. */
1650 if (stack_vars_num > 0)
1651 {
1652 /* Reorder decls to be protected by iterating over the variables
1653 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1654 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1655 earlier, such that we naturally see these variables first,
1656 and thus naturally allocate things in the right order. */
1657 if (has_protected_decls)
1658 {
1659 /* Phase 1 contains only character arrays. */
1660 expand_stack_vars (stack_protect_decl_phase_1);
1661
1662 /* Phase 2 contains other kinds of arrays. */
1663 if (flag_stack_protect == 2)
1664 expand_stack_vars (stack_protect_decl_phase_2);
1665 }
1666
1667 expand_stack_vars (NULL);
1f6d3a08 1668
ff28a94d 1669 fini_vars_expansion ();
1f6d3a08
RH
1670 }
1671
6c6366f6
JJ
1672 /* If there were any artificial non-ignored vars without rtl
1673 found earlier, see if deferred stack allocation hasn't assigned
1674 rtl to them. */
c021f10b 1675 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
6c6366f6 1676 {
6c6366f6
JJ
1677 rtx rtl = DECL_RTL_IF_SET (var);
1678
6c6366f6
JJ
1679 /* Keep artificial non-ignored vars in cfun->local_decls
1680 chain until instantiate_decls. */
1681 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1682 add_local_decl (cfun, var);
6c6366f6 1683 }
c021f10b 1684 VEC_free (tree, heap, maybe_local_decls);
6c6366f6 1685
1f6d3a08
RH
1686 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1687 if (STACK_ALIGNMENT_NEEDED)
1688 {
1689 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1690 if (!FRAME_GROWS_DOWNWARD)
1691 frame_offset += align - 1;
1692 frame_offset &= -align;
1693 }
727a31fa
RH
1694}
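/* Illustrative sketch (not part of cfgexpand.c; the name is hypothetical):
   the FRAME_OFFSET rounding above is the usual power-of-two masking idiom,
   assuming two's-complement arithmetic and ALIGN a power of two.  */
static long
round_frame_offset_example (long offset, long align, int frame_grows_downward)
{
  if (!frame_grows_downward)
    offset += align - 1;	/* e.g. 35 + 15 == 50 for align 16 */
  return offset & -align;	/* 50 & -16 == 48; -35 & -16 == -48 */
}
/* For a downward-growing frame the mask alone moves a negative offset
   further from zero, so the aligned result still covers everything
   allocated so far.  */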
1695
1696
b7211528
SB
1697/* If we need to produce a detailed dump, print the tree representation
1698 for STMT to the dump file. SINCE is the last RTX after which the RTL
1699 generated for STMT should have been appended. */
1700
1701static void
726a989a 1702maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1703{
1704 if (dump_file && (dump_flags & TDF_DETAILS))
1705 {
1706 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1707 print_gimple_stmt (dump_file, stmt, 0,
1708 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1709 fprintf (dump_file, "\n");
1710
1711 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1712 }
1713}
1714
8b11009b
ZD
1715/* Maps the blocks that do not contain tree labels to rtx labels. */
1716
1717static struct pointer_map_t *lab_rtx_for_bb;
1718
a9b77cd1
ZD
1719/* Returns the label_rtx expression for a label starting basic block BB. */
1720
1721static rtx
726a989a 1722label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1723{
726a989a
RB
1724 gimple_stmt_iterator gsi;
1725 tree lab;
1726 gimple lab_stmt;
8b11009b 1727 void **elt;
a9b77cd1
ZD
1728
1729 if (bb->flags & BB_RTL)
1730 return block_label (bb);
1731
8b11009b
ZD
1732 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1733 if (elt)
ae50c0cb 1734 return (rtx) *elt;
8b11009b
ZD
1735
1736 /* Find the tree label if it is present. */
b8698a0f 1737
726a989a 1738 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1739 {
726a989a
RB
1740 lab_stmt = gsi_stmt (gsi);
1741 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1742 break;
1743
726a989a 1744 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1745 if (DECL_NONLOCAL (lab))
1746 break;
1747
1748 return label_rtx (lab);
1749 }
1750
8b11009b
ZD
1751 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1752 *elt = gen_label_rtx ();
ae50c0cb 1753 return (rtx) *elt;
a9b77cd1
ZD
1754}
1755
726a989a 1756
529ff441
MM
1757/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1758 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1759 possibly clean up the CFG and instruction sequence. LAST is the
1760 last instruction before the just emitted jump sequence. */
529ff441
MM
1761
1762static void
315adeda 1763maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1764{
1765 /* Special case: when jumpif decides that the condition is
1766 trivial it emits an unconditional jump (and the necessary
1767 barrier). But we still have two edges, the fallthru one is
1768 wrong. purge_dead_edges would clean this up later. Unfortunately
1769 we have to insert insns (and split edges) before
1770 find_many_sub_basic_blocks and hence before purge_dead_edges.
1771 But splitting edges might create new blocks which depend on the
1772 fact that if there are two edges there's no barrier. So the
1773 barrier would get lost and verify_flow_info would ICE. Instead
1774 of auditing all edge splitters to care for the barrier (which
1775 normally isn't there in a cleaned CFG), fix it here. */
1776 if (BARRIER_P (get_last_insn ()))
1777 {
529ff441
MM
1778 rtx insn;
1779 remove_edge (e);
 1780 /* Now we have a single successor block; if we have insns to
 1781 insert on the remaining edge, we will potentially insert
 1782 them at the end of this block (if the dest block isn't feasible)
1783 in order to avoid splitting the edge. This insertion will take
1784 place in front of the last jump. But we might have emitted
1785 multiple jumps (conditional and one unconditional) to the
1786 same destination. Inserting in front of the last one then
1787 is a problem. See PR 40021. We fix this by deleting all
1788 jumps except the last unconditional one. */
1789 insn = PREV_INSN (get_last_insn ());
1790 /* Make sure we have an unconditional jump. Otherwise we're
1791 confused. */
1792 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1793 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1794 {
1795 insn = PREV_INSN (insn);
1796 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1797 {
8a269cb7 1798 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1799 {
1800 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1801 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1802 }
1803 delete_insn (NEXT_INSN (insn));
1804 }
529ff441
MM
1805 }
1806 }
1807}
1808
726a989a 1809/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1810 Returns a new basic block if we've terminated the current basic
1811 block and created a new one. */
1812
1813static basic_block
726a989a 1814expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1815{
1816 basic_block new_bb, dest;
1817 edge new_edge;
1818 edge true_edge;
1819 edge false_edge;
b7211528 1820 rtx last2, last;
28ed065e
MM
1821 enum tree_code code;
1822 tree op0, op1;
1823
1824 code = gimple_cond_code (stmt);
1825 op0 = gimple_cond_lhs (stmt);
1826 op1 = gimple_cond_rhs (stmt);
1827 /* We're sometimes presented with such code:
1828 D.123_1 = x < y;
1829 if (D.123_1 != 0)
1830 ...
1831 This would expand to two comparisons which then later might
1832 be cleaned up by combine. But some pattern matchers like if-conversion
1833 work better when there's only one compare, so make up for this
 1834 here as a special exception if TER would have made the same change
 (see the example after this function). */
1835 if (gimple_cond_single_var_p (stmt)
1836 && SA.values
1837 && TREE_CODE (op0) == SSA_NAME
1838 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1839 {
1840 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1841 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1842 {
e83f4b68
MM
1843 enum tree_code code2 = gimple_assign_rhs_code (second);
1844 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1845 {
1846 code = code2;
1847 op0 = gimple_assign_rhs1 (second);
1848 op1 = gimple_assign_rhs2 (second);
1849 }
 1850 /* If jumps are cheap, turn some more codes into
1851 jumpy sequences. */
1852 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1853 {
1854 if ((code2 == BIT_AND_EXPR
1855 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1856 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1857 || code2 == TRUTH_AND_EXPR)
1858 {
1859 code = TRUTH_ANDIF_EXPR;
1860 op0 = gimple_assign_rhs1 (second);
1861 op1 = gimple_assign_rhs2 (second);
1862 }
1863 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1864 {
1865 code = TRUTH_ORIF_EXPR;
1866 op0 = gimple_assign_rhs1 (second);
1867 op1 = gimple_assign_rhs2 (second);
1868 }
1869 }
28ed065e
MM
1870 }
1871 }
b7211528
SB
1872
1873 last2 = last = get_last_insn ();
80c7a9eb
RH
1874
1875 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
c82fee88
EB
1876 set_curr_insn_source_location (gimple_location (stmt));
1877 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1878
1879 /* These flags have no purpose in RTL land. */
1880 true_edge->flags &= ~EDGE_TRUE_VALUE;
1881 false_edge->flags &= ~EDGE_FALSE_VALUE;
1882
1883 /* We can either have a pure conditional jump with one fallthru edge or
1884 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1885 if (false_edge->dest == bb->next_bb)
80c7a9eb 1886 {
40e90eac
JJ
1887 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1888 true_edge->probability);
726a989a 1889 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1890 if (true_edge->goto_locus)
7241571e
JJ
1891 {
1892 set_curr_insn_source_location (true_edge->goto_locus);
1893 set_curr_insn_block (true_edge->goto_block);
1894 true_edge->goto_locus = curr_insn_locator ();
1895 }
1896 true_edge->goto_block = NULL;
a9b77cd1 1897 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1898 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1899 return NULL;
1900 }
a9b77cd1 1901 if (true_edge->dest == bb->next_bb)
80c7a9eb 1902 {
40e90eac
JJ
1903 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1904 false_edge->probability);
726a989a 1905 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1906 if (false_edge->goto_locus)
7241571e
JJ
1907 {
1908 set_curr_insn_source_location (false_edge->goto_locus);
1909 set_curr_insn_block (false_edge->goto_block);
1910 false_edge->goto_locus = curr_insn_locator ();
1911 }
1912 false_edge->goto_block = NULL;
a9b77cd1 1913 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1914 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1915 return NULL;
1916 }
80c7a9eb 1917
40e90eac
JJ
1918 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1919 true_edge->probability);
80c7a9eb 1920 last = get_last_insn ();
7241571e
JJ
1921 if (false_edge->goto_locus)
1922 {
1923 set_curr_insn_source_location (false_edge->goto_locus);
1924 set_curr_insn_block (false_edge->goto_block);
1925 false_edge->goto_locus = curr_insn_locator ();
1926 }
1927 false_edge->goto_block = NULL;
a9b77cd1 1928 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1929
1930 BB_END (bb) = last;
1931 if (BARRIER_P (BB_END (bb)))
1932 BB_END (bb) = PREV_INSN (BB_END (bb));
1933 update_bb_for_insn (bb);
1934
1935 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1936 dest = false_edge->dest;
1937 redirect_edge_succ (false_edge, new_bb);
1938 false_edge->flags |= EDGE_FALLTHRU;
1939 new_bb->count = false_edge->count;
1940 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1941 new_edge = make_edge (new_bb, dest, 0);
1942 new_edge->probability = REG_BR_PROB_BASE;
1943 new_edge->count = new_bb->count;
1944 if (BARRIER_P (BB_END (new_bb)))
1945 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1946 update_bb_for_insn (new_bb);
1947
726a989a 1948 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1949
7787b4aa
JJ
1950 if (true_edge->goto_locus)
1951 {
1952 set_curr_insn_source_location (true_edge->goto_locus);
1953 set_curr_insn_block (true_edge->goto_block);
1954 true_edge->goto_locus = curr_insn_locator ();
1955 }
1956 true_edge->goto_block = NULL;
1957
80c7a9eb
RH
1958 return new_bb;
1959}
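/* Illustration only (hypothetical user code, not part of GCC): source like
   the following reaches expansion as
     D.123_1 = x < y;
     if (D.123_1 != 0) ...
   and, when the comparison is TERed, the special case near the top of
   expand_gimple_cond emits a single compare-and-branch instead of
   materializing D.123_1.  */
int
single_compare_example (int x, int y)
{
  int t = x < y;
  if (t)
    return 1;
  return 2;
}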
1960
0a35513e
AH
1961/* Mark all calls that can have a transaction restart. */
1962
1963static void
1964mark_transaction_restart_calls (gimple stmt)
1965{
1966 struct tm_restart_node dummy;
1967 void **slot;
1968
1969 if (!cfun->gimple_df->tm_restart)
1970 return;
1971
1972 dummy.stmt = stmt;
1973 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1974 if (slot)
1975 {
1976 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1977 tree list = n->label_or_list;
1978 rtx insn;
1979
1980 for (insn = next_real_insn (get_last_insn ());
1981 !CALL_P (insn);
1982 insn = next_real_insn (insn))
1983 continue;
1984
1985 if (TREE_CODE (list) == LABEL_DECL)
1986 add_reg_note (insn, REG_TM, label_rtx (list));
1987 else
1988 for (; list ; list = TREE_CHAIN (list))
1989 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1990 }
1991}
1992
28ed065e
MM
1993/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1994 statement STMT. */
1995
1996static void
1997expand_call_stmt (gimple stmt)
1998{
25583c4f 1999 tree exp, decl, lhs;
e23817b3 2000 bool builtin_p;
e7925582 2001 size_t i;
28ed065e 2002
25583c4f
RS
2003 if (gimple_call_internal_p (stmt))
2004 {
2005 expand_internal_call (stmt);
2006 return;
2007 }
2008
28ed065e
MM
2009 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2010
2011 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2012 decl = gimple_call_fndecl (stmt);
2013 builtin_p = decl && DECL_BUILT_IN (decl);
2014
e7925582
EB
2015 /* If this is not a builtin function, the function type through which the
2016 call is made may be different from the type of the function. */
2017 if (!builtin_p)
2018 CALL_EXPR_FN (exp)
b25aa0e8
EB
2019 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2020 CALL_EXPR_FN (exp));
e7925582 2021
28ed065e
MM
2022 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2023 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2024
2025 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2026 {
2027 tree arg = gimple_call_arg (stmt, i);
2028 gimple def;
2029 /* TER addresses into arguments of builtin functions so we have a
2030 chance to infer more correct alignment information. See PR39954. */
2031 if (builtin_p
2032 && TREE_CODE (arg) == SSA_NAME
2033 && (def = get_gimple_for_ssa_name (arg))
2034 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2035 arg = gimple_assign_rhs1 (def);
2036 CALL_EXPR_ARG (exp, i) = arg;
2037 }
28ed065e 2038
93f28ca7 2039 if (gimple_has_side_effects (stmt))
28ed065e
MM
2040 TREE_SIDE_EFFECTS (exp) = 1;
2041
93f28ca7 2042 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2043 TREE_NOTHROW (exp) = 1;
2044
2045 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2046 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2047 if (decl
2048 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2049 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2050 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2051 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2052 else
2053 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2054 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2055 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2056 TREE_BLOCK (exp) = gimple_block (stmt);
2057
ddb555ed
JJ
2058 /* Ensure RTL is created for debug args. */
2059 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2060 {
2061 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2062 unsigned int ix;
2063 tree dtemp;
2064
2065 if (debug_args)
2066 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2067 {
2068 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2069 expand_debug_expr (dtemp);
2070 }
2071 }
2072
25583c4f 2073 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2074 if (lhs)
2075 expand_assignment (lhs, exp, false);
2076 else
2077 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2078
2079 mark_transaction_restart_calls (stmt);
28ed065e
MM
2080}
2081
2082/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2083 STMT that doesn't require special handling for outgoing edges. That
2084 is no tailcalls and no GIMPLE_COND. */
2085
2086static void
2087expand_gimple_stmt_1 (gimple stmt)
2088{
2089 tree op0;
c82fee88
EB
2090
2091 set_curr_insn_source_location (gimple_location (stmt));
2092 set_curr_insn_block (gimple_block (stmt));
2093
28ed065e
MM
2094 switch (gimple_code (stmt))
2095 {
2096 case GIMPLE_GOTO:
2097 op0 = gimple_goto_dest (stmt);
2098 if (TREE_CODE (op0) == LABEL_DECL)
2099 expand_goto (op0);
2100 else
2101 expand_computed_goto (op0);
2102 break;
2103 case GIMPLE_LABEL:
2104 expand_label (gimple_label_label (stmt));
2105 break;
2106 case GIMPLE_NOP:
2107 case GIMPLE_PREDICT:
2108 break;
28ed065e
MM
2109 case GIMPLE_SWITCH:
2110 expand_case (stmt);
2111 break;
2112 case GIMPLE_ASM:
2113 expand_asm_stmt (stmt);
2114 break;
2115 case GIMPLE_CALL:
2116 expand_call_stmt (stmt);
2117 break;
2118
2119 case GIMPLE_RETURN:
2120 op0 = gimple_return_retval (stmt);
2121
2122 if (op0 && op0 != error_mark_node)
2123 {
2124 tree result = DECL_RESULT (current_function_decl);
2125
2126 /* If we are not returning the current function's RESULT_DECL,
2127 build an assignment to it. */
2128 if (op0 != result)
2129 {
2130 /* I believe that a function's RESULT_DECL is unique. */
2131 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2132
 2133 /* ??? We'd like to simply use expand_assignment here,
2134 but this fails if the value is of BLKmode but the return
2135 decl is a register. expand_return has special handling
2136 for this combination, which eventually should move
2137 to common code. See comments there. Until then, let's
2138 build a modify expression :-/ */
2139 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2140 result, op0);
2141 }
2142 }
2143 if (!op0)
2144 expand_null_return ();
2145 else
2146 expand_return (op0);
2147 break;
2148
2149 case GIMPLE_ASSIGN:
2150 {
2151 tree lhs = gimple_assign_lhs (stmt);
2152
2153 /* Tree expand used to fiddle with |= and &= of two bitfield
 2154 COMPONENT_REFs here. This can't happen with gimple; the LHS
2155 of binary assigns must be a gimple reg. */
2156
2157 if (TREE_CODE (lhs) != SSA_NAME
2158 || get_gimple_rhs_class (gimple_expr_code (stmt))
2159 == GIMPLE_SINGLE_RHS)
2160 {
2161 tree rhs = gimple_assign_rhs1 (stmt);
2162 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2163 == GIMPLE_SINGLE_RHS);
2164 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2165 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
2166 if (TREE_CLOBBER_P (rhs))
2167 /* This is a clobber to mark the going out of scope for
2168 this LHS. */
2169 ;
2170 else
2171 expand_assignment (lhs, rhs,
2172 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
2173 }
2174 else
2175 {
2176 rtx target, temp;
2177 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2178 struct separate_ops ops;
2179 bool promoted = false;
2180
2181 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2182 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2183 promoted = true;
2184
2185 ops.code = gimple_assign_rhs_code (stmt);
2186 ops.type = TREE_TYPE (lhs);
2187 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2188 {
0354c0c7
BS
2189 case GIMPLE_TERNARY_RHS:
2190 ops.op2 = gimple_assign_rhs3 (stmt);
2191 /* Fallthru */
28ed065e
MM
2192 case GIMPLE_BINARY_RHS:
2193 ops.op1 = gimple_assign_rhs2 (stmt);
2194 /* Fallthru */
2195 case GIMPLE_UNARY_RHS:
2196 ops.op0 = gimple_assign_rhs1 (stmt);
2197 break;
2198 default:
2199 gcc_unreachable ();
2200 }
2201 ops.location = gimple_location (stmt);
2202
 2203 /* If we want to use a nontemporal store, force the value into
 2204 a register first. If we store into a promoted register,
2205 don't directly expand to target. */
2206 temp = nontemporal || promoted ? NULL_RTX : target;
2207 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2208 EXPAND_NORMAL);
2209
2210 if (temp == target)
2211 ;
2212 else if (promoted)
2213 {
4e18a7d4 2214 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2215 /* If TEMP is a VOIDmode constant, use convert_modes to make
2216 sure that we properly convert it. */
2217 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2218 {
2219 temp = convert_modes (GET_MODE (target),
2220 TYPE_MODE (ops.type),
4e18a7d4 2221 temp, unsignedp);
28ed065e 2222 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2223 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2224 }
2225
4e18a7d4 2226 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2227 }
2228 else if (nontemporal && emit_storent_insn (target, temp))
2229 ;
2230 else
2231 {
2232 temp = force_operand (temp, target);
2233 if (temp != target)
2234 emit_move_insn (target, temp);
2235 }
2236 }
2237 }
2238 break;
2239
2240 default:
2241 gcc_unreachable ();
2242 }
2243}
2244
2245/* Expand one gimple statement STMT and return the last RTL instruction
2246 before any of the newly generated ones.
2247
2248 In addition to generating the necessary RTL instructions this also
2249 sets REG_EH_REGION notes if necessary and sets the current source
2250 location for diagnostics. */
2251
2252static rtx
2253expand_gimple_stmt (gimple stmt)
2254{
28ed065e 2255 location_t saved_location = input_location;
c82fee88
EB
2256 rtx last = get_last_insn ();
2257 int lp_nr;
28ed065e 2258
28ed065e
MM
2259 gcc_assert (cfun);
2260
c82fee88
EB
2261 /* We need to save and restore the current source location so that errors
2262 discovered during expansion are emitted with the right location. But
2263 it would be better if the diagnostic routines used the source location
2264 embedded in the tree nodes rather than globals. */
28ed065e 2265 if (gimple_has_location (stmt))
c82fee88 2266 input_location = gimple_location (stmt);
28ed065e
MM
2267
2268 expand_gimple_stmt_1 (stmt);
c82fee88 2269
28ed065e
MM
2270 /* Free any temporaries used to evaluate this statement. */
2271 free_temp_slots ();
2272
2273 input_location = saved_location;
2274
2275 /* Mark all insns that may trap. */
1d65f45c
RH
2276 lp_nr = lookup_stmt_eh_lp (stmt);
2277 if (lp_nr)
28ed065e
MM
2278 {
2279 rtx insn;
2280 for (insn = next_real_insn (last); insn;
2281 insn = next_real_insn (insn))
2282 {
2283 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2284 /* If we want exceptions for non-call insns, any
2285 may_trap_p instruction may throw. */
2286 && GET_CODE (PATTERN (insn)) != CLOBBER
2287 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2288 && insn_could_throw_p (insn))
2289 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2290 }
2291 }
2292
2293 return last;
2294}
2295
726a989a 2296/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2297 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2298 generated a tail call (something that might be denied by the ABI
cea49550
RH
2299 rules governing the call; see calls.c).
2300
2301 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2302 can still reach the rest of BB. The case here is __builtin_sqrt,
2303 where the NaN result goes through the external function (with a
2304 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2305
2306static basic_block
726a989a 2307expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2308{
b7211528 2309 rtx last2, last;
224e770b 2310 edge e;
628f6a4e 2311 edge_iterator ei;
224e770b
RH
2312 int probability;
2313 gcov_type count;
80c7a9eb 2314
28ed065e 2315 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2316
2317 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2318 if (CALL_P (last) && SIBLING_CALL_P (last))
2319 goto found;
80c7a9eb 2320
726a989a 2321 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2322
cea49550 2323 *can_fallthru = true;
224e770b 2324 return NULL;
80c7a9eb 2325
224e770b
RH
2326 found:
2327 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2328 Any instructions emitted here are about to be deleted. */
2329 do_pending_stack_adjust ();
2330
2331 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2332 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2333 EH or abnormal edges, we shouldn't have created a tail call in
2334 the first place. So it seems to me we should just be removing
2335 all edges here, or redirecting the existing fallthru edge to
2336 the exit block. */
2337
224e770b
RH
2338 probability = 0;
2339 count = 0;
224e770b 2340
628f6a4e
BE
2341 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2342 {
224e770b
RH
2343 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2344 {
2345 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2346 {
224e770b
RH
2347 e->dest->count -= e->count;
2348 e->dest->frequency -= EDGE_FREQUENCY (e);
2349 if (e->dest->count < 0)
c22cacf3 2350 e->dest->count = 0;
224e770b 2351 if (e->dest->frequency < 0)
c22cacf3 2352 e->dest->frequency = 0;
80c7a9eb 2353 }
224e770b
RH
2354 count += e->count;
2355 probability += e->probability;
2356 remove_edge (e);
80c7a9eb 2357 }
628f6a4e
BE
2358 else
2359 ei_next (&ei);
80c7a9eb
RH
2360 }
2361
224e770b
RH
2362 /* This is somewhat ugly: the call_expr expander often emits instructions
2363 after the sibcall (to perform the function return). These confuse the
12eff7b7 2364 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 2365 last = NEXT_INSN (last);
341c100f 2366 gcc_assert (BARRIER_P (last));
cea49550
RH
2367
2368 *can_fallthru = false;
224e770b
RH
2369 while (NEXT_INSN (last))
2370 {
 2371 /* For instance, an sqrt builtin expander expands an if with a
 2372 sibcall in the then-arm and a label for the else-arm. */
2373 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2374 {
2375 *can_fallthru = true;
2376 break;
2377 }
224e770b
RH
2378 delete_insn (NEXT_INSN (last));
2379 }
2380
2381 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2382 e->probability += probability;
2383 e->count += count;
2384 BB_END (bb) = last;
2385 update_bb_for_insn (bb);
2386
2387 if (NEXT_INSN (last))
2388 {
2389 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2390
2391 last = BB_END (bb);
2392 if (BARRIER_P (last))
2393 BB_END (bb) = PREV_INSN (last);
2394 }
2395
726a989a 2396 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2397
224e770b 2398 return bb;
80c7a9eb
RH
2399}
2400
b5b8b0ac
AO
2401/* Return the difference between the floor and the truncated result of
2402 a signed division by OP1 with remainder MOD. */
2403static rtx
2404floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2405{
2406 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2407 return gen_rtx_IF_THEN_ELSE
2408 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2409 gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_LT (BImode,
2411 gen_rtx_DIV (mode, op1, mod),
2412 const0_rtx),
2413 constm1_rtx, const0_rtx),
2414 const0_rtx);
2415}
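/* Illustrative helper (not part of this file; name hypothetical): in scalar
   terms the RTL built above is the usual floor-division correction on top
   of C's truncating division.  */
static int
floor_div_example (int a, int b)
{
  int q = a / b;		/* truncated quotient */
  int mod = a % b;		/* remainder, same sign as A */
  /* (mod != 0 ? (b / mod < 0 ? -1 : 0) : 0); B / MOD is negative exactly
     when A and B have opposite signs.  */
  int adj = (mod != 0 && b / mod < 0) ? -1 : 0;
  return q + adj;		/* floor_div_example (-7, 2) == -4 */
}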
2416
2417/* Return the difference between the ceil and the truncated result of
2418 a signed division by OP1 with remainder MOD. */
2419static rtx
2420ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2421{
2422 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2423 return gen_rtx_IF_THEN_ELSE
2424 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2425 gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_GT (BImode,
2427 gen_rtx_DIV (mode, op1, mod),
2428 const0_rtx),
2429 const1_rtx, const0_rtx),
2430 const0_rtx);
2431}
2432
2433/* Return the difference between the ceil and the truncated result of
2434 an unsigned division by OP1 with remainder MOD. */
2435static rtx
2436ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2437{
2438 /* (mod != 0 ? 1 : 0) */
2439 return gen_rtx_IF_THEN_ELSE
2440 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2441 const1_rtx, const0_rtx);
2442}
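/* Illustrative helper (not part of this file; name hypothetical): for
   unsigned operands the ceiling adjustment above reduces to adding one
   whenever the remainder is nonzero.  */
static unsigned int
ceil_udiv_example (unsigned int a, unsigned int b)
{
  return a / b + (a % b != 0);	/* ceil_udiv_example (7, 2) == 4 */
}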
2443
2444/* Return the difference between the rounded and the truncated result
2445 of a signed division by OP1 with remainder MOD. Halfway cases are
2446 rounded away from zero, rather than to the nearest even number. */
2447static rtx
2448round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2449{
2450 /* (abs (mod) >= abs (op1) - abs (mod)
2451 ? (op1 / mod > 0 ? 1 : -1)
2452 : 0) */
2453 return gen_rtx_IF_THEN_ELSE
2454 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2455 gen_rtx_MINUS (mode,
2456 gen_rtx_ABS (mode, op1),
2457 gen_rtx_ABS (mode, mod))),
2458 gen_rtx_IF_THEN_ELSE
2459 (mode, gen_rtx_GT (BImode,
2460 gen_rtx_DIV (mode, op1, mod),
2461 const0_rtx),
2462 const1_rtx, constm1_rtx),
2463 const0_rtx);
2464}
2465
2466/* Return the difference between the rounded and the truncated result
 2467 of an unsigned division by OP1 with remainder MOD. Halfway cases
2468 are rounded away from zero, rather than to the nearest even
2469 number. */
2470static rtx
2471round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2472{
2473 /* (mod >= op1 - mod ? 1 : 0) */
2474 return gen_rtx_IF_THEN_ELSE
2475 (mode, gen_rtx_GE (BImode, mod,
2476 gen_rtx_MINUS (mode, op1, mod)),
2477 const1_rtx, const0_rtx);
2478}
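/* Illustrative helper (not part of this file; name hypothetical): the
   unsigned round-to-nearest adjustment above compares the remainder with
   the distance to the next multiple of B.  */
static unsigned int
round_udiv_example (unsigned int a, unsigned int b)
{
  unsigned int q = a / b, mod = a % b;
  return q + (mod >= b - mod);	/* round_udiv_example (7, 2) == 4,
				   round_udiv_example (4, 3) == 1 */
}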
2479
dda2da58
AO
2480/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2481 any rtl. */
2482
2483static rtx
f61c6f34
JJ
2484convert_debug_memory_address (enum machine_mode mode, rtx x,
2485 addr_space_t as)
dda2da58
AO
2486{
2487 enum machine_mode xmode = GET_MODE (x);
2488
2489#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2490 gcc_assert (mode == Pmode
2491 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2492 gcc_assert (xmode == mode || xmode == VOIDmode);
2493#else
f61c6f34 2494 rtx temp;
f61c6f34 2495
639d4bb8 2496 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
2497
2498 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2499 return x;
2500
69660a70 2501 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
2502 x = simplify_gen_subreg (mode, x, xmode,
2503 subreg_lowpart_offset
2504 (mode, xmode));
2505 else if (POINTERS_EXTEND_UNSIGNED > 0)
2506 x = gen_rtx_ZERO_EXTEND (mode, x);
2507 else if (!POINTERS_EXTEND_UNSIGNED)
2508 x = gen_rtx_SIGN_EXTEND (mode, x);
2509 else
f61c6f34
JJ
2510 {
2511 switch (GET_CODE (x))
2512 {
2513 case SUBREG:
2514 if ((SUBREG_PROMOTED_VAR_P (x)
2515 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2516 || (GET_CODE (SUBREG_REG (x)) == PLUS
2517 && REG_P (XEXP (SUBREG_REG (x), 0))
2518 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2519 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2520 && GET_MODE (SUBREG_REG (x)) == mode)
2521 return SUBREG_REG (x);
2522 break;
2523 case LABEL_REF:
2524 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2525 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2526 return temp;
2527 case SYMBOL_REF:
2528 temp = shallow_copy_rtx (x);
2529 PUT_MODE (temp, mode);
2530 return temp;
2531 case CONST:
2532 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2533 if (temp)
2534 temp = gen_rtx_CONST (mode, temp);
2535 return temp;
2536 case PLUS:
2537 case MINUS:
2538 if (CONST_INT_P (XEXP (x, 1)))
2539 {
2540 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2541 if (temp)
2542 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2543 }
2544 break;
2545 default:
2546 break;
2547 }
2548 /* Don't know how to express ptr_extend as operation in debug info. */
2549 return NULL;
2550 }
dda2da58
AO
2551#endif /* POINTERS_EXTEND_UNSIGNED */
2552
2553 return x;
2554}
2555
12c5ffe5
EB
2556/* Return an RTX equivalent to the value of the parameter DECL. */
2557
2558static rtx
2559expand_debug_parm_decl (tree decl)
2560{
2561 rtx incoming = DECL_INCOMING_RTL (decl);
2562
2563 if (incoming
2564 && GET_MODE (incoming) != BLKmode
2565 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2566 || (MEM_P (incoming)
2567 && REG_P (XEXP (incoming, 0))
2568 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2569 {
2570 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2571
2572#ifdef HAVE_window_save
2573 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2574 If the target machine has an explicit window save instruction, the
2575 actual entry value is the corresponding OUTGOING_REGNO instead. */
2576 if (REG_P (incoming)
2577 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2578 incoming
2579 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2580 OUTGOING_REGNO (REGNO (incoming)), 0);
2581 else if (MEM_P (incoming))
2582 {
2583 rtx reg = XEXP (incoming, 0);
2584 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2585 {
2586 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2587 incoming = replace_equiv_address_nv (incoming, reg);
2588 }
2589 }
2590#endif
2591
2592 ENTRY_VALUE_EXP (rtl) = incoming;
2593 return rtl;
2594 }
2595
2596 if (incoming
2597 && GET_MODE (incoming) != BLKmode
2598 && !TREE_ADDRESSABLE (decl)
2599 && MEM_P (incoming)
2600 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2601 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2602 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2603 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2604 return incoming;
2605
2606 return NULL_RTX;
2607}
2608
2609/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
2610
2611static rtx
2612expand_debug_expr (tree exp)
2613{
2614 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2615 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 2616 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 2617 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2618 addr_space_t as;
b5b8b0ac
AO
2619
2620 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2621 {
2622 case tcc_expression:
2623 switch (TREE_CODE (exp))
2624 {
2625 case COND_EXPR:
7ece48b1 2626 case DOT_PROD_EXPR:
0354c0c7
BS
2627 case WIDEN_MULT_PLUS_EXPR:
2628 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2629 case FMA_EXPR:
b5b8b0ac
AO
2630 goto ternary;
2631
2632 case TRUTH_ANDIF_EXPR:
2633 case TRUTH_ORIF_EXPR:
2634 case TRUTH_AND_EXPR:
2635 case TRUTH_OR_EXPR:
2636 case TRUTH_XOR_EXPR:
2637 goto binary;
2638
2639 case TRUTH_NOT_EXPR:
2640 goto unary;
2641
2642 default:
2643 break;
2644 }
2645 break;
2646
2647 ternary:
2648 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2649 if (!op2)
2650 return NULL_RTX;
2651 /* Fall through. */
2652
2653 binary:
2654 case tcc_binary:
2655 case tcc_comparison:
2656 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2657 if (!op1)
2658 return NULL_RTX;
2659 /* Fall through. */
2660
2661 unary:
2662 case tcc_unary:
2ba172e0 2663 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2664 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2665 if (!op0)
2666 return NULL_RTX;
2667 break;
2668
2669 case tcc_type:
2670 case tcc_statement:
2671 gcc_unreachable ();
2672
2673 case tcc_constant:
2674 case tcc_exceptional:
2675 case tcc_declaration:
2676 case tcc_reference:
2677 case tcc_vl_exp:
2678 break;
2679 }
2680
2681 switch (TREE_CODE (exp))
2682 {
2683 case STRING_CST:
2684 if (!lookup_constant_def (exp))
2685 {
e1b243a8
JJ
2686 if (strlen (TREE_STRING_POINTER (exp)) + 1
2687 != (size_t) TREE_STRING_LENGTH (exp))
2688 return NULL_RTX;
b5b8b0ac
AO
2689 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2690 op0 = gen_rtx_MEM (BLKmode, op0);
2691 set_mem_attributes (op0, exp, 0);
2692 return op0;
2693 }
2694 /* Fall through... */
2695
2696 case INTEGER_CST:
2697 case REAL_CST:
2698 case FIXED_CST:
2699 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2700 return op0;
2701
2702 case COMPLEX_CST:
2703 gcc_assert (COMPLEX_MODE_P (mode));
2704 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2705 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2706 return gen_rtx_CONCAT (mode, op0, op1);
2707
0ca5af51
AO
2708 case DEBUG_EXPR_DECL:
2709 op0 = DECL_RTL_IF_SET (exp);
2710
2711 if (op0)
2712 return op0;
2713
2714 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2715 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2716 SET_DECL_RTL (exp, op0);
2717
2718 return op0;
2719
b5b8b0ac
AO
2720 case VAR_DECL:
2721 case PARM_DECL:
2722 case FUNCTION_DECL:
2723 case LABEL_DECL:
2724 case CONST_DECL:
2725 case RESULT_DECL:
2726 op0 = DECL_RTL_IF_SET (exp);
2727
2728 /* This decl was probably optimized away. */
2729 if (!op0)
e1b243a8
JJ
2730 {
2731 if (TREE_CODE (exp) != VAR_DECL
2732 || DECL_EXTERNAL (exp)
2733 || !TREE_STATIC (exp)
2734 || !DECL_NAME (exp)
0fba566c 2735 || DECL_HARD_REGISTER (exp)
7d5fc814 2736 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 2737 || mode == VOIDmode)
e1b243a8
JJ
2738 return NULL;
2739
b1aa0655 2740 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2741 if (!MEM_P (op0)
2742 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2743 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2744 return NULL;
2745 }
2746 else
2747 op0 = copy_rtx (op0);
b5b8b0ac 2748
06796564
JJ
2749 if (GET_MODE (op0) == BLKmode
2750 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2751 below would ICE. While it is likely a FE bug,
2752 try to be robust here. See PR43166. */
132b4e82
JJ
2753 || mode == BLKmode
2754 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2755 {
2756 gcc_assert (MEM_P (op0));
2757 op0 = adjust_address_nv (op0, mode, 0);
2758 return op0;
2759 }
2760
2761 /* Fall through. */
2762
2763 adjust_mode:
2764 case PAREN_EXPR:
2765 case NOP_EXPR:
2766 case CONVERT_EXPR:
2767 {
2ba172e0 2768 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
2769
2770 if (mode == inner_mode)
2771 return op0;
2772
2773 if (inner_mode == VOIDmode)
2774 {
2a8e30fb
MM
2775 if (TREE_CODE (exp) == SSA_NAME)
2776 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2777 else
2778 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2779 if (mode == inner_mode)
2780 return op0;
2781 }
2782
2783 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2784 {
2785 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2786 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2787 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2788 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2789 else
2790 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2791 }
2792 else if (FLOAT_MODE_P (mode))
2793 {
2a8e30fb 2794 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2795 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2796 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2797 else
2798 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2799 }
2800 else if (FLOAT_MODE_P (inner_mode))
2801 {
2802 if (unsignedp)
2803 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2804 else
2805 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2806 }
2807 else if (CONSTANT_P (op0)
69660a70 2808 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
2809 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2810 subreg_lowpart_offset (mode,
2811 inner_mode));
1b47fe3f
JJ
2812 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2813 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2814 : unsignedp)
2ba172e0 2815 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 2816 else
2ba172e0 2817 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
2818
2819 return op0;
2820 }
2821
70f34814 2822 case MEM_REF:
71f3a3f5
JJ
2823 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2824 {
2825 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2826 TREE_OPERAND (exp, 0),
2827 TREE_OPERAND (exp, 1));
2828 if (newexp)
2829 return expand_debug_expr (newexp);
2830 }
2831 /* FALLTHROUGH */
b5b8b0ac 2832 case INDIRECT_REF:
b5b8b0ac
AO
2833 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2834 if (!op0)
2835 return NULL;
2836
cb115041
JJ
2837 if (TREE_CODE (exp) == MEM_REF)
2838 {
583ac69c
JJ
2839 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2840 || (GET_CODE (op0) == PLUS
2841 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2842 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2843 Instead just use get_inner_reference. */
2844 goto component_ref;
2845
cb115041
JJ
2846 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2847 if (!op1 || !CONST_INT_P (op1))
2848 return NULL;
2849
2850 op0 = plus_constant (op0, INTVAL (op1));
2851 }
2852
09e881c9 2853 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2854 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2855 else
75421dcd 2856 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2857
f61c6f34
JJ
2858 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2859 op0, as);
2860 if (op0 == NULL_RTX)
2861 return NULL;
b5b8b0ac 2862
f61c6f34 2863 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2864 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2865 if (TREE_CODE (exp) == MEM_REF
2866 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2867 set_mem_expr (op0, NULL_TREE);
09e881c9 2868 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2869
2870 return op0;
2871
2872 case TARGET_MEM_REF:
4d948885
RG
2873 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2874 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2875 return NULL;
2876
2877 op0 = expand_debug_expr
4e25ca6b 2878 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2879 if (!op0)
2880 return NULL;
2881
f61c6f34
JJ
2882 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2883 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2884 else
2885 as = ADDR_SPACE_GENERIC;
2886
2887 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2888 op0, as);
2889 if (op0 == NULL_RTX)
2890 return NULL;
b5b8b0ac
AO
2891
2892 op0 = gen_rtx_MEM (mode, op0);
2893
2894 set_mem_attributes (op0, exp, 0);
09e881c9 2895 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2896
2897 return op0;
2898
583ac69c 2899 component_ref:
b5b8b0ac
AO
2900 case ARRAY_REF:
2901 case ARRAY_RANGE_REF:
2902 case COMPONENT_REF:
2903 case BIT_FIELD_REF:
2904 case REALPART_EXPR:
2905 case IMAGPART_EXPR:
2906 case VIEW_CONVERT_EXPR:
2907 {
2908 enum machine_mode mode1;
2909 HOST_WIDE_INT bitsize, bitpos;
2910 tree offset;
2911 int volatilep = 0;
2912 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2913 &mode1, &unsignedp, &volatilep, false);
2914 rtx orig_op0;
2915
4f2a9af8
JJ
2916 if (bitsize == 0)
2917 return NULL;
2918
b5b8b0ac
AO
2919 orig_op0 = op0 = expand_debug_expr (tem);
2920
2921 if (!op0)
2922 return NULL;
2923
2924 if (offset)
2925 {
dda2da58
AO
2926 enum machine_mode addrmode, offmode;
2927
aa847cc8
JJ
2928 if (!MEM_P (op0))
2929 return NULL;
b5b8b0ac 2930
dda2da58
AO
2931 op0 = XEXP (op0, 0);
2932 addrmode = GET_MODE (op0);
2933 if (addrmode == VOIDmode)
2934 addrmode = Pmode;
2935
b5b8b0ac
AO
2936 op1 = expand_debug_expr (offset);
2937 if (!op1)
2938 return NULL;
2939
dda2da58
AO
2940 offmode = GET_MODE (op1);
2941 if (offmode == VOIDmode)
2942 offmode = TYPE_MODE (TREE_TYPE (offset));
2943
2944 if (addrmode != offmode)
2945 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2946 subreg_lowpart_offset (addrmode,
2947 offmode));
2948
2949 /* Don't use offset_address here, we don't need a
2950 recognizable address, and we don't want to generate
2951 code. */
2ba172e0
JJ
2952 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2953 op0, op1));
b5b8b0ac
AO
2954 }
2955
2956 if (MEM_P (op0))
2957 {
4f2a9af8
JJ
2958 if (mode1 == VOIDmode)
2959 /* Bitfield. */
2960 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2961 if (bitpos >= BITS_PER_UNIT)
2962 {
2963 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2964 bitpos %= BITS_PER_UNIT;
2965 }
2966 else if (bitpos < 0)
2967 {
4f2a9af8
JJ
2968 HOST_WIDE_INT units
2969 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2970 op0 = adjust_address_nv (op0, mode1, units);
2971 bitpos += units * BITS_PER_UNIT;
2972 }
2973 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2974 op0 = adjust_address_nv (op0, mode, 0);
2975 else if (GET_MODE (op0) != mode1)
2976 op0 = adjust_address_nv (op0, mode1, 0);
2977 else
2978 op0 = copy_rtx (op0);
2979 if (op0 == orig_op0)
2980 op0 = shallow_copy_rtx (op0);
2981 set_mem_attributes (op0, exp, 0);
2982 }
2983
2984 if (bitpos == 0 && mode == GET_MODE (op0))
2985 return op0;
2986
2d3fc6aa
JJ
2987 if (bitpos < 0)
2988 return NULL;
2989
88c04a5d
JJ
2990 if (GET_MODE (op0) == BLKmode)
2991 return NULL;
2992
b5b8b0ac
AO
2993 if ((bitpos % BITS_PER_UNIT) == 0
2994 && bitsize == GET_MODE_BITSIZE (mode1))
2995 {
2996 enum machine_mode opmode = GET_MODE (op0);
2997
b5b8b0ac 2998 if (opmode == VOIDmode)
9712cba0 2999 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3000
3001 /* This condition may hold if we're expanding the address
3002 right past the end of an array that turned out not to
3003 be addressable (i.e., the address was only computed in
3004 debug stmts). The gen_subreg below would rightfully
3005 crash, and the address doesn't really exist, so just
3006 drop it. */
3007 if (bitpos >= GET_MODE_BITSIZE (opmode))
3008 return NULL;
3009
7d5d39bb
JJ
3010 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3011 return simplify_gen_subreg (mode, op0, opmode,
3012 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
3013 }
3014
3015 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3016 && TYPE_UNSIGNED (TREE_TYPE (exp))
3017 ? SIGN_EXTRACT
3018 : ZERO_EXTRACT, mode,
3019 GET_MODE (op0) != VOIDmode
9712cba0
JJ
3020 ? GET_MODE (op0)
3021 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
3022 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3023 }
3024
b5b8b0ac 3025 case ABS_EXPR:
2ba172e0 3026 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
3027
3028 case NEGATE_EXPR:
2ba172e0 3029 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
3030
3031 case BIT_NOT_EXPR:
2ba172e0 3032 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
3033
3034 case FLOAT_EXPR:
2ba172e0
JJ
3035 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3036 0)))
3037 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3038 inner_mode);
b5b8b0ac
AO
3039
3040 case FIX_TRUNC_EXPR:
2ba172e0
JJ
3041 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3042 inner_mode);
b5b8b0ac
AO
3043
3044 case POINTER_PLUS_EXPR:
576319a7
DD
3045 /* For the rare target where pointers are not the same size as
3046 size_t, we need to check for mis-matched modes and correct
3047 the addend. */
3048 if (op0 && op1
3049 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3050 && GET_MODE (op0) != GET_MODE (op1))
3051 {
3052 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2ba172e0
JJ
3053 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3054 GET_MODE (op1));
576319a7
DD
3055 else
3056 /* We always sign-extend, regardless of the signedness of
3057 the operand, because the operand is always unsigned
3058 here even if the original C expression is signed. */
2ba172e0
JJ
3059 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3060 GET_MODE (op1));
576319a7
DD
3061 }
3062 /* Fall through. */
b5b8b0ac 3063 case PLUS_EXPR:
2ba172e0 3064 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
3065
3066 case MINUS_EXPR:
2ba172e0 3067 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
3068
3069 case MULT_EXPR:
2ba172e0 3070 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
3071
3072 case RDIV_EXPR:
3073 case TRUNC_DIV_EXPR:
3074 case EXACT_DIV_EXPR:
3075 if (unsignedp)
2ba172e0 3076 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 3077 else
2ba172e0 3078 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
3079
3080 case TRUNC_MOD_EXPR:
2ba172e0 3081 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
3082
3083 case FLOOR_DIV_EXPR:
3084 if (unsignedp)
2ba172e0 3085 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
3086 else
3087 {
2ba172e0
JJ
3088 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3089 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3090 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 3091 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3092 }
3093
3094 case FLOOR_MOD_EXPR:
3095 if (unsignedp)
2ba172e0 3096 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
3097 else
3098 {
2ba172e0 3099 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3100 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3101 adj = simplify_gen_unary (NEG, mode,
3102 simplify_gen_binary (MULT, mode, adj, op1),
3103 mode);
3104 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3105 }
3106
3107 case CEIL_DIV_EXPR:
3108 if (unsignedp)
3109 {
2ba172e0
JJ
3110 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3111 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3112 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 3113 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3114 }
3115 else
3116 {
2ba172e0
JJ
3117 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3118 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3119 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 3120 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3121 }
3122
3123 case CEIL_MOD_EXPR:
3124 if (unsignedp)
3125 {
2ba172e0 3126 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3127 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3128 adj = simplify_gen_unary (NEG, mode,
3129 simplify_gen_binary (MULT, mode, adj, op1),
3130 mode);
3131 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3132 }
3133 else
3134 {
2ba172e0 3135 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3136 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3137 adj = simplify_gen_unary (NEG, mode,
3138 simplify_gen_binary (MULT, mode, adj, op1),
3139 mode);
3140 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3141 }
3142
3143 case ROUND_DIV_EXPR:
3144 if (unsignedp)
3145 {
2ba172e0
JJ
3146 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3147 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3148 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 3149 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3150 }
3151 else
3152 {
2ba172e0
JJ
3153 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3154 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3155 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 3156 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3157 }
3158
3159 case ROUND_MOD_EXPR:
3160 if (unsignedp)
3161 {
2ba172e0 3162 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3163 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3164 adj = simplify_gen_unary (NEG, mode,
3165 simplify_gen_binary (MULT, mode, adj, op1),
3166 mode);
3167 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3168 }
3169 else
3170 {
2ba172e0 3171 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3172 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3173 adj = simplify_gen_unary (NEG, mode,
3174 simplify_gen_binary (MULT, mode, adj, op1),
3175 mode);
3176 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3177 }
3178
3179 case LSHIFT_EXPR:
2ba172e0 3180 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
3181
3182 case RSHIFT_EXPR:
3183 if (unsignedp)
2ba172e0 3184 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 3185 else
2ba172e0 3186 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
3187
3188 case LROTATE_EXPR:
2ba172e0 3189 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
3190
3191 case RROTATE_EXPR:
2ba172e0 3192 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
3193
3194 case MIN_EXPR:
2ba172e0 3195 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
3196
3197 case MAX_EXPR:
2ba172e0 3198 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
3199
3200 case BIT_AND_EXPR:
3201 case TRUTH_AND_EXPR:
2ba172e0 3202 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
3203
3204 case BIT_IOR_EXPR:
3205 case TRUTH_OR_EXPR:
2ba172e0 3206 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
3207
3208 case BIT_XOR_EXPR:
3209 case TRUTH_XOR_EXPR:
2ba172e0 3210 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
3211
3212 case TRUTH_ANDIF_EXPR:
3213 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3214
3215 case TRUTH_ORIF_EXPR:
3216 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3217
3218 case TRUTH_NOT_EXPR:
2ba172e0 3219 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
3220
3221 case LT_EXPR:
2ba172e0
JJ
3222 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3223 op0, op1);
b5b8b0ac
AO
3224
3225 case LE_EXPR:
2ba172e0
JJ
3226 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3227 op0, op1);
b5b8b0ac
AO
3228
3229 case GT_EXPR:
2ba172e0
JJ
3230 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3231 op0, op1);
b5b8b0ac
AO
3232
3233 case GE_EXPR:
2ba172e0
JJ
3234 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3235 op0, op1);
b5b8b0ac
AO
3236
3237 case EQ_EXPR:
2ba172e0 3238 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3239
3240 case NE_EXPR:
2ba172e0 3241 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3242
3243 case UNORDERED_EXPR:
2ba172e0 3244 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3245
3246 case ORDERED_EXPR:
2ba172e0 3247 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3248
3249 case UNLT_EXPR:
2ba172e0 3250 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3251
3252 case UNLE_EXPR:
2ba172e0 3253 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3254
3255 case UNGT_EXPR:
2ba172e0 3256 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3257
3258 case UNGE_EXPR:
2ba172e0 3259 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3260
3261 case UNEQ_EXPR:
2ba172e0 3262 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3263
3264 case LTGT_EXPR:
2ba172e0 3265 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3266
3267 case COND_EXPR:
3268 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3269
3270 case COMPLEX_EXPR:
3271 gcc_assert (COMPLEX_MODE_P (mode));
3272 if (GET_MODE (op0) == VOIDmode)
3273 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3274 if (GET_MODE (op1) == VOIDmode)
3275 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3276 return gen_rtx_CONCAT (mode, op0, op1);
3277
d02a5a4b
JJ
3278 case CONJ_EXPR:
3279 if (GET_CODE (op0) == CONCAT)
3280 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
3281 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3282 XEXP (op0, 1),
3283 GET_MODE_INNER (mode)));
d02a5a4b
JJ
3284 else
3285 {
3286 enum machine_mode imode = GET_MODE_INNER (mode);
3287 rtx re, im;
3288
3289 if (MEM_P (op0))
3290 {
3291 re = adjust_address_nv (op0, imode, 0);
3292 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3293 }
3294 else
3295 {
3296 enum machine_mode ifmode = int_mode_for_mode (mode);
3297 enum machine_mode ihmode = int_mode_for_mode (imode);
3298 rtx halfsize;
3299 if (ifmode == BLKmode || ihmode == BLKmode)
3300 return NULL;
3301 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3302 re = op0;
3303 if (mode != ifmode)
3304 re = gen_rtx_SUBREG (ifmode, re, 0);
3305 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3306 if (imode != ihmode)
3307 re = gen_rtx_SUBREG (imode, re, 0);
3308 im = copy_rtx (op0);
3309 if (mode != ifmode)
3310 im = gen_rtx_SUBREG (ifmode, im, 0);
3311 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3312 if (imode != ihmode)
3313 im = gen_rtx_SUBREG (imode, im, 0);
3314 }
3315 im = gen_rtx_NEG (imode, im);
3316 return gen_rtx_CONCAT (mode, re, im);
3317 }
3318
b5b8b0ac
AO
3319 case ADDR_EXPR:
3320 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3321 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3322 {
3323 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3324 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3325 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
3326 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3327 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
3328 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3329
3330 if (handled_component_p (TREE_OPERAND (exp, 0)))
3331 {
3332 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3333 tree decl
3334 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3335 &bitoffset, &bitsize, &maxsize);
3336 if ((TREE_CODE (decl) == VAR_DECL
3337 || TREE_CODE (decl) == PARM_DECL
3338 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
3339 && (!TREE_ADDRESSABLE (decl)
3340 || target_for_debug_bind (decl))
c8a27c40
JJ
3341 && (bitoffset % BITS_PER_UNIT) == 0
3342 && bitsize > 0
3343 && bitsize == maxsize)
3344 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3345 bitoffset / BITS_PER_UNIT);
3346 }
3347
3348 return NULL;
3349 }
b5b8b0ac 3350
f61c6f34
JJ
3351 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3352 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3353
3354 return op0;
b5b8b0ac
AO
3355
3356 case VECTOR_CST:
d2a12ae7
RG
3357 {
3358 unsigned i;
3359
3360 op0 = gen_rtx_CONCATN
3361 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3362
3363 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3364 {
3365 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3366 if (!op1)
3367 return NULL;
3368 XVECEXP (op0, 0, i) = op1;
3369 }
3370
3371 return op0;
3372 }
b5b8b0ac
AO
3373
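/* A CONSTRUCTOR is only handled for vector types: each element is expanded
   into a CONCATN slot, and any trailing elements omitted from the
   constructor are filled with the element type's zero.  Clobbers have no
   location, and non-vector constructors are unsupported here.  */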
3374 case CONSTRUCTOR:
47598145
MM
3375 if (TREE_CLOBBER_P (exp))
3376 return NULL;
3377 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
3378 {
3379 unsigned i;
3380 tree val;
3381
3382 op0 = gen_rtx_CONCATN
3383 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3384
3385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3386 {
3387 op1 = expand_debug_expr (val);
3388 if (!op1)
3389 return NULL;
3390 XVECEXP (op0, 0, i) = op1;
3391 }
3392
3393 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3394 {
3395 op1 = expand_debug_expr
e8160c9a 3396 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3397
3398 if (!op1)
3399 return NULL;
3400
3401 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3402 XVECEXP (op0, 0, i) = op1;
3403 }
3404
3405 return op0;
3406 }
3407 else
3408 goto flag_unsupported;
3409
3410 case CALL_EXPR:
3411 /* ??? Maybe handle some builtins? */
3412 return NULL;
3413
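/* For an SSA name, first try TER: if the name has a replaceable defining
   statement, expand that statement's RHS instead.  Otherwise use the pseudo
   of the name's out-of-SSA partition; for the default definition of an
   otherwise unused PARM_DECL, fall back to the parameter's own RTL.  */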
3414 case SSA_NAME:
3415 {
2a8e30fb
MM
3416 gimple g = get_gimple_for_ssa_name (exp);
3417 if (g)
3418 {
3419 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3420 if (!op0)
3421 return NULL;
3422 }
3423 else
3424 {
3425 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3426
2a8e30fb 3427 if (part == NO_PARTITION)
a58a8e4b
JJ
3428 {
3429 /* If this is a reference to the incoming value of a parameter
3430 that is never used in the code, or whose incoming value is
3431 never used in the code, use the PARM_DECL's DECL_RTL
3432 if set. */
3433 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3434 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3435 {
12c5ffe5
EB
3436 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3437 if (op0)
3438 goto adjust_mode;
a58a8e4b 3439 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
3440 if (op0)
3441 goto adjust_mode;
a58a8e4b
JJ
3442 }
3443 return NULL;
3444 }
b5b8b0ac 3445
2a8e30fb 3446 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3447
abfea58d 3448 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3449 }
b5b8b0ac
AO
3450 goto adjust_mode;
3451 }
3452
3453 case ERROR_MARK:
3454 return NULL;
3455
7ece48b1
JJ
3456 /* Vector stuff. For most of the codes we don't have rtl codes. */
3457 case REALIGN_LOAD_EXPR:
3458 case REDUC_MAX_EXPR:
3459 case REDUC_MIN_EXPR:
3460 case REDUC_PLUS_EXPR:
3461 case VEC_COND_EXPR:
7ece48b1
JJ
3462 case VEC_LSHIFT_EXPR:
3463 case VEC_PACK_FIX_TRUNC_EXPR:
3464 case VEC_PACK_SAT_EXPR:
3465 case VEC_PACK_TRUNC_EXPR:
3466 case VEC_RSHIFT_EXPR:
3467 case VEC_UNPACK_FLOAT_HI_EXPR:
3468 case VEC_UNPACK_FLOAT_LO_EXPR:
3469 case VEC_UNPACK_HI_EXPR:
3470 case VEC_UNPACK_LO_EXPR:
3471 case VEC_WIDEN_MULT_HI_EXPR:
3472 case VEC_WIDEN_MULT_LO_EXPR:
36ba4aae
IR
3473 case VEC_WIDEN_LSHIFT_HI_EXPR:
3474 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 3475 case VEC_PERM_EXPR:
7ece48b1
JJ
3476 return NULL;
3477
3478 /* Misc codes. */
3479 case ADDR_SPACE_CONVERT_EXPR:
3480 case FIXED_CONVERT_EXPR:
3481 case OBJ_TYPE_REF:
3482 case WITH_SIZE_EXPR:
3483 return NULL;
3484
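/* Widening operations have no direct RTL counterpart here, so they are
   emulated for scalar integer modes: each operand is zero- or sign-extended
   to the wide mode according to its own signedness, and the operation is
   then carried out in the wide mode (e.g. DOT_PROD becomes an extend,
   multiply and add).  */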
3485 case DOT_PROD_EXPR:
3486 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3487 && SCALAR_INT_MODE_P (mode))
3488 {
2ba172e0
JJ
3489 op0
3490 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3491 0)))
3492 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3493 inner_mode);
3494 op1
3495 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3496 1)))
3497 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3498 inner_mode);
3499 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3500 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
3501 }
3502 return NULL;
3503
3504 case WIDEN_MULT_EXPR:
0354c0c7
BS
3505 case WIDEN_MULT_PLUS_EXPR:
3506 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3507 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3508 && SCALAR_INT_MODE_P (mode))
3509 {
2ba172e0 3510 inner_mode = GET_MODE (op0);
7ece48b1 3511 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3512 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3513 else
5b58b39b 3514 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3515 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3516 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3517 else
5b58b39b 3518 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 3519 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
3520 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3521 return op0;
3522 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 3523 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 3524 else
2ba172e0 3525 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
3526 }
3527 return NULL;
3528
3529 case WIDEN_SUM_EXPR:
3f3af9df 3530 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
3531 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3532 && SCALAR_INT_MODE_P (mode))
3533 {
2ba172e0
JJ
3534 op0
3535 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3536 0)))
3537 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3538 inner_mode);
3f3af9df
JJ
3539 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3540 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
3541 }
3542 return NULL;
3543
0f59b812 3544 case FMA_EXPR:
2ba172e0 3545 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 3546
b5b8b0ac
AO
3547 default:
3548 flag_unsupported:
3549#ifdef ENABLE_CHECKING
3550 debug_tree (exp);
3551 gcc_unreachable ();
3552#else
3553 return NULL;
3554#endif
3555 }
3556}
3557
ddb555ed
JJ
3558/* Return an RTX equivalent to the source bind value of the tree expression
3559 EXP. */
3560
3561static rtx
3562expand_debug_source_expr (tree exp)
3563{
3564 rtx op0 = NULL_RTX;
3565 enum machine_mode mode = VOIDmode, inner_mode;
3566
3567 switch (TREE_CODE (exp))
3568 {
3569 case PARM_DECL:
3570 {
ddb555ed 3571 mode = DECL_MODE (exp);
12c5ffe5
EB
3572 op0 = expand_debug_parm_decl (exp);
3573 if (op0)
3574 break;
ddb555ed
JJ
3575 /* See whether this is an argument that has been completely
3576 optimized out. */
3577 if (!DECL_RTL_SET_P (exp)
12c5ffe5 3578 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
3579 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3580 {
3581 tree aexp = exp;
3582 if (DECL_ABSTRACT_ORIGIN (exp))
3583 aexp = DECL_ABSTRACT_ORIGIN (exp);
3584 if (DECL_CONTEXT (aexp)
3585 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3586 {
3587 VEC(tree, gc) **debug_args;
3588 unsigned int ix;
3589 tree ddecl;
3590#ifdef ENABLE_CHECKING
3591 tree parm;
3592 for (parm = DECL_ARGUMENTS (current_function_decl);
3593 parm; parm = DECL_CHAIN (parm))
3594 gcc_assert (parm != exp
3595 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3596#endif
3597 debug_args = decl_debug_args_lookup (current_function_decl);
3598 if (debug_args != NULL)
3599 {
3600 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3601 ix += 2)
3602 if (ddecl == aexp)
3603 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3604 }
3605 }
3606 }
3607 break;
3608 }
3609 default:
3610 break;
3611 }
3612
3613 if (op0 == NULL_RTX)
3614 return NULL_RTX;
3615
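/* Convert the value to the mode of the PARM_DECL, mirroring what a real
   assignment would do: a lowpart subreg when the precision allows it,
   FLOAT_EXTEND/FLOAT_TRUNCATE between float modes, FIX/UNSIGNED_FIX from
   float to integer, and otherwise ZERO_EXTEND or SIGN_EXTEND depending on
   the signedness of the declared type.  */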
3616 inner_mode = GET_MODE (op0);
3617 if (mode == inner_mode)
3618 return op0;
3619
3620 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3621 {
3622 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3623 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3624 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3625 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3626 else
3627 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3628 }
3629 else if (FLOAT_MODE_P (mode))
3630 gcc_unreachable ();
3631 else if (FLOAT_MODE_P (inner_mode))
3632 {
3633 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3634 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3635 else
3636 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3637 }
3638 else if (CONSTANT_P (op0)
3639 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3640 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3641 subreg_lowpart_offset (mode, inner_mode));
3642 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3643 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3644 else
3645 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3646
3647 return op0;
3648}
3649
b5b8b0ac
AO
3650/* Expand the _LOCs in debug insns. We run this after expanding all
3651 regular insns, so that any variables referenced in the function
3652 will have their DECL_RTLs set. */
3653
3654static void
3655expand_debug_locations (void)
3656{
3657 rtx insn;
3658 rtx last = get_last_insn ();
3659 int save_strict_alias = flag_strict_aliasing;
3660
3661 /* New alias sets while setting up memory attributes cause
3662 -fcompare-debug failures, even though they don't bring about any
3663 codegen changes. */
3664 flag_strict_aliasing = 0;
3665
3666 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3667 if (DEBUG_INSN_P (insn))
3668 {
3669 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3670 rtx val;
3671 enum machine_mode mode;
3672
3673 if (value == NULL_TREE)
3674 val = NULL_RTX;
3675 else
3676 {
ddb555ed
JJ
3677 if (INSN_VAR_LOCATION_STATUS (insn)
3678 == VAR_INIT_STATUS_UNINITIALIZED)
3679 val = expand_debug_source_expr (value);
3680 else
3681 val = expand_debug_expr (value);
b5b8b0ac
AO
3682 gcc_assert (last == get_last_insn ());
3683 }
3684
3685 if (!val)
3686 val = gen_rtx_UNKNOWN_VAR_LOC ();
3687 else
3688 {
3689 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3690
3691 gcc_assert (mode == GET_MODE (val)
3692 || (GET_MODE (val) == VOIDmode
3693 && (CONST_INT_P (val)
3694 || GET_CODE (val) == CONST_FIXED
3695 || GET_CODE (val) == CONST_DOUBLE
3696 || GET_CODE (val) == LABEL_REF)));
3697 }
3698
3699 INSN_VAR_LOCATION_LOC (insn) = val;
3700 }
3701
3702 flag_strict_aliasing = save_strict_alias;
3703}
3704
242229bb
JH
3705/* Expand basic block BB from GIMPLE trees to RTL. */
3706
3707static basic_block
10d22567 3708expand_gimple_basic_block (basic_block bb)
242229bb 3709{
726a989a
RB
3710 gimple_stmt_iterator gsi;
3711 gimple_seq stmts;
3712 gimple stmt = NULL;
242229bb
JH
3713 rtx note, last;
3714 edge e;
628f6a4e 3715 edge_iterator ei;
8b11009b 3716 void **elt;
242229bb
JH
3717
3718 if (dump_file)
726a989a
RB
3719 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3720 bb->index);
3721
3722 /* Note that since we are now transitioning from GIMPLE to RTL, we
3723 cannot use the gsi_*_bb() routines because they expect the basic
3724 block to be in GIMPLE, instead of RTL. Therefore, we need to
3725 access the BB sequence directly. */
3726 stmts = bb_seq (bb);
3727 bb->il.gimple = NULL;
bf08ebeb 3728 rtl_profile_for_bb (bb);
5e2d947c
JH
3729 init_rtl_bb_info (bb);
3730 bb->flags |= BB_RTL;
3731
a9b77cd1
ZD
3732 /* Remove the RETURN_EXPR if we may fall through to the exit
3733 instead. */
726a989a
RB
3734 gsi = gsi_last (stmts);
3735 if (!gsi_end_p (gsi)
3736 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3737 {
726a989a 3738 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3739
3740 gcc_assert (single_succ_p (bb));
3741 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3742
3743 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3744 && !gimple_return_retval (ret_stmt))
a9b77cd1 3745 {
726a989a 3746 gsi_remove (&gsi, false);
a9b77cd1
ZD
3747 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3748 }
3749 }
3750
726a989a
RB
3751 gsi = gsi_start (stmts);
3752 if (!gsi_end_p (gsi))
8b11009b 3753 {
726a989a
RB
3754 stmt = gsi_stmt (gsi);
3755 if (gimple_code (stmt) != GIMPLE_LABEL)
3756 stmt = NULL;
8b11009b 3757 }
242229bb 3758
8b11009b
ZD
3759 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3760
3761 if (stmt || elt)
242229bb
JH
3762 {
3763 last = get_last_insn ();
3764
8b11009b
ZD
3765 if (stmt)
3766 {
28ed065e 3767 expand_gimple_stmt (stmt);
726a989a 3768 gsi_next (&gsi);
8b11009b
ZD
3769 }
3770
3771 if (elt)
ae50c0cb 3772 emit_label ((rtx) *elt);
242229bb 3773
caf93cb0 3774 /* Java emits line number notes at the top of labels.
c22cacf3 3775 ??? Make this go away once line number notes are obsoleted. */
242229bb 3776 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3777 if (NOTE_P (BB_HEAD (bb)))
242229bb 3778 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3779 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3780
726a989a 3781 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3782 }
3783 else
3784 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3785
3786 NOTE_BASIC_BLOCK (note) = bb;
3787
726a989a 3788 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3789 {
cea49550 3790 basic_block new_bb;
242229bb 3791
b5b8b0ac 3792 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3793
3794 /* If this statement is a non-debug one, and we generate debug
3795 insns, then this one might be the last real use of a TERed
3796 SSA_NAME, but where there are still some debug uses further
3797 down. Expanding the current SSA name in such further debug
3798 uses by their RHS might lead to wrong debug info, as coalescing
3799 might make the operands of such RHS be placed into the same
3800 pseudo as something else. Like so:
3801 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3802 use(a_1);
3803 a_2 = ...
3804 #DEBUG ... => a_1
3805 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3806 If we would now expand a_1 by its RHS (a_0 + 1) in the debug use,
3807 the write to a_2 would actually have clobbered the place which
3808 formerly held a_0.
3809
3810 So, instead of that, we recognize the situation, and generate
3811 debug temporaries at the last real use of TERed SSA names:
3812 a_1 = a_0 + 1;
3813 #DEBUG #D1 => a_1
3814 use(a_1);
3815 a_2 = ...
3816 #DEBUG ... => #D1
3817 */
3818 if (MAY_HAVE_DEBUG_INSNS
3819 && SA.values
3820 && !is_gimple_debug (stmt))
3821 {
3822 ssa_op_iter iter;
3823 tree op;
3824 gimple def;
3825
3826 location_t sloc = get_curr_insn_source_location ();
3827 tree sblock = get_curr_insn_block ();
3828
3829 /* Look for SSA names that have their last use here (TERed
3830 names always have only one real use). */
3831 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3832 if ((def = get_gimple_for_ssa_name (op)))
3833 {
3834 imm_use_iterator imm_iter;
3835 use_operand_p use_p;
3836 bool have_debug_uses = false;
3837
3838 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3839 {
3840 if (gimple_debug_bind_p (USE_STMT (use_p)))
3841 {
3842 have_debug_uses = true;
3843 break;
3844 }
3845 }
3846
3847 if (have_debug_uses)
3848 {
3849 /* OP is a TERed SSA name, with DEF its defining
3850 statement, and where OP is used in further debug
3851 instructions. Generate a debug temporary, and
3852 replace all uses of OP in debug insns with that
3853 temporary. */
3854 gimple debugstmt;
3855 tree value = gimple_assign_rhs_to_tree (def);
3856 tree vexpr = make_node (DEBUG_EXPR_DECL);
3857 rtx val;
3858 enum machine_mode mode;
3859
3860 set_curr_insn_source_location (gimple_location (def));
3861 set_curr_insn_block (gimple_block (def));
3862
3863 DECL_ARTIFICIAL (vexpr) = 1;
3864 TREE_TYPE (vexpr) = TREE_TYPE (value);
3865 if (DECL_P (value))
3866 mode = DECL_MODE (value);
3867 else
3868 mode = TYPE_MODE (TREE_TYPE (value));
3869 DECL_MODE (vexpr) = mode;
3870
3871 val = gen_rtx_VAR_LOCATION
3872 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3873
e8c6bb74 3874 emit_debug_insn (val);
2a8e30fb
MM
3875
3876 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3877 {
3878 if (!gimple_debug_bind_p (debugstmt))
3879 continue;
3880
3881 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3882 SET_USE (use_p, vexpr);
3883
3884 update_stmt (debugstmt);
3885 }
3886 }
3887 }
3888 set_curr_insn_source_location (sloc);
3889 set_curr_insn_block (sblock);
3890 }
3891
a5883ba0 3892 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3893
242229bb
JH
3894 /* Expand this statement, then evaluate the resulting RTL and
3895 fixup the CFG accordingly. */
726a989a 3896 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3897 {
726a989a 3898 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3899 if (new_bb)
3900 return new_bb;
3901 }
b5b8b0ac
AO
3902 else if (gimple_debug_bind_p (stmt))
3903 {
3904 location_t sloc = get_curr_insn_source_location ();
3905 tree sblock = get_curr_insn_block ();
3906 gimple_stmt_iterator nsi = gsi;
3907
3908 for (;;)
3909 {
3910 tree var = gimple_debug_bind_get_var (stmt);
3911 tree value;
3912 rtx val;
3913 enum machine_mode mode;
3914
ec8c1492
JJ
3915 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3916 && TREE_CODE (var) != LABEL_DECL
3917 && !target_for_debug_bind (var))
3918 goto delink_debug_stmt;
3919
b5b8b0ac
AO
3920 if (gimple_debug_bind_has_value_p (stmt))
3921 value = gimple_debug_bind_get_value (stmt);
3922 else
3923 value = NULL_TREE;
3924
3925 last = get_last_insn ();
3926
3927 set_curr_insn_source_location (gimple_location (stmt));
3928 set_curr_insn_block (gimple_block (stmt));
3929
3930 if (DECL_P (var))
3931 mode = DECL_MODE (var);
3932 else
3933 mode = TYPE_MODE (TREE_TYPE (var));
3934
3935 val = gen_rtx_VAR_LOCATION
3936 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3937
e16b6fd0 3938 emit_debug_insn (val);
b5b8b0ac
AO
3939
3940 if (dump_file && (dump_flags & TDF_DETAILS))
3941 {
3942 /* We can't dump the insn with a TREE where an RTX
3943 is expected. */
e8c6bb74 3944 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 3945 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 3946 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
3947 }
3948
ec8c1492 3949 delink_debug_stmt:
2a8e30fb
MM
3950 /* In order not to generate too many debug temporaries,
3951 we delink all uses of debug statements we already expanded.
3952 Therefore debug statements between definition and real
3953 use of TERed SSA names will continue to use the SSA name,
3954 and not be replaced with debug temps. */
3955 delink_stmt_imm_use (stmt);
3956
b5b8b0ac
AO
3957 gsi = nsi;
3958 gsi_next (&nsi);
3959 if (gsi_end_p (nsi))
3960 break;
3961 stmt = gsi_stmt (nsi);
3962 if (!gimple_debug_bind_p (stmt))
3963 break;
3964 }
3965
ddb555ed
JJ
3966 set_curr_insn_source_location (sloc);
3967 set_curr_insn_block (sblock);
3968 }
3969 else if (gimple_debug_source_bind_p (stmt))
3970 {
3971 location_t sloc = get_curr_insn_source_location ();
3972 tree sblock = get_curr_insn_block ();
3973 tree var = gimple_debug_source_bind_get_var (stmt);
3974 tree value = gimple_debug_source_bind_get_value (stmt);
3975 rtx val;
3976 enum machine_mode mode;
3977
3978 last = get_last_insn ();
3979
3980 set_curr_insn_source_location (gimple_location (stmt));
3981 set_curr_insn_block (gimple_block (stmt));
3982
3983 mode = DECL_MODE (var);
3984
3985 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3986 VAR_INIT_STATUS_UNINITIALIZED);
3987
3988 emit_debug_insn (val);
3989
3990 if (dump_file && (dump_flags & TDF_DETAILS))
3991 {
3992 /* We can't dump the insn with a TREE where an RTX
3993 is expected. */
3994 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3995 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3996 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3997 }
3998
b5b8b0ac
AO
3999 set_curr_insn_source_location (sloc);
4000 set_curr_insn_block (sblock);
4001 }
80c7a9eb 4002 else
242229bb 4003 {
726a989a 4004 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
4005 {
4006 bool can_fallthru;
4007 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4008 if (new_bb)
4009 {
4010 if (can_fallthru)
4011 bb = new_bb;
4012 else
4013 return new_bb;
4014 }
4015 }
4d7a65ea 4016 else
b7211528 4017 {
4e3825db 4018 def_operand_p def_p;
4e3825db
MM
4019 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4020
4021 if (def_p != NULL)
4022 {
4023 /* Ignore this stmt if it is in the list of
4024 replaceable expressions. */
4025 if (SA.values
b8698a0f 4026 && bitmap_bit_p (SA.values,
e97809c6 4027 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
4028 continue;
4029 }
28ed065e 4030 last = expand_gimple_stmt (stmt);
726a989a 4031 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 4032 }
242229bb
JH
4033 }
4034 }
4035
a5883ba0
MM
4036 currently_expanding_gimple_stmt = NULL;
4037
7241571e 4038 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
4039 FOR_EACH_EDGE (e, ei, bb->succs)
4040 {
7241571e
JJ
4041 if (e->goto_locus && e->goto_block)
4042 {
4043 set_curr_insn_source_location (e->goto_locus);
4044 set_curr_insn_block (e->goto_block);
4045 e->goto_locus = curr_insn_locator ();
4046 }
4047 e->goto_block = NULL;
4048 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4049 {
4050 emit_jump (label_rtx_for_bb (e->dest));
4051 e->flags &= ~EDGE_FALLTHRU;
4052 }
a9b77cd1
ZD
4053 }
4054
ae761c45
AH
4055 /* Expanded RTL can create a jump in the last instruction of the block.
4056 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
4057 We need to insert a dummy move to prevent this. PR41440. */
4058 if (single_succ_p (bb)
4059 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4060 && (last = get_last_insn ())
4061 && JUMP_P (last))
4062 {
4063 rtx dummy = gen_reg_rtx (SImode);
4064 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4065 }
4066
242229bb
JH
4067 do_pending_stack_adjust ();
4068
3f117656 4069 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
4070 before a barrier and/or table jump insn. */
4071 last = get_last_insn ();
4b4bf941 4072 if (BARRIER_P (last))
242229bb
JH
4073 last = PREV_INSN (last);
4074 if (JUMP_TABLE_DATA_P (last))
4075 last = PREV_INSN (PREV_INSN (last));
4076 BB_END (bb) = last;
caf93cb0 4077
242229bb 4078 update_bb_for_insn (bb);
80c7a9eb 4079
242229bb
JH
4080 return bb;
4081}
4082
4083
4084/* Create a basic block for initialization code. */
4085
4086static basic_block
4087construct_init_block (void)
4088{
4089 basic_block init_block, first_block;
fd44f634
JH
4090 edge e = NULL;
4091 int flags;
275a4187 4092
fd44f634
JH
4093 /* Multiple entry points not supported yet. */
4094 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
4095 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4096 init_rtl_bb_info (EXIT_BLOCK_PTR);
4097 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4098 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 4099
fd44f634 4100 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 4101
fd44f634
JH
4102 /* When entry edge points to first basic block, we don't need jump,
4103 otherwise we have to jump into proper target. */
4104 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4105 {
726a989a 4106 tree label = gimple_block_label (e->dest);
fd44f634
JH
4107
4108 emit_jump (label_rtx (label));
4109 flags = 0;
275a4187 4110 }
fd44f634
JH
4111 else
4112 flags = EDGE_FALLTHRU;
242229bb
JH
4113
4114 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4115 get_last_insn (),
4116 ENTRY_BLOCK_PTR);
4117 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4118 init_block->count = ENTRY_BLOCK_PTR->count;
4119 if (e)
4120 {
4121 first_block = e->dest;
4122 redirect_edge_succ (e, init_block);
fd44f634 4123 e = make_edge (init_block, first_block, flags);
242229bb
JH
4124 }
4125 else
4126 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4127 e->probability = REG_BR_PROB_BASE;
4128 e->count = ENTRY_BLOCK_PTR->count;
4129
4130 update_bb_for_insn (init_block);
4131 return init_block;
4132}
4133
55e092c4
JH
4134/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4135 found in the block tree. */
4136
4137static void
4138set_block_levels (tree block, int level)
4139{
4140 while (block)
4141 {
4142 BLOCK_NUMBER (block) = level;
4143 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4144 block = BLOCK_CHAIN (block);
4145 }
4146}
242229bb
JH
4147
4148/* Create a block containing landing pads and similar stuff. */
4149
4150static void
4151construct_exit_block (void)
4152{
4153 rtx head = get_last_insn ();
4154 rtx end;
4155 basic_block exit_block;
628f6a4e
BE
4156 edge e, e2;
4157 unsigned ix;
4158 edge_iterator ei;
071a42f9 4159 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 4160
bf08ebeb
JH
4161 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4162
caf93cb0 4163 /* Make sure the locus is set to the end of the function, so that
242229bb 4164 epilogue line numbers and warnings are set properly. */
6773e15f 4165 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
4166 input_location = cfun->function_end_locus;
4167
4168 /* The following insns belong to the top scope. */
55e092c4 4169 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 4170
242229bb
JH
4171 /* Generate rtl for function exit. */
4172 expand_function_end ();
4173
4174 end = get_last_insn ();
4175 if (head == end)
4176 return;
071a42f9
JH
4177 /* While emitting the function end we could move end of the last basic block.
4178 */
4179 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 4180 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 4181 head = NEXT_INSN (head);
80c7a9eb
RH
4182 exit_block = create_basic_block (NEXT_INSN (head), end,
4183 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
4184 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4185 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
4186
4187 ix = 0;
4188 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 4189 {
8fb790fd 4190 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 4191 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
4192 redirect_edge_succ (e, exit_block);
4193 else
4194 ix++;
242229bb 4195 }
628f6a4e 4196
242229bb
JH
4197 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4198 e->probability = REG_BR_PROB_BASE;
4199 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 4200 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
4201 if (e2 != e)
4202 {
c22cacf3 4203 e->count -= e2->count;
242229bb
JH
4204 exit_block->count -= e2->count;
4205 exit_block->frequency -= EDGE_FREQUENCY (e2);
4206 }
4207 if (e->count < 0)
4208 e->count = 0;
4209 if (exit_block->count < 0)
4210 exit_block->count = 0;
4211 if (exit_block->frequency < 0)
4212 exit_block->frequency = 0;
4213 update_bb_for_insn (exit_block);
4214}
4215
c22cacf3 4216/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
4217 Look for ARRAY_REF nodes with non-constant indexes and mark them
4218 addressable. */
4219
4220static tree
4221discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4222 void *data ATTRIBUTE_UNUSED)
4223{
4224 tree t = *tp;
4225
4226 if (IS_TYPE_OR_DECL_P (t))
4227 *walk_subtrees = 0;
4228 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4229 {
4230 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4231 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4232 && (!TREE_OPERAND (t, 2)
4233 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4234 || (TREE_CODE (t) == COMPONENT_REF
4235 && (!TREE_OPERAND (t,2)
4236 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4237 || TREE_CODE (t) == BIT_FIELD_REF
4238 || TREE_CODE (t) == REALPART_EXPR
4239 || TREE_CODE (t) == IMAGPART_EXPR
4240 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 4241 || CONVERT_EXPR_P (t))
a1b23b2f
UW
4242 t = TREE_OPERAND (t, 0);
4243
4244 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4245 {
4246 t = get_base_address (t);
6f11d690
RG
4247 if (t && DECL_P (t)
4248 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
4249 TREE_ADDRESSABLE (t) = 1;
4250 }
4251
4252 *walk_subtrees = 0;
4253 }
4254
4255 return NULL_TREE;
4256}
4257
4258/* RTL expansion is not able to compile array references with variable
4259 offsets for arrays stored in single register. Discover such
4260 expressions and mark variables as addressable to avoid this
4261 scenario. */
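/* For instance, with code like "int v[2]; ... = v[i];" where V could
   otherwise be promoted to a single register, V is marked TREE_ADDRESSABLE
   so it is given a stack slot and the variable-index reference can be
   expanded as a memory access.  */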
4262
4263static void
4264discover_nonconstant_array_refs (void)
4265{
4266 basic_block bb;
726a989a 4267 gimple_stmt_iterator gsi;
a1b23b2f
UW
4268
4269 FOR_EACH_BB (bb)
726a989a
RB
4270 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4271 {
4272 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
4273 if (!is_gimple_debug (stmt))
4274 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 4275 }
a1b23b2f
UW
4276}
4277
2e3f842f
L
4278/* This function sets crtl->args.internal_arg_pointer to a virtual
4279 register if DRAP is needed. Local register allocator will replace
4280 virtual_incoming_args_rtx with the virtual register. */
4281
4282static void
4283expand_stack_alignment (void)
4284{
4285 rtx drap_rtx;
e939805b 4286 unsigned int preferred_stack_boundary;
2e3f842f
L
4287
4288 if (! SUPPORTS_STACK_ALIGNMENT)
4289 return;
b8698a0f 4290
2e3f842f
L
4291 if (cfun->calls_alloca
4292 || cfun->has_nonlocal_label
4293 || crtl->has_nonlocal_goto)
4294 crtl->need_drap = true;
4295
890b9b96
L
4296 /* Call update_stack_boundary here again to update incoming stack
4297 boundary. It may set incoming stack alignment to a different
4298 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4299 use the minimum incoming stack alignment to check if it is OK
4300 to perform sibcall optimization since sibcall optimization will
4301 only align the outgoing stack to incoming stack boundary. */
4302 if (targetm.calls.update_stack_boundary)
4303 targetm.calls.update_stack_boundary ();
4304
4305 /* The incoming stack frame has to be aligned at least at
4306 parm_stack_boundary. */
4307 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 4308
2e3f842f
L
4309 /* Update crtl->stack_alignment_estimated and use it later to align
4310 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4311 exceptions since callgraph doesn't collect incoming stack alignment
4312 in this case. */
8f4f502f 4313 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
4314 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4315 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4316 else
4317 preferred_stack_boundary = crtl->preferred_stack_boundary;
4318 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4319 crtl->stack_alignment_estimated = preferred_stack_boundary;
4320 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4321 crtl->stack_alignment_needed = preferred_stack_boundary;
4322
890b9b96
L
4323 gcc_assert (crtl->stack_alignment_needed
4324 <= crtl->stack_alignment_estimated);
4325
2e3f842f 4326 crtl->stack_realign_needed
e939805b 4327 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 4328 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
4329
4330 crtl->stack_realign_processed = true;
4331
4332 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4333 alignment. */
4334 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 4335 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 4336
d015f7cc
L
4337 /* stack_realign_drap and drap_rtx must match. */
4338 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4339
2e3f842f
L
4340 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4341 if (NULL != drap_rtx)
4342 {
4343 crtl->args.internal_arg_pointer = drap_rtx;
4344
4345 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4346 needed. */
4347 fixup_tail_calls ();
4348 }
4349}
4350
242229bb
JH
4351/* Translate the intermediate representation contained in the CFG
4352 from GIMPLE trees to RTL.
4353
4354 We do conversion per basic block and preserve/update the tree CFG.
4355 This implies we have to do some magic as the CFG can simultaneously
4356 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 4357 confuse the CFG hooks, so be careful to not manipulate CFG during
242229bb
JH
4358 the expansion. */
4359
c2924966 4360static unsigned int
726a989a 4361gimple_expand_cfg (void)
242229bb
JH
4362{
4363 basic_block bb, init_block;
4364 sbitmap blocks;
0ef90296
ZD
4365 edge_iterator ei;
4366 edge e;
3a42502d 4367 rtx var_seq;
4e3825db
MM
4368 unsigned i;
4369
f029db69 4370 timevar_push (TV_OUT_OF_SSA);
4e3825db 4371 rewrite_out_of_ssa (&SA);
f029db69 4372 timevar_pop (TV_OUT_OF_SSA);
4e3825db
MM
4373 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4374 sizeof (rtx));
242229bb 4375
4586b4ca
SB
4376 /* Some backends want to know that we are expanding to RTL. */
4377 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
4378 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4379 free_dominance_info (CDI_DOMINATORS);
4586b4ca 4380
bf08ebeb
JH
4381 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4382
55e092c4 4383 insn_locators_alloc ();
fe8a7779 4384 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
4385 {
4386 /* Eventually, all FEs should explicitly set function_start_locus. */
4387 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4388 set_curr_insn_source_location
4389 (DECL_SOURCE_LOCATION (current_function_decl));
4390 else
4391 set_curr_insn_source_location (cfun->function_start_locus);
4392 }
9ff70652
JJ
4393 else
4394 set_curr_insn_source_location (UNKNOWN_LOCATION);
55e092c4
JH
4395 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4396 prologue_locator = curr_insn_locator ();
4397
2b21299c
JJ
4398#ifdef INSN_SCHEDULING
4399 init_sched_attrs ();
4400#endif
4401
55e092c4
JH
4402 /* Make sure the first insn is a note even if we don't want linenums.
4403 This makes sure the first insn will never be deleted.
4404 Also, final expects a note to appear there. */
4405 emit_note (NOTE_INSN_DELETED);
6429e3be 4406
a1b23b2f
UW
4407 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4408 discover_nonconstant_array_refs ();
4409
e41b2a33 4410 targetm.expand_to_rtl_hook ();
cb91fab0 4411 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 4412 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 4413 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
4414 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4415 cfun->cfg->max_jumptable_ents = 0;
4416
ae9fd6b7
JH
4417 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4418 of the function section at expansion time to predict the distance of calls. */
4419 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4420
727a31fa 4421 /* Expand the variables recorded during gimple lowering. */
f029db69 4422 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
4423 start_sequence ();
4424
242229bb 4425 expand_used_vars ();
3a42502d
RH
4426
4427 var_seq = get_insns ();
4428 end_sequence ();
f029db69 4429 timevar_pop (TV_VAR_EXPAND);
242229bb 4430
7d69de61
RH
4431 /* Honor stack protection warnings. */
4432 if (warn_stack_protect)
4433 {
e3b5732b 4434 if (cfun->calls_alloca)
b8698a0f 4435 warning (OPT_Wstack_protector,
3b123595
SB
4436 "stack protector not protecting local variables: "
4437 "variable length buffer");
cb91fab0 4438 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 4439 warning (OPT_Wstack_protector,
3b123595
SB
4440 "stack protector not protecting function: "
4441 "all local arrays are less than %d bytes long",
7d69de61
RH
4442 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4443 }
4444
242229bb 4445 /* Set up parameters and prepare for return, for the function. */
b79c5284 4446 expand_function_start (current_function_decl);
242229bb 4447
3a42502d
RH
4448 /* If we emitted any instructions for setting up the variables,
4449 emit them before the FUNCTION_START note. */
4450 if (var_seq)
4451 {
4452 emit_insn_before (var_seq, parm_birth_insn);
4453
4454 /* In expand_function_end we'll insert the alloca save/restore
4455 before parm_birth_insn. We've just inserted an alloca call.
4456 Adjust the pointer to match. */
4457 parm_birth_insn = var_seq;
4458 }
4459
4e3825db
MM
4460 /* Now that we also have the parameter RTXs, copy them over to our
4461 partitions. */
4462 for (i = 0; i < SA.map->num_partitions; i++)
4463 {
4464 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4465
4466 if (TREE_CODE (var) != VAR_DECL
4467 && !SA.partition_to_pseudo[i])
4468 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4469 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
4470
4471 /* If this decl was marked as living in multiple places, reset
4472 this now to NULL. */
4473 if (DECL_RTL_IF_SET (var) == pc_rtx)
4474 SET_DECL_RTL (var, NULL);
4475
4e3825db
MM
4476 /* Some RTL parts really want to look at DECL_RTL(x) when x
4477 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4478 SET_DECL_RTL here making this available, but that would mean
4479 to select one of the potentially many RTLs for one DECL. Instead
4480 of doing that we simply reset the MEM_EXPR of the RTL in question,
4481 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4482 if (!DECL_RTL_SET_P (var))
4483 {
4484 if (MEM_P (SA.partition_to_pseudo[i]))
4485 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4486 }
4487 }
4488
d466b407
MM
4489 /* If we have a class containing differently aligned pointers
4490 we need to merge those into the corresponding RTL pointer
4491 alignment. */
4492 for (i = 1; i < num_ssa_names; i++)
4493 {
4494 tree name = ssa_name (i);
4495 int part;
4496 rtx r;
4497
4498 if (!name
4499 || !POINTER_TYPE_P (TREE_TYPE (name))
4500 /* We might have generated new SSA names in
4501 update_alias_info_with_stack_vars. They will have a NULL
4502 defining statements, and won't be part of the partitioning,
4503 so ignore those. */
4504 || !SSA_NAME_DEF_STMT (name))
4505 continue;
4506 part = var_to_partition (SA.map, name);
4507 if (part == NO_PARTITION)
4508 continue;
4509 r = SA.partition_to_pseudo[part];
4510 if (REG_P (r))
4511 mark_reg_pointer (r, get_pointer_alignment (name));
4512 }
4513
242229bb
JH
4514 /* If this function is `main', emit a call to `__main'
4515 to run global initializers, etc. */
4516 if (DECL_NAME (current_function_decl)
4517 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4518 && DECL_FILE_SCOPE_P (current_function_decl))
4519 expand_main_function ();
4520
7d69de61
RH
4521 /* Initialize the stack_protect_guard field. This must happen after the
4522 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4523 if (crtl->stack_protect_guard)
7d69de61
RH
4524 stack_protect_prologue ();
4525
4e3825db
MM
4526 expand_phi_nodes (&SA);
4527
3fbd86b1 4528 /* Register rtl specific functions for cfg. */
242229bb
JH
4529 rtl_register_cfg_hooks ();
4530
4531 init_block = construct_init_block ();
4532
0ef90296 4533 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4534 remaining edges later. */
0ef90296
ZD
4535 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4536 e->flags &= ~EDGE_EXECUTABLE;
4537
8b11009b 4538 lab_rtx_for_bb = pointer_map_create ();
242229bb 4539 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 4540 bb = expand_gimple_basic_block (bb);
bf08ebeb 4541
b5b8b0ac
AO
4542 if (MAY_HAVE_DEBUG_INSNS)
4543 expand_debug_locations ();
4544
4e3825db 4545 execute_free_datastructures ();
f029db69 4546 timevar_push (TV_OUT_OF_SSA);
4e3825db 4547 finish_out_of_ssa (&SA);
f029db69 4548 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4549
f029db69 4550 timevar_push (TV_POST_EXPAND);
91753e21
RG
4551 /* We are no longer in SSA form. */
4552 cfun->gimple_df->in_ssa_p = false;
4553
bf08ebeb
JH
4554 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4555 conservatively to true until they are all profile aware. */
8b11009b 4556 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4557 free_histograms ();
242229bb
JH
4558
4559 construct_exit_block ();
55e092c4
JH
4560 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4561 insn_locators_finalize ();
242229bb 4562
1d65f45c 4563 /* Zap the tree EH table. */
e8a2a782 4564 set_eh_throw_stmt_table (cfun, NULL);
242229bb 4565
42821aff
MM
4566 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4567 to split edges, which edge insertions might do. */
242229bb 4568 rebuild_jump_labels (get_insns ());
242229bb 4569
4e3825db
MM
4570 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4571 {
4572 edge e;
4573 edge_iterator ei;
4574 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4575 {
4576 if (e->insns.r)
bc470c24 4577 {
42821aff 4578 rebuild_jump_labels_chain (e->insns.r);
bc470c24
JJ
4579 /* Avoid putting insns before parm_birth_insn. */
4580 if (e->src == ENTRY_BLOCK_PTR
4581 && single_succ_p (ENTRY_BLOCK_PTR)
4582 && parm_birth_insn)
4583 {
4584 rtx insns = e->insns.r;
4585 e->insns.r = NULL_RTX;
4586 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4587 }
4588 else
4589 commit_one_edge_insertion (e);
4590 }
4e3825db
MM
4591 else
4592 ei_next (&ei);
4593 }
4594 }
4595
4596 /* We're done expanding trees to RTL. */
4597 currently_expanding_to_rtl = 0;
4598
4599 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4600 {
4601 edge e;
4602 edge_iterator ei;
4603 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4604 {
4605 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4606 e->flags &= ~EDGE_EXECUTABLE;
4607
4608 /* At the moment not all abnormal edges match the RTL
4609 representation. It is safe to remove them here as
4610 find_many_sub_basic_blocks will rediscover them.
4611 In the future we should get this fixed properly. */
4612 if ((e->flags & EDGE_ABNORMAL)
4613 && !(e->flags & EDGE_SIBCALL))
4614 remove_edge (e);
4615 else
4616 ei_next (&ei);
4617 }
4618 }
4619
242229bb
JH
4620 blocks = sbitmap_alloc (last_basic_block);
4621 sbitmap_ones (blocks);
4622 find_many_sub_basic_blocks (blocks);
242229bb 4623 sbitmap_free (blocks);
4e3825db 4624 purge_all_dead_edges ();
242229bb 4625
2e3f842f
L
4626 expand_stack_alignment ();
4627
dac1fbf8
RG
4628 /* After initial rtl generation, call back to finish generating
4629 exception support code. We need to do this before cleaning up
4630 the CFG as the code does not expect dead landing pads. */
4631 if (cfun->eh->region_tree != NULL)
4632 finish_eh_generation ();
4633
4634 /* Remove unreachable blocks, otherwise we cannot compute dominators
4635 which are needed for loop state verification. As a side-effect
4636 this also compacts blocks.
4637 ??? We cannot remove trivially dead insns here as for example
4638 the DRAP reg on i?86 is not magically live at this point.
4639 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4640 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4641
242229bb 4642#ifdef ENABLE_CHECKING
62e5bf5d 4643 verify_flow_info ();
242229bb 4644#endif
9f8628ba
PB
4645
4646 /* There's no need to defer outputting this function any more; we
4647 know we want to output it. */
4648 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4649
4650 /* Now that we're done expanding trees to RTL, we shouldn't have any
4651 more CONCATs anywhere. */
4652 generating_concat_p = 0;
4653
b7211528
SB
4654 if (dump_file)
4655 {
4656 fprintf (dump_file,
4657 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4658 /* And the pass manager will dump RTL for us. */
4659 }
ef330312
PB
4660
4661 /* If we're emitting a nested function, make sure its parent gets
4662 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4663 {
ef330312
PB
4664 tree parent;
4665 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4666 parent != NULL_TREE;
4667 parent = get_containing_scope (parent))
ef330312 4668 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4669 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4670 }
c22cacf3 4671
ef330312
PB
4672 /* We are now committed to emitting code for this function. Do any
4673 preparation, such as emitting abstract debug info for the inline
4674 before it gets mangled by optimization. */
4675 if (cgraph_function_possibly_inlined_p (current_function_decl))
4676 (*debug_hooks->outlining_inline_function) (current_function_decl);
4677
4678 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4679
4680 /* After expanding, the return labels are no longer needed. */
4681 return_label = NULL;
4682 naked_return_label = NULL;
0a35513e
AH
4683
4684 /* After expanding, the tm_restart map is no longer needed. */
4685 if (cfun->gimple_df->tm_restart)
4686 {
4687 htab_delete (cfun->gimple_df->tm_restart);
4688 cfun->gimple_df->tm_restart = NULL;
4689 }
4690
55e092c4
JH
4691 /* Tag the blocks with a depth number so that change_scope can find
4692 the common parent easily. */
4693 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4694 default_rtl_profile ();
f029db69 4695 timevar_pop (TV_POST_EXPAND);
c2924966 4696 return 0;
242229bb
JH
4697}
4698
e3b5732b 4699struct rtl_opt_pass pass_expand =
242229bb 4700{
8ddbbcae 4701 {
e3b5732b 4702 RTL_PASS,
c22cacf3 4703 "expand", /* name */
242229bb 4704 NULL, /* gate */
726a989a 4705 gimple_expand_cfg, /* execute */
242229bb
JH
4706 NULL, /* sub */
4707 NULL, /* next */
4708 0, /* static_pass_number */
c22cacf3 4709 TV_EXPAND, /* tv_id */
688a482d
RG
4710 PROP_ssa | PROP_gimple_leh | PROP_cfg
4711 | PROP_gimple_lcx, /* properties_required */
242229bb 4712 PROP_rtl, /* properties_provided */
4e3825db
MM
4713 PROP_ssa | PROP_trees, /* properties_destroyed */
4714 TODO_verify_ssa | TODO_verify_flow
4715 | TODO_verify_stmts, /* todo_flags_start */
22c5fa5f 4716 TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 4717 }
242229bb 4718};