/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "integrate.h" /* For emit_initial_value_sets.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
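/* For illustration: a statement such as "x_1 = a_2 + b_3" has RHS class
   GIMPLE_BINARY_RHS and is rebuilt as
   build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3), while a plain copy
   "x_1 = c_4" is GIMPLE_SINGLE_RHS and returns (a possibly copied) c_4
   directly.  */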

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
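/* For illustration: with FRAME_GROWS_DOWNWARD, frame_phase == 0 and
   frame_offset == -4, a request for 8 bytes at alignment 8 computes
   -4 - 8 = -12, rounds down to -16 via the "&= -align" step and returns
   -16; the new slot then occupies bytes [-16, -8) of the frame and
   frame_offset becomes -16.  */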

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have a size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
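/* For illustration: the conflict relation is stored symmetrically, so
   add_stack_var_conflict (2, 5) sets bit 5 in stack_vars[2].conflicts and
   bit 2 in stack_vars[5].conflicts, after which stack_var_conflict_p (2, 5)
   and stack_var_conflict_p (5, 2) both return true.  */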

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  unsigned j;
                  bitmap_iterator bj;
                  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
                    add_stack_var_conflict (i, j);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference to
     it upwards.  But it's conservatively correct, as a variable can never
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
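/* For illustration: given two sibling scopes

     { char a[32]; use (&a); }   { char b[32]; use (&b); }

   the gimplifier adds an end-of-scope clobber for each array, so the
   liveness computed above never sees A and B live at the same time, no
   conflict is recorded between their partitions, and the partitioning
   code below is free to let them share a single stack slot.  */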

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}


/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
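/* For illustration (the sizes are made up): given stack variables A (32
   bytes), B (16 bytes) and C (8 bytes) where only A and B conflict, the
   sorted order is A, B, C; C is unioned into A's partition while B stays
   alone, so every member of the A/C partition later shares one 32-byte
   slot and the frame needs 32 + 16 = 48 bytes instead of 56.  */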

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
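/* For illustration: with stack protection off and at -O0, a variable in a
   nested scope smaller than the 32-byte cutoff above (say "int i") gets its
   frame slot immediately, while a nested "char buf[64]" is deferred so it
   can be packed with the other deferred variables; with -fstack-protector
   every stack variable is deferred so that character buffers can be
   re-ordered to the top of the frame.  */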

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
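/* For illustration (assuming the default --param ssp-buffer-size of 8):
   with -fstack-protector a "char buf[64]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY and is placed in phase 1, while "char tmp[4]"
   and plain scalars return phase 0; with -fstack-protector-all a
   standalone character array of any length gets phase 1 and other arrays
   (e.g. "int v[4]") get phase 2.  */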
1390
1391/* Two helper routines that check for phase 1 and phase 2. These are used
1392 as callbacks for expand_stack_vars. */
1393
1394static bool
1395stack_protect_decl_phase_1 (tree decl)
1396{
1397 return stack_protect_decl_phase (decl) == 1;
1398}
1399
1400static bool
1401stack_protect_decl_phase_2 (tree decl)
1402{
1403 return stack_protect_decl_phase (decl) == 2;
1404}
1405
1406/* Ensure that variables in different stack protection phases conflict
1407 so that they are not merged and share the same stack slot. */
1408
1409static void
1410add_stack_protection_conflicts (void)
1411{
1412 size_t i, j, n = stack_vars_num;
1413 unsigned char *phase;
1414
1415 phase = XNEWVEC (unsigned char, n);
1416 for (i = 0; i < n; ++i)
1417 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1418
1419 for (i = 0; i < n; ++i)
1420 {
1421 unsigned char ph_i = phase[i];
1422 for (j = 0; j < i; ++j)
1423 if (ph_i != phase[j])
1424 add_stack_var_conflict (i, j);
1425 }
1426
1427 XDELETEVEC (phase);
1428}
1429
1430/* Create a decl for the guard at the top of the stack frame. */
1431
1432static void
1433create_stack_guard (void)
1434{
c2255bc4
AH
1435 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1436 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1437 TREE_THIS_VOLATILE (guard) = 1;
1438 TREE_USED (guard) = 1;
1439 expand_one_stack_var (guard);
cb91fab0 1440 crtl->stack_protect_guard = guard;
7d69de61
RH
1441}
1442
ff28a94d 1443/* Prepare for expanding variables. */
b8698a0f 1444static void
ff28a94d
JH
1445init_vars_expansion (void)
1446{
1447 tree t;
c021f10b 1448 unsigned ix;
cb91fab0 1449 /* Set TREE_USED on all variables in the local_decls. */
c021f10b
NF
1450 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1451 TREE_USED (t) = 1;
ff28a94d
JH
1452
1453 /* Clear TREE_USED on all variables associated with a block scope. */
1454 clear_tree_used (DECL_INITIAL (current_function_decl));
1455
1456 /* Initialize local stack smashing state. */
1457 has_protected_decls = false;
1458 has_short_buffer = false;
1459}
1460
1461/* Free up stack variable graph data. */
1462static void
1463fini_vars_expansion (void)
1464{
2bdbbe94
MM
1465 size_t i, n = stack_vars_num;
1466 for (i = 0; i < n; i++)
1467 BITMAP_FREE (stack_vars[i].conflicts);
ff28a94d
JH
1468 XDELETEVEC (stack_vars);
1469 XDELETEVEC (stack_vars_sorted);
ff28a94d
JH
1470 stack_vars = NULL;
1471 stack_vars_alloc = stack_vars_num = 0;
47598145
MM
1472 pointer_map_destroy (decl_to_stack_part);
1473 decl_to_stack_part = NULL;
ff28a94d
JH
1474}
1475
30925d94
AO
1476/* Make a fair guess for the size of the stack frame of the function
1477 in NODE. This doesn't have to be exact, the result is only used in
1478 the inline heuristics. So we don't want to run the full stack var
1479 packing algorithm (which is quadratic in the number of stack vars).
1480 Instead, we calculate the total size of all stack vars. This turns
1481 out to be a pretty fair estimate -- packing of stack vars doesn't
1482 happen very often. */
b5a430f3 1483
ff28a94d 1484HOST_WIDE_INT
30925d94 1485estimated_stack_frame_size (struct cgraph_node *node)
ff28a94d
JH
1486{
1487 HOST_WIDE_INT size = 0;
b5a430f3 1488 size_t i;
bb7e6d55 1489 tree var;
2e1ec94f 1490 tree old_cur_fun_decl = current_function_decl;
bb7e6d55 1491 referenced_var_iterator rvi;
960bfb69 1492 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
30925d94 1493
960bfb69 1494 current_function_decl = node->symbol.decl;
bb7e6d55 1495 push_cfun (fn);
ff28a94d 1496
bb7e6d55
AO
1497 gcc_checking_assert (gimple_referenced_vars (fn));
1498 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1499 size += expand_one_var (var, true, false);
b5a430f3 1500
ff28a94d
JH
1501 if (stack_vars_num > 0)
1502 {
b5a430f3
SB
1503 /* Fake sorting the stack vars for account_stack_vars (). */
1504 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1505 for (i = 0; i < stack_vars_num; ++i)
1506 stack_vars_sorted[i] = i;
ff28a94d
JH
1507 size += account_stack_vars ();
1508 fini_vars_expansion ();
1509 }
2e1ec94f
RR
1510 pop_cfun ();
1511 current_function_decl = old_cur_fun_decl;
ff28a94d
JH
1512 return size;
1513}
1514
1f6d3a08 1515/* Expand all variables used in the function. */
727a31fa
RH
1516
1517static void
1518expand_used_vars (void)
1519{
c021f10b
NF
1520 tree var, outer_block = DECL_INITIAL (current_function_decl);
1521 VEC(tree,heap) *maybe_local_decls = NULL;
4e3825db 1522 unsigned i;
c021f10b 1523 unsigned len;
727a31fa 1524
1f6d3a08
RH
1525 /* Compute the phase of the stack frame for this function. */
1526 {
1527 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1528 int off = STARTING_FRAME_OFFSET % align;
1529 frame_phase = off ? align - off : 0;
1530 }
727a31fa 1531
ff28a94d 1532 init_vars_expansion ();
7d69de61 1533
4e3825db
MM
1534 for (i = 0; i < SA.map->num_partitions; i++)
1535 {
1536 tree var = partition_to_var (SA.map, i);
1537
1538 gcc_assert (is_gimple_reg (var));
1539 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1540 expand_one_var (var, true, true);
1541 else
1542 {
1543 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1544 contain the default def (representing the parm or result itself)
1545 we don't do anything here. But those which don't contain the
1546 default def (representing a temporary based on the parm/result)
1547 we need to allocate space just like for normal VAR_DECLs. */
1548 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1549 {
1550 expand_one_var (var, true, true);
1551 gcc_assert (SA.partition_to_pseudo[i]);
1552 }
1553 }
1554 }
1555
cb91fab0 1556 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1557 set are not associated with any block scope. Lay them out. */
c021f10b
NF
1558
1559 len = VEC_length (tree, cfun->local_decls);
1560 FOR_EACH_LOCAL_DECL (cfun, i, var)
1f6d3a08 1561 {
1f6d3a08
RH
1562 bool expand_now = false;
1563
4e3825db
MM
1564 /* Expanded above already. */
1565 if (is_gimple_reg (var))
eb7adebc
MM
1566 {
1567 TREE_USED (var) = 0;
3adcf52c 1568 goto next;
eb7adebc 1569 }
1f6d3a08
RH
1570 /* We didn't set a block for static or extern because it's hard
1571 to tell the difference between a global variable (re)declared
1572 in a local scope, and one that's really declared there to
1573 begin with. And it doesn't really matter much, since we're
1574 not giving them stack space. Expand them now. */
4e3825db 1575 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1576 expand_now = true;
1577
1578 /* If the variable is not associated with any block, then it
1579 was created by the optimizers, and could be live anywhere
1580 in the function. */
1581 else if (TREE_USED (var))
1582 expand_now = true;
1583
1584 /* Finally, mark all variables on the list as used. We'll use
1585 this in a moment when we expand those associated with scopes. */
1586 TREE_USED (var) = 1;
1587
1588 if (expand_now)
3adcf52c
JM
1589 expand_one_var (var, true, true);
1590
1591 next:
1592 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
802e9f8e 1593 {
3adcf52c
JM
1594 rtx rtl = DECL_RTL_IF_SET (var);
1595
1596 /* Keep artificial non-ignored vars in cfun->local_decls
1597 chain until instantiate_decls. */
1598 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1599 add_local_decl (cfun, var);
6c6366f6 1600 else if (rtl == NULL_RTX)
c021f10b
NF
1601 /* If rtl isn't set yet, which can happen e.g. with
1602 -fstack-protector, retry before returning from this
1603 function. */
1604 VEC_safe_push (tree, heap, maybe_local_decls, var);
802e9f8e 1605 }
1f6d3a08 1606 }
1f6d3a08 1607
c021f10b
NF
1608 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1609
1610 +-----------------+-----------------+
1611 | ...processed... | ...duplicates...|
1612 +-----------------+-----------------+
1613 ^
1614 +-- LEN points here.
1615
1616 We just want the duplicates, as those are the artificial
1617 non-ignored vars that we want to keep until instantiate_decls.
1618 Move them down and truncate the array. */
1619 if (!VEC_empty (tree, cfun->local_decls))
1620 VEC_block_remove (tree, cfun->local_decls, 0, len);
1621
1f6d3a08
RH
1622 /* At this point, all variables within the block tree with TREE_USED
1623 set are actually used by the optimized function. Lay them out. */
1624 expand_used_vars_for_block (outer_block, true);
1625
1626 if (stack_vars_num > 0)
1627 {
47598145 1628 add_scope_conflicts ();
1f6d3a08 1629 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1630 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1631 reflect this. */
1632 add_alias_set_conflicts ();
1633
c22cacf3 1634 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1635 vulnerable data and non-vulnerable data. */
1636 if (flag_stack_protect)
1637 add_stack_protection_conflicts ();
1638
c22cacf3 1639 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1640 minimal interference graph, attempt to save some stack space. */
1641 partition_stack_vars ();
1642 if (dump_file)
1643 dump_stack_var_partition ();
7d69de61
RH
1644 }
1645
1646 /* There are several conditions under which we should create a
1647 stack guard: protect-all, alloca used, protected decls present. */
1648 if (flag_stack_protect == 2
1649 || (flag_stack_protect
e3b5732b 1650 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1651 create_stack_guard ();
1f6d3a08 1652
7d69de61
RH
1653 /* Assign rtl to each variable based on these partitions. */
1654 if (stack_vars_num > 0)
1655 {
1656 /* Reorder decls to be protected by iterating over the variables
1657 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1658 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1659 earlier, such that we naturally see these variables first,
1660 and thus naturally allocate things in the right order. */
1661 if (has_protected_decls)
1662 {
1663 /* Phase 1 contains only character arrays. */
1664 expand_stack_vars (stack_protect_decl_phase_1);
1665
1666 /* Phase 2 contains other kinds of arrays. */
1667 if (flag_stack_protect == 2)
1668 expand_stack_vars (stack_protect_decl_phase_2);
1669 }
1670
1671 expand_stack_vars (NULL);
1f6d3a08 1672
ff28a94d 1673 fini_vars_expansion ();
1f6d3a08
RH
1674 }
1675
6c6366f6
JJ
1676 /* If there were any artificial non-ignored vars without rtl
1677 found earlier, see if deferred stack allocation hasn't assigned
1678 rtl to them. */
c021f10b 1679 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
6c6366f6 1680 {
6c6366f6
JJ
1681 rtx rtl = DECL_RTL_IF_SET (var);
1682
6c6366f6
JJ
1683 /* Keep artificial non-ignored vars in cfun->local_decls
1684 chain until instantiate_decls. */
1685 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
c021f10b 1686 add_local_decl (cfun, var);
6c6366f6 1687 }
c021f10b 1688 VEC_free (tree, heap, maybe_local_decls);
6c6366f6 1689
1f6d3a08
RH
1690 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1691 if (STACK_ALIGNMENT_NEEDED)
1692 {
1693 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1694 if (!FRAME_GROWS_DOWNWARD)
1695 frame_offset += align - 1;
1696 frame_offset &= -align;
1697 }
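/* Editor's illustrative note, not part of the original source: with
   align = 16 and an upward-growing frame, a frame_offset of 13 becomes
   (13 + 15) & -16 = 16; when FRAME_GROWS_DOWNWARD, frame_offset is
   non-positive and the mask alone rounds it away from zero, e.g.
   -13 & -16 = -16, keeping the frame aligned.  */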
727a31fa
RH
1698}
1699
1700
b7211528
SB
1701/* If we need to produce a detailed dump, print the tree representation
1702 for STMT to the dump file. SINCE is the last RTX after which the RTL
1703 generated for STMT should have been appended. */
1704
1705static void
726a989a 1706maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1707{
1708 if (dump_file && (dump_flags & TDF_DETAILS))
1709 {
1710 fprintf (dump_file, "\n;; ");
b5b8b0ac
AO
1711 print_gimple_stmt (dump_file, stmt, 0,
1712 TDF_SLIM | (dump_flags & TDF_LINENO));
b7211528
SB
1713 fprintf (dump_file, "\n");
1714
1715 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1716 }
1717}
1718
8b11009b
ZD
1719/* Maps the blocks that do not contain tree labels to rtx labels. */
1720
1721static struct pointer_map_t *lab_rtx_for_bb;
1722
a9b77cd1
ZD
1723/* Returns the label_rtx expression for a label starting basic block BB. */
1724
1725static rtx
726a989a 1726label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1727{
726a989a
RB
1728 gimple_stmt_iterator gsi;
1729 tree lab;
1730 gimple lab_stmt;
8b11009b 1731 void **elt;
a9b77cd1
ZD
1732
1733 if (bb->flags & BB_RTL)
1734 return block_label (bb);
1735
8b11009b
ZD
1736 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1737 if (elt)
ae50c0cb 1738 return (rtx) *elt;
8b11009b
ZD
1739
1740 /* Find the tree label if it is present. */
b8698a0f 1741
726a989a 1742 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1743 {
726a989a
RB
1744 lab_stmt = gsi_stmt (gsi);
1745 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1746 break;
1747
726a989a 1748 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1749 if (DECL_NONLOCAL (lab))
1750 break;
1751
1752 return label_rtx (lab);
1753 }
1754
8b11009b
ZD
1755 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1756 *elt = gen_label_rtx ();
ae50c0cb 1757 return (rtx) *elt;
a9b77cd1
ZD
1758}
1759
726a989a 1760
529ff441
MM
1761/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1762 of a basic block where we just expanded the conditional at the end,
315adeda
MM
1763 possibly clean up the CFG and instruction sequence. LAST is the
1764 last instruction before the just emitted jump sequence. */
529ff441
MM
1765
1766static void
315adeda 1767maybe_cleanup_end_of_block (edge e, rtx last)
529ff441
MM
1768{
1769 /* Special case: when jumpif decides that the condition is
1770 trivial it emits an unconditional jump (and the necessary
1771 barrier). But we still have two edges, the fallthru one is
1772 wrong. purge_dead_edges would clean this up later. Unfortunately
1773 we have to insert insns (and split edges) before
1774 find_many_sub_basic_blocks and hence before purge_dead_edges.
1775 But splitting edges might create new blocks which depend on the
1776 fact that if there are two edges there's no barrier. So the
1777 barrier would get lost and verify_flow_info would ICE. Instead
1778 of auditing all edge splitters to care for the barrier (which
1779 normally isn't there in a cleaned CFG), fix it here. */
1780 if (BARRIER_P (get_last_insn ()))
1781 {
529ff441
MM
1782 rtx insn;
1783 remove_edge (e);
1784 /* Now, we have a single successor block, if we have insns to
1785 insert on the remaining edge we potentially will insert
1786 it at the end of this block (if the dest block isn't feasible)
1787 in order to avoid splitting the edge. This insertion will take
1788 place in front of the last jump. But we might have emitted
1789 multiple jumps (conditional and one unconditional) to the
1790 same destination. Inserting in front of the last one then
1791 is a problem. See PR 40021. We fix this by deleting all
1792 jumps except the last unconditional one. */
1793 insn = PREV_INSN (get_last_insn ());
1794 /* Make sure we have an unconditional jump. Otherwise we're
1795 confused. */
1796 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
315adeda 1797 for (insn = PREV_INSN (insn); insn != last;)
529ff441
MM
1798 {
1799 insn = PREV_INSN (insn);
1800 if (JUMP_P (NEXT_INSN (insn)))
90eb3e33 1801 {
8a269cb7 1802 if (!any_condjump_p (NEXT_INSN (insn)))
90eb3e33
JJ
1803 {
1804 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1805 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1806 }
1807 delete_insn (NEXT_INSN (insn));
1808 }
529ff441
MM
1809 }
1810 }
1811}
1812
726a989a 1813/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1814 Returns a new basic block if we've terminated the current basic
1815 block and created a new one. */
1816
1817static basic_block
726a989a 1818expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1819{
1820 basic_block new_bb, dest;
1821 edge new_edge;
1822 edge true_edge;
1823 edge false_edge;
b7211528 1824 rtx last2, last;
28ed065e
MM
1825 enum tree_code code;
1826 tree op0, op1;
1827
1828 code = gimple_cond_code (stmt);
1829 op0 = gimple_cond_lhs (stmt);
1830 op1 = gimple_cond_rhs (stmt);
1831 /* We're sometimes presented with such code:
1832 D.123_1 = x < y;
1833 if (D.123_1 != 0)
1834 ...
1835 This would expand to two comparisons which then later might
1836 be cleaned up by combine. But some pattern matchers like if-conversion
1837 work better when there's only one compare, so make up for this
 1838 here as a special exception if TER would have made the same change. */
1839 if (gimple_cond_single_var_p (stmt)
1840 && SA.values
1841 && TREE_CODE (op0) == SSA_NAME
1842 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1843 {
1844 gimple second = SSA_NAME_DEF_STMT (op0);
e83f4b68 1845 if (gimple_code (second) == GIMPLE_ASSIGN)
28ed065e 1846 {
e83f4b68
MM
1847 enum tree_code code2 = gimple_assign_rhs_code (second);
1848 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1849 {
1850 code = code2;
1851 op0 = gimple_assign_rhs1 (second);
1852 op1 = gimple_assign_rhs2 (second);
1853 }
1854 /* If jumps are cheap turn some more codes into
1855 jumpy sequences. */
1856 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1857 {
1858 if ((code2 == BIT_AND_EXPR
1859 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1860 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1861 || code2 == TRUTH_AND_EXPR)
1862 {
1863 code = TRUTH_ANDIF_EXPR;
1864 op0 = gimple_assign_rhs1 (second);
1865 op1 = gimple_assign_rhs2 (second);
1866 }
1867 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1868 {
1869 code = TRUTH_ORIF_EXPR;
1870 op0 = gimple_assign_rhs1 (second);
1871 op1 = gimple_assign_rhs2 (second);
1872 }
1873 }
28ed065e
MM
1874 }
1875 }
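/* Editor's illustrative note, not part of the original source: for a
   sequence such as  _1 = a_2 & b_3; if (_1 != 0)  where a_2 and b_3 are
   one-bit non-constant values and jumps are cheap, the code above
   rewrites the condition as a_2 && b_3 (TRUTH_ANDIF_EXPR), so it
   expands to two cheap conditional jumps instead of materializing the
   BIT_AND result first.  */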
b7211528
SB
1876
1877 last2 = last = get_last_insn ();
80c7a9eb
RH
1878
1879 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
c82fee88
EB
1880 set_curr_insn_source_location (gimple_location (stmt));
1881 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1882
1883 /* These flags have no purpose in RTL land. */
1884 true_edge->flags &= ~EDGE_TRUE_VALUE;
1885 false_edge->flags &= ~EDGE_FALSE_VALUE;
1886
1887 /* We can either have a pure conditional jump with one fallthru edge or
 1888 a two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1889 if (false_edge->dest == bb->next_bb)
80c7a9eb 1890 {
40e90eac
JJ
1891 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1892 true_edge->probability);
726a989a 1893 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1894 if (true_edge->goto_locus)
7241571e
JJ
1895 {
1896 set_curr_insn_source_location (true_edge->goto_locus);
1897 set_curr_insn_block (true_edge->goto_block);
1898 true_edge->goto_locus = curr_insn_locator ();
1899 }
1900 true_edge->goto_block = NULL;
a9b77cd1 1901 false_edge->flags |= EDGE_FALLTHRU;
315adeda 1902 maybe_cleanup_end_of_block (false_edge, last);
80c7a9eb
RH
1903 return NULL;
1904 }
a9b77cd1 1905 if (true_edge->dest == bb->next_bb)
80c7a9eb 1906 {
40e90eac
JJ
1907 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1908 false_edge->probability);
726a989a 1909 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1910 if (false_edge->goto_locus)
7241571e
JJ
1911 {
1912 set_curr_insn_source_location (false_edge->goto_locus);
1913 set_curr_insn_block (false_edge->goto_block);
1914 false_edge->goto_locus = curr_insn_locator ();
1915 }
1916 false_edge->goto_block = NULL;
a9b77cd1 1917 true_edge->flags |= EDGE_FALLTHRU;
315adeda 1918 maybe_cleanup_end_of_block (true_edge, last);
80c7a9eb
RH
1919 return NULL;
1920 }
80c7a9eb 1921
40e90eac
JJ
1922 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1923 true_edge->probability);
80c7a9eb 1924 last = get_last_insn ();
7241571e
JJ
1925 if (false_edge->goto_locus)
1926 {
1927 set_curr_insn_source_location (false_edge->goto_locus);
1928 set_curr_insn_block (false_edge->goto_block);
1929 false_edge->goto_locus = curr_insn_locator ();
1930 }
1931 false_edge->goto_block = NULL;
a9b77cd1 1932 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1933
1934 BB_END (bb) = last;
1935 if (BARRIER_P (BB_END (bb)))
1936 BB_END (bb) = PREV_INSN (BB_END (bb));
1937 update_bb_for_insn (bb);
1938
1939 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1940 dest = false_edge->dest;
1941 redirect_edge_succ (false_edge, new_bb);
1942 false_edge->flags |= EDGE_FALLTHRU;
1943 new_bb->count = false_edge->count;
1944 new_bb->frequency = EDGE_FREQUENCY (false_edge);
7d776ee2
RG
1945 if (current_loops && bb->loop_father)
1946 add_bb_to_loop (new_bb, bb->loop_father);
80c7a9eb
RH
1947 new_edge = make_edge (new_bb, dest, 0);
1948 new_edge->probability = REG_BR_PROB_BASE;
1949 new_edge->count = new_bb->count;
1950 if (BARRIER_P (BB_END (new_bb)))
1951 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1952 update_bb_for_insn (new_bb);
1953
726a989a 1954 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1955
7787b4aa
JJ
1956 if (true_edge->goto_locus)
1957 {
1958 set_curr_insn_source_location (true_edge->goto_locus);
1959 set_curr_insn_block (true_edge->goto_block);
1960 true_edge->goto_locus = curr_insn_locator ();
1961 }
1962 true_edge->goto_block = NULL;
1963
80c7a9eb
RH
1964 return new_bb;
1965}
1966
0a35513e
AH
1967/* Mark all calls that can have a transaction restart. */
1968
1969static void
1970mark_transaction_restart_calls (gimple stmt)
1971{
1972 struct tm_restart_node dummy;
1973 void **slot;
1974
1975 if (!cfun->gimple_df->tm_restart)
1976 return;
1977
1978 dummy.stmt = stmt;
1979 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1980 if (slot)
1981 {
1982 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1983 tree list = n->label_or_list;
1984 rtx insn;
1985
1986 for (insn = next_real_insn (get_last_insn ());
1987 !CALL_P (insn);
1988 insn = next_real_insn (insn))
1989 continue;
1990
1991 if (TREE_CODE (list) == LABEL_DECL)
1992 add_reg_note (insn, REG_TM, label_rtx (list));
1993 else
1994 for (; list ; list = TREE_CHAIN (list))
1995 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1996 }
1997}
1998
28ed065e
MM
1999/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2000 statement STMT. */
2001
2002static void
2003expand_call_stmt (gimple stmt)
2004{
25583c4f 2005 tree exp, decl, lhs;
e23817b3 2006 bool builtin_p;
e7925582 2007 size_t i;
28ed065e 2008
25583c4f
RS
2009 if (gimple_call_internal_p (stmt))
2010 {
2011 expand_internal_call (stmt);
2012 return;
2013 }
2014
28ed065e
MM
2015 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2016
2017 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
e23817b3
RG
2018 decl = gimple_call_fndecl (stmt);
2019 builtin_p = decl && DECL_BUILT_IN (decl);
2020
e7925582
EB
2021 /* If this is not a builtin function, the function type through which the
2022 call is made may be different from the type of the function. */
2023 if (!builtin_p)
2024 CALL_EXPR_FN (exp)
b25aa0e8
EB
2025 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2026 CALL_EXPR_FN (exp));
e7925582 2027
28ed065e
MM
2028 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2029 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2030
2031 for (i = 0; i < gimple_call_num_args (stmt); i++)
e23817b3
RG
2032 {
2033 tree arg = gimple_call_arg (stmt, i);
2034 gimple def;
2035 /* TER addresses into arguments of builtin functions so we have a
2036 chance to infer more correct alignment information. See PR39954. */
2037 if (builtin_p
2038 && TREE_CODE (arg) == SSA_NAME
2039 && (def = get_gimple_for_ssa_name (arg))
2040 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2041 arg = gimple_assign_rhs1 (def);
2042 CALL_EXPR_ARG (exp, i) = arg;
2043 }
28ed065e 2044
93f28ca7 2045 if (gimple_has_side_effects (stmt))
28ed065e
MM
2046 TREE_SIDE_EFFECTS (exp) = 1;
2047
93f28ca7 2048 if (gimple_call_nothrow_p (stmt))
28ed065e
MM
2049 TREE_NOTHROW (exp) = 1;
2050
2051 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2052 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
63d2a353
MM
2053 if (decl
2054 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13e49da9
TV
2055 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2056 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
2057 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2058 else
2059 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
28ed065e
MM
2060 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2061 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2062 TREE_BLOCK (exp) = gimple_block (stmt);
2063
ddb555ed
JJ
2064 /* Ensure RTL is created for debug args. */
2065 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2066 {
2067 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2068 unsigned int ix;
2069 tree dtemp;
2070
2071 if (debug_args)
2072 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2073 {
2074 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2075 expand_debug_expr (dtemp);
2076 }
2077 }
2078
25583c4f 2079 lhs = gimple_call_lhs (stmt);
28ed065e
MM
2080 if (lhs)
2081 expand_assignment (lhs, exp, false);
2082 else
2083 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
0a35513e
AH
2084
2085 mark_transaction_restart_calls (stmt);
28ed065e
MM
2086}
2087
2088/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2089 STMT that doesn't require special handling for outgoing edges. That
2090 is no tailcalls and no GIMPLE_COND. */
2091
2092static void
2093expand_gimple_stmt_1 (gimple stmt)
2094{
2095 tree op0;
c82fee88
EB
2096
2097 set_curr_insn_source_location (gimple_location (stmt));
2098 set_curr_insn_block (gimple_block (stmt));
2099
28ed065e
MM
2100 switch (gimple_code (stmt))
2101 {
2102 case GIMPLE_GOTO:
2103 op0 = gimple_goto_dest (stmt);
2104 if (TREE_CODE (op0) == LABEL_DECL)
2105 expand_goto (op0);
2106 else
2107 expand_computed_goto (op0);
2108 break;
2109 case GIMPLE_LABEL:
2110 expand_label (gimple_label_label (stmt));
2111 break;
2112 case GIMPLE_NOP:
2113 case GIMPLE_PREDICT:
2114 break;
28ed065e
MM
2115 case GIMPLE_SWITCH:
2116 expand_case (stmt);
2117 break;
2118 case GIMPLE_ASM:
2119 expand_asm_stmt (stmt);
2120 break;
2121 case GIMPLE_CALL:
2122 expand_call_stmt (stmt);
2123 break;
2124
2125 case GIMPLE_RETURN:
2126 op0 = gimple_return_retval (stmt);
2127
2128 if (op0 && op0 != error_mark_node)
2129 {
2130 tree result = DECL_RESULT (current_function_decl);
2131
2132 /* If we are not returning the current function's RESULT_DECL,
2133 build an assignment to it. */
2134 if (op0 != result)
2135 {
2136 /* I believe that a function's RESULT_DECL is unique. */
2137 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2138
 2139 /* ??? We'd like to simply use expand_assignment here,
2140 but this fails if the value is of BLKmode but the return
2141 decl is a register. expand_return has special handling
2142 for this combination, which eventually should move
2143 to common code. See comments there. Until then, let's
2144 build a modify expression :-/ */
2145 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2146 result, op0);
2147 }
2148 }
2149 if (!op0)
2150 expand_null_return ();
2151 else
2152 expand_return (op0);
2153 break;
2154
2155 case GIMPLE_ASSIGN:
2156 {
2157 tree lhs = gimple_assign_lhs (stmt);
2158
2159 /* Tree expand used to fiddle with |= and &= of two bitfield
 2160 COMPONENT_REFs here. This can't happen with gimple; the LHS
2161 of binary assigns must be a gimple reg. */
2162
2163 if (TREE_CODE (lhs) != SSA_NAME
2164 || get_gimple_rhs_class (gimple_expr_code (stmt))
2165 == GIMPLE_SINGLE_RHS)
2166 {
2167 tree rhs = gimple_assign_rhs1 (stmt);
2168 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2169 == GIMPLE_SINGLE_RHS);
2170 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2171 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
47598145
MM
2172 if (TREE_CLOBBER_P (rhs))
2173 /* This is a clobber to mark the going out of scope for
2174 this LHS. */
2175 ;
2176 else
2177 expand_assignment (lhs, rhs,
2178 gimple_assign_nontemporal_move_p (stmt));
28ed065e
MM
2179 }
2180 else
2181 {
2182 rtx target, temp;
2183 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2184 struct separate_ops ops;
2185 bool promoted = false;
2186
2187 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2188 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2189 promoted = true;
2190
2191 ops.code = gimple_assign_rhs_code (stmt);
2192 ops.type = TREE_TYPE (lhs);
2193 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2194 {
0354c0c7
BS
2195 case GIMPLE_TERNARY_RHS:
2196 ops.op2 = gimple_assign_rhs3 (stmt);
2197 /* Fallthru */
28ed065e
MM
2198 case GIMPLE_BINARY_RHS:
2199 ops.op1 = gimple_assign_rhs2 (stmt);
2200 /* Fallthru */
2201 case GIMPLE_UNARY_RHS:
2202 ops.op0 = gimple_assign_rhs1 (stmt);
2203 break;
2204 default:
2205 gcc_unreachable ();
2206 }
2207 ops.location = gimple_location (stmt);
2208
2209 /* If we want to use a nontemporal store, force the value to
2210 register first. If we store into a promoted register,
2211 don't directly expand to target. */
2212 temp = nontemporal || promoted ? NULL_RTX : target;
2213 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2214 EXPAND_NORMAL);
2215
2216 if (temp == target)
2217 ;
2218 else if (promoted)
2219 {
4e18a7d4 2220 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
28ed065e
MM
2221 /* If TEMP is a VOIDmode constant, use convert_modes to make
2222 sure that we properly convert it. */
2223 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2224 {
2225 temp = convert_modes (GET_MODE (target),
2226 TYPE_MODE (ops.type),
4e18a7d4 2227 temp, unsignedp);
28ed065e 2228 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4e18a7d4 2229 GET_MODE (target), temp, unsignedp);
28ed065e
MM
2230 }
2231
4e18a7d4 2232 convert_move (SUBREG_REG (target), temp, unsignedp);
28ed065e
MM
2233 }
2234 else if (nontemporal && emit_storent_insn (target, temp))
2235 ;
2236 else
2237 {
2238 temp = force_operand (temp, target);
2239 if (temp != target)
2240 emit_move_insn (target, temp);
2241 }
2242 }
2243 }
2244 break;
2245
2246 default:
2247 gcc_unreachable ();
2248 }
2249}
2250
2251/* Expand one gimple statement STMT and return the last RTL instruction
2252 before any of the newly generated ones.
2253
2254 In addition to generating the necessary RTL instructions this also
2255 sets REG_EH_REGION notes if necessary and sets the current source
2256 location for diagnostics. */
2257
2258static rtx
2259expand_gimple_stmt (gimple stmt)
2260{
28ed065e 2261 location_t saved_location = input_location;
c82fee88
EB
2262 rtx last = get_last_insn ();
2263 int lp_nr;
28ed065e 2264
28ed065e
MM
2265 gcc_assert (cfun);
2266
c82fee88
EB
2267 /* We need to save and restore the current source location so that errors
2268 discovered during expansion are emitted with the right location. But
2269 it would be better if the diagnostic routines used the source location
2270 embedded in the tree nodes rather than globals. */
28ed065e 2271 if (gimple_has_location (stmt))
c82fee88 2272 input_location = gimple_location (stmt);
28ed065e
MM
2273
2274 expand_gimple_stmt_1 (stmt);
c82fee88 2275
28ed065e
MM
2276 /* Free any temporaries used to evaluate this statement. */
2277 free_temp_slots ();
2278
2279 input_location = saved_location;
2280
2281 /* Mark all insns that may trap. */
1d65f45c
RH
2282 lp_nr = lookup_stmt_eh_lp (stmt);
2283 if (lp_nr)
28ed065e
MM
2284 {
2285 rtx insn;
2286 for (insn = next_real_insn (last); insn;
2287 insn = next_real_insn (insn))
2288 {
2289 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2290 /* If we want exceptions for non-call insns, any
2291 may_trap_p instruction may throw. */
2292 && GET_CODE (PATTERN (insn)) != CLOBBER
2293 && GET_CODE (PATTERN (insn)) != USE
1d65f45c
RH
2294 && insn_could_throw_p (insn))
2295 make_reg_eh_region_note (insn, 0, lp_nr);
28ed065e
MM
2296 }
2297 }
2298
2299 return last;
2300}
2301
726a989a 2302/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
2303 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2304 generated a tail call (something that might be denied by the ABI
cea49550
RH
2305 rules governing the call; see calls.c).
2306
2307 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2308 can still reach the rest of BB. The case here is __builtin_sqrt,
2309 where the NaN result goes through the external function (with a
2310 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
2311
2312static basic_block
726a989a 2313expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 2314{
b7211528 2315 rtx last2, last;
224e770b 2316 edge e;
628f6a4e 2317 edge_iterator ei;
224e770b
RH
2318 int probability;
2319 gcov_type count;
80c7a9eb 2320
28ed065e 2321 last2 = last = expand_gimple_stmt (stmt);
80c7a9eb
RH
2322
2323 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
2324 if (CALL_P (last) && SIBLING_CALL_P (last))
2325 goto found;
80c7a9eb 2326
726a989a 2327 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2328
cea49550 2329 *can_fallthru = true;
224e770b 2330 return NULL;
80c7a9eb 2331
224e770b
RH
2332 found:
2333 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2334 Any instructions emitted here are about to be deleted. */
2335 do_pending_stack_adjust ();
2336
2337 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2338 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2339 EH or abnormal edges, we shouldn't have created a tail call in
2340 the first place. So it seems to me we should just be removing
2341 all edges here, or redirecting the existing fallthru edge to
2342 the exit block. */
2343
224e770b
RH
2344 probability = 0;
2345 count = 0;
224e770b 2346
628f6a4e
BE
2347 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2348 {
224e770b
RH
2349 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2350 {
2351 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 2352 {
224e770b
RH
2353 e->dest->count -= e->count;
2354 e->dest->frequency -= EDGE_FREQUENCY (e);
2355 if (e->dest->count < 0)
c22cacf3 2356 e->dest->count = 0;
224e770b 2357 if (e->dest->frequency < 0)
c22cacf3 2358 e->dest->frequency = 0;
80c7a9eb 2359 }
224e770b
RH
2360 count += e->count;
2361 probability += e->probability;
2362 remove_edge (e);
80c7a9eb 2363 }
628f6a4e
BE
2364 else
2365 ei_next (&ei);
80c7a9eb
RH
2366 }
2367
224e770b
RH
2368 /* This is somewhat ugly: the call_expr expander often emits instructions
2369 after the sibcall (to perform the function return). These confuse the
12eff7b7 2370 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 2371 last = NEXT_INSN (last);
341c100f 2372 gcc_assert (BARRIER_P (last));
cea49550
RH
2373
2374 *can_fallthru = false;
224e770b
RH
2375 while (NEXT_INSN (last))
2376 {
 2377 /* For instance, an sqrt builtin expander expands an if with a
 2378 sibcall in the then-arm and a label for the else-arm. */
2379 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
2380 {
2381 *can_fallthru = true;
2382 break;
2383 }
224e770b
RH
2384 delete_insn (NEXT_INSN (last));
2385 }
2386
2387 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2388 e->probability += probability;
2389 e->count += count;
2390 BB_END (bb) = last;
2391 update_bb_for_insn (bb);
2392
2393 if (NEXT_INSN (last))
2394 {
2395 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2396
2397 last = BB_END (bb);
2398 if (BARRIER_P (last))
2399 BB_END (bb) = PREV_INSN (last);
2400 }
2401
726a989a 2402 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2403
224e770b 2404 return bb;
80c7a9eb
RH
2405}
2406
b5b8b0ac
AO
2407/* Return the difference between the floor and the truncated result of
2408 a signed division by OP1 with remainder MOD. */
2409static rtx
2410floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2411{
2412 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2413 return gen_rtx_IF_THEN_ELSE
2414 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2415 gen_rtx_IF_THEN_ELSE
2416 (mode, gen_rtx_LT (BImode,
2417 gen_rtx_DIV (mode, op1, mod),
2418 const0_rtx),
2419 constm1_rtx, const0_rtx),
2420 const0_rtx);
2421}
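/* Editor's illustrative note, not part of the original source: for
   op0 = -7, op1 = 2 the truncated quotient is -3 with mod = -1; since
   mod != 0 and op1 / mod = -2 < 0, the adjustment is -1, giving the
   floor result -3 + -1 = -4.  */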
2422
2423/* Return the difference between the ceil and the truncated result of
2424 a signed division by OP1 with remainder MOD. */
2425static rtx
2426ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2427{
2428 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2429 return gen_rtx_IF_THEN_ELSE
2430 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2431 gen_rtx_IF_THEN_ELSE
2432 (mode, gen_rtx_GT (BImode,
2433 gen_rtx_DIV (mode, op1, mod),
2434 const0_rtx),
2435 const1_rtx, const0_rtx),
2436 const0_rtx);
2437}
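/* Editor's illustrative note, not part of the original source: for
   op0 = 7, op1 = 2 the truncated quotient is 3 with mod = 1; since
   op1 / mod = 2 > 0, the adjustment is 1, giving the ceiling result
   3 + 1 = 4.  */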
2438
2439/* Return the difference between the ceil and the truncated result of
2440 an unsigned division by OP1 with remainder MOD. */
2441static rtx
2442ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2443{
2444 /* (mod != 0 ? 1 : 0) */
2445 return gen_rtx_IF_THEN_ELSE
2446 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2447 const1_rtx, const0_rtx);
2448}
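/* Editor's illustrative note, not part of the original source: for an
   unsigned division 7 / 2 the truncated quotient is 3 with mod = 1;
   mod != 0, so the adjustment is 1, giving the ceiling result 4.  */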
2449
2450/* Return the difference between the rounded and the truncated result
2451 of a signed division by OP1 with remainder MOD. Halfway cases are
2452 rounded away from zero, rather than to the nearest even number. */
2453static rtx
2454round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2455{
2456 /* (abs (mod) >= abs (op1) - abs (mod)
2457 ? (op1 / mod > 0 ? 1 : -1)
2458 : 0) */
2459 return gen_rtx_IF_THEN_ELSE
2460 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2461 gen_rtx_MINUS (mode,
2462 gen_rtx_ABS (mode, op1),
2463 gen_rtx_ABS (mode, mod))),
2464 gen_rtx_IF_THEN_ELSE
2465 (mode, gen_rtx_GT (BImode,
2466 gen_rtx_DIV (mode, op1, mod),
2467 const0_rtx),
2468 const1_rtx, constm1_rtx),
2469 const0_rtx);
2470}
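/* Editor's illustrative note, not part of the original source: for
   op0 = 7, op1 = 2 the truncated quotient is 3 with mod = 1; since
   abs (mod) = 1 >= abs (op1) - abs (mod) = 1 and op1 / mod > 0, the
   adjustment is 1, rounding 3.5 away from zero to 4.  */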
2471
2472/* Return the difference between the rounded and the truncated result
 2473 of an unsigned division by OP1 with remainder MOD. Halfway cases
2474 are rounded away from zero, rather than to the nearest even
2475 number. */
2476static rtx
2477round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2478{
2479 /* (mod >= op1 - mod ? 1 : 0) */
2480 return gen_rtx_IF_THEN_ELSE
2481 (mode, gen_rtx_GE (BImode, mod,
2482 gen_rtx_MINUS (mode, op1, mod)),
2483 const1_rtx, const0_rtx);
2484}
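/* Editor's illustrative note, not part of the original source: for an
   unsigned division 7 / 2 the truncated quotient is 3 with mod = 1;
   mod >= op1 - mod (1 >= 1) holds, so the adjustment is 1 and the
   rounded result is 4.  */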
2485
dda2da58
AO
2486/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2487 any rtl. */
2488
2489static rtx
f61c6f34
JJ
2490convert_debug_memory_address (enum machine_mode mode, rtx x,
2491 addr_space_t as)
dda2da58
AO
2492{
2493 enum machine_mode xmode = GET_MODE (x);
2494
2495#ifndef POINTERS_EXTEND_UNSIGNED
f61c6f34
JJ
2496 gcc_assert (mode == Pmode
2497 || mode == targetm.addr_space.address_mode (as));
dda2da58
AO
2498 gcc_assert (xmode == mode || xmode == VOIDmode);
2499#else
f61c6f34 2500 rtx temp;
f61c6f34 2501
639d4bb8 2502 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
dda2da58
AO
2503
2504 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2505 return x;
2506
69660a70 2507 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
dda2da58
AO
2508 x = simplify_gen_subreg (mode, x, xmode,
2509 subreg_lowpart_offset
2510 (mode, xmode));
2511 else if (POINTERS_EXTEND_UNSIGNED > 0)
2512 x = gen_rtx_ZERO_EXTEND (mode, x);
2513 else if (!POINTERS_EXTEND_UNSIGNED)
2514 x = gen_rtx_SIGN_EXTEND (mode, x);
2515 else
f61c6f34
JJ
2516 {
2517 switch (GET_CODE (x))
2518 {
2519 case SUBREG:
2520 if ((SUBREG_PROMOTED_VAR_P (x)
2521 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2522 || (GET_CODE (SUBREG_REG (x)) == PLUS
2523 && REG_P (XEXP (SUBREG_REG (x), 0))
2524 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2525 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2526 && GET_MODE (SUBREG_REG (x)) == mode)
2527 return SUBREG_REG (x);
2528 break;
2529 case LABEL_REF:
2530 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2531 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2532 return temp;
2533 case SYMBOL_REF:
2534 temp = shallow_copy_rtx (x);
2535 PUT_MODE (temp, mode);
2536 return temp;
2537 case CONST:
2538 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2539 if (temp)
2540 temp = gen_rtx_CONST (mode, temp);
2541 return temp;
2542 case PLUS:
2543 case MINUS:
2544 if (CONST_INT_P (XEXP (x, 1)))
2545 {
2546 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2547 if (temp)
2548 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2549 }
2550 break;
2551 default:
2552 break;
2553 }
 2554 /* Don't know how to express ptr_extend as an operation in debug info. */
2555 return NULL;
2556 }
dda2da58
AO
2557#endif /* POINTERS_EXTEND_UNSIGNED */
2558
2559 return x;
2560}
2561
12c5ffe5
EB
2562/* Return an RTX equivalent to the value of the parameter DECL. */
2563
2564static rtx
2565expand_debug_parm_decl (tree decl)
2566{
2567 rtx incoming = DECL_INCOMING_RTL (decl);
2568
2569 if (incoming
2570 && GET_MODE (incoming) != BLKmode
2571 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2572 || (MEM_P (incoming)
2573 && REG_P (XEXP (incoming, 0))
2574 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2575 {
2576 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2577
2578#ifdef HAVE_window_save
2579 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2580 If the target machine has an explicit window save instruction, the
2581 actual entry value is the corresponding OUTGOING_REGNO instead. */
2582 if (REG_P (incoming)
2583 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2584 incoming
2585 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2586 OUTGOING_REGNO (REGNO (incoming)), 0);
2587 else if (MEM_P (incoming))
2588 {
2589 rtx reg = XEXP (incoming, 0);
2590 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2591 {
2592 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2593 incoming = replace_equiv_address_nv (incoming, reg);
2594 }
2595 }
2596#endif
2597
2598 ENTRY_VALUE_EXP (rtl) = incoming;
2599 return rtl;
2600 }
2601
2602 if (incoming
2603 && GET_MODE (incoming) != BLKmode
2604 && !TREE_ADDRESSABLE (decl)
2605 && MEM_P (incoming)
2606 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2607 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2608 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2609 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2610 return incoming;
2611
2612 return NULL_RTX;
2613}
2614
2615/* Return an RTX equivalent to the value of the tree expression EXP. */
b5b8b0ac
AO
2616
2617static rtx
2618expand_debug_expr (tree exp)
2619{
2620 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2621 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2ba172e0 2622 enum machine_mode inner_mode = VOIDmode;
b5b8b0ac 2623 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
09e881c9 2624 addr_space_t as;
b5b8b0ac
AO
2625
2626 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2627 {
2628 case tcc_expression:
2629 switch (TREE_CODE (exp))
2630 {
2631 case COND_EXPR:
7ece48b1 2632 case DOT_PROD_EXPR:
0354c0c7
BS
2633 case WIDEN_MULT_PLUS_EXPR:
2634 case WIDEN_MULT_MINUS_EXPR:
0f59b812 2635 case FMA_EXPR:
b5b8b0ac
AO
2636 goto ternary;
2637
2638 case TRUTH_ANDIF_EXPR:
2639 case TRUTH_ORIF_EXPR:
2640 case TRUTH_AND_EXPR:
2641 case TRUTH_OR_EXPR:
2642 case TRUTH_XOR_EXPR:
2643 goto binary;
2644
2645 case TRUTH_NOT_EXPR:
2646 goto unary;
2647
2648 default:
2649 break;
2650 }
2651 break;
2652
2653 ternary:
2654 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2655 if (!op2)
2656 return NULL_RTX;
2657 /* Fall through. */
2658
2659 binary:
2660 case tcc_binary:
2661 case tcc_comparison:
2662 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2663 if (!op1)
2664 return NULL_RTX;
2665 /* Fall through. */
2666
2667 unary:
2668 case tcc_unary:
2ba172e0 2669 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2670 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2671 if (!op0)
2672 return NULL_RTX;
2673 break;
2674
2675 case tcc_type:
2676 case tcc_statement:
2677 gcc_unreachable ();
2678
2679 case tcc_constant:
2680 case tcc_exceptional:
2681 case tcc_declaration:
2682 case tcc_reference:
2683 case tcc_vl_exp:
2684 break;
2685 }
2686
2687 switch (TREE_CODE (exp))
2688 {
2689 case STRING_CST:
2690 if (!lookup_constant_def (exp))
2691 {
e1b243a8
JJ
2692 if (strlen (TREE_STRING_POINTER (exp)) + 1
2693 != (size_t) TREE_STRING_LENGTH (exp))
2694 return NULL_RTX;
b5b8b0ac
AO
2695 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2696 op0 = gen_rtx_MEM (BLKmode, op0);
2697 set_mem_attributes (op0, exp, 0);
2698 return op0;
2699 }
2700 /* Fall through... */
2701
2702 case INTEGER_CST:
2703 case REAL_CST:
2704 case FIXED_CST:
2705 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2706 return op0;
2707
2708 case COMPLEX_CST:
2709 gcc_assert (COMPLEX_MODE_P (mode));
2710 op0 = expand_debug_expr (TREE_REALPART (exp));
b5b8b0ac 2711 op1 = expand_debug_expr (TREE_IMAGPART (exp));
b5b8b0ac
AO
2712 return gen_rtx_CONCAT (mode, op0, op1);
2713
0ca5af51
AO
2714 case DEBUG_EXPR_DECL:
2715 op0 = DECL_RTL_IF_SET (exp);
2716
2717 if (op0)
2718 return op0;
2719
2720 op0 = gen_rtx_DEBUG_EXPR (mode);
e4fb38bd 2721 DEBUG_EXPR_TREE_DECL (op0) = exp;
0ca5af51
AO
2722 SET_DECL_RTL (exp, op0);
2723
2724 return op0;
2725
b5b8b0ac
AO
2726 case VAR_DECL:
2727 case PARM_DECL:
2728 case FUNCTION_DECL:
2729 case LABEL_DECL:
2730 case CONST_DECL:
2731 case RESULT_DECL:
2732 op0 = DECL_RTL_IF_SET (exp);
2733
2734 /* This decl was probably optimized away. */
2735 if (!op0)
e1b243a8
JJ
2736 {
2737 if (TREE_CODE (exp) != VAR_DECL
2738 || DECL_EXTERNAL (exp)
2739 || !TREE_STATIC (exp)
2740 || !DECL_NAME (exp)
0fba566c 2741 || DECL_HARD_REGISTER (exp)
7d5fc814 2742 || DECL_IN_CONSTANT_POOL (exp)
0fba566c 2743 || mode == VOIDmode)
e1b243a8
JJ
2744 return NULL;
2745
b1aa0655 2746 op0 = make_decl_rtl_for_debug (exp);
e1b243a8
JJ
2747 if (!MEM_P (op0)
2748 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2749 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2750 return NULL;
2751 }
2752 else
2753 op0 = copy_rtx (op0);
b5b8b0ac 2754
06796564
JJ
2755 if (GET_MODE (op0) == BLKmode
2756 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2757 below would ICE. While it is likely a FE bug,
2758 try to be robust here. See PR43166. */
132b4e82
JJ
2759 || mode == BLKmode
2760 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
b5b8b0ac
AO
2761 {
2762 gcc_assert (MEM_P (op0));
2763 op0 = adjust_address_nv (op0, mode, 0);
2764 return op0;
2765 }
2766
2767 /* Fall through. */
2768
2769 adjust_mode:
2770 case PAREN_EXPR:
2771 case NOP_EXPR:
2772 case CONVERT_EXPR:
2773 {
2ba172e0 2774 inner_mode = GET_MODE (op0);
b5b8b0ac
AO
2775
2776 if (mode == inner_mode)
2777 return op0;
2778
2779 if (inner_mode == VOIDmode)
2780 {
2a8e30fb
MM
2781 if (TREE_CODE (exp) == SSA_NAME)
2782 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2783 else
2784 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b5b8b0ac
AO
2785 if (mode == inner_mode)
2786 return op0;
2787 }
2788
2789 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2790 {
2791 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2792 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2793 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2794 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2795 else
2796 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2797 }
2798 else if (FLOAT_MODE_P (mode))
2799 {
2a8e30fb 2800 gcc_assert (TREE_CODE (exp) != SSA_NAME);
b5b8b0ac
AO
2801 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2802 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2803 else
2804 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2805 }
2806 else if (FLOAT_MODE_P (inner_mode))
2807 {
2808 if (unsignedp)
2809 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2810 else
2811 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2812 }
2813 else if (CONSTANT_P (op0)
69660a70 2814 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
b5b8b0ac
AO
2815 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2816 subreg_lowpart_offset (mode,
2817 inner_mode));
1b47fe3f
JJ
2818 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2819 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2820 : unsignedp)
2ba172e0 2821 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
b5b8b0ac 2822 else
2ba172e0 2823 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
b5b8b0ac
AO
2824
2825 return op0;
2826 }
2827
70f34814 2828 case MEM_REF:
71f3a3f5
JJ
2829 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2830 {
2831 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2832 TREE_OPERAND (exp, 0),
2833 TREE_OPERAND (exp, 1));
2834 if (newexp)
2835 return expand_debug_expr (newexp);
2836 }
2837 /* FALLTHROUGH */
b5b8b0ac 2838 case INDIRECT_REF:
b5b8b0ac
AO
2839 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2840 if (!op0)
2841 return NULL;
2842
cb115041
JJ
2843 if (TREE_CODE (exp) == MEM_REF)
2844 {
583ac69c
JJ
2845 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2846 || (GET_CODE (op0) == PLUS
2847 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2848 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2849 Instead just use get_inner_reference. */
2850 goto component_ref;
2851
cb115041
JJ
2852 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2853 if (!op1 || !CONST_INT_P (op1))
2854 return NULL;
2855
2856 op0 = plus_constant (op0, INTVAL (op1));
2857 }
2858
09e881c9 2859 if (POINTER_TYPE_P (TREE_TYPE (exp)))
75421dcd 2860 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
09e881c9 2861 else
75421dcd 2862 as = ADDR_SPACE_GENERIC;
b5b8b0ac 2863
f61c6f34
JJ
2864 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2865 op0, as);
2866 if (op0 == NULL_RTX)
2867 return NULL;
b5b8b0ac 2868
f61c6f34 2869 op0 = gen_rtx_MEM (mode, op0);
b5b8b0ac 2870 set_mem_attributes (op0, exp, 0);
71f3a3f5
JJ
2871 if (TREE_CODE (exp) == MEM_REF
2872 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2873 set_mem_expr (op0, NULL_TREE);
09e881c9 2874 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2875
2876 return op0;
2877
2878 case TARGET_MEM_REF:
4d948885
RG
2879 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2880 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
b5b8b0ac
AO
2881 return NULL;
2882
2883 op0 = expand_debug_expr
4e25ca6b 2884 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
b5b8b0ac
AO
2885 if (!op0)
2886 return NULL;
2887
f61c6f34
JJ
2888 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2889 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2890 else
2891 as = ADDR_SPACE_GENERIC;
2892
2893 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2894 op0, as);
2895 if (op0 == NULL_RTX)
2896 return NULL;
b5b8b0ac
AO
2897
2898 op0 = gen_rtx_MEM (mode, op0);
2899
2900 set_mem_attributes (op0, exp, 0);
09e881c9 2901 set_mem_addr_space (op0, as);
b5b8b0ac
AO
2902
2903 return op0;
2904
583ac69c 2905 component_ref:
b5b8b0ac
AO
2906 case ARRAY_REF:
2907 case ARRAY_RANGE_REF:
2908 case COMPONENT_REF:
2909 case BIT_FIELD_REF:
2910 case REALPART_EXPR:
2911 case IMAGPART_EXPR:
2912 case VIEW_CONVERT_EXPR:
2913 {
2914 enum machine_mode mode1;
2915 HOST_WIDE_INT bitsize, bitpos;
2916 tree offset;
2917 int volatilep = 0;
2918 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2919 &mode1, &unsignedp, &volatilep, false);
2920 rtx orig_op0;
2921
4f2a9af8
JJ
2922 if (bitsize == 0)
2923 return NULL;
2924
b5b8b0ac
AO
2925 orig_op0 = op0 = expand_debug_expr (tem);
2926
2927 if (!op0)
2928 return NULL;
2929
2930 if (offset)
2931 {
dda2da58
AO
2932 enum machine_mode addrmode, offmode;
2933
aa847cc8
JJ
2934 if (!MEM_P (op0))
2935 return NULL;
b5b8b0ac 2936
dda2da58
AO
2937 op0 = XEXP (op0, 0);
2938 addrmode = GET_MODE (op0);
2939 if (addrmode == VOIDmode)
2940 addrmode = Pmode;
2941
b5b8b0ac
AO
2942 op1 = expand_debug_expr (offset);
2943 if (!op1)
2944 return NULL;
2945
dda2da58
AO
2946 offmode = GET_MODE (op1);
2947 if (offmode == VOIDmode)
2948 offmode = TYPE_MODE (TREE_TYPE (offset));
2949
2950 if (addrmode != offmode)
2951 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2952 subreg_lowpart_offset (addrmode,
2953 offmode));
2954
2955 /* Don't use offset_address here, we don't need a
2956 recognizable address, and we don't want to generate
2957 code. */
2ba172e0
JJ
2958 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2959 op0, op1));
b5b8b0ac
AO
2960 }
2961
2962 if (MEM_P (op0))
2963 {
4f2a9af8
JJ
2964 if (mode1 == VOIDmode)
2965 /* Bitfield. */
2966 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
b5b8b0ac
AO
2967 if (bitpos >= BITS_PER_UNIT)
2968 {
2969 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2970 bitpos %= BITS_PER_UNIT;
2971 }
2972 else if (bitpos < 0)
2973 {
4f2a9af8
JJ
2974 HOST_WIDE_INT units
2975 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
b5b8b0ac
AO
2976 op0 = adjust_address_nv (op0, mode1, units);
2977 bitpos += units * BITS_PER_UNIT;
2978 }
2979 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2980 op0 = adjust_address_nv (op0, mode, 0);
2981 else if (GET_MODE (op0) != mode1)
2982 op0 = adjust_address_nv (op0, mode1, 0);
2983 else
2984 op0 = copy_rtx (op0);
2985 if (op0 == orig_op0)
2986 op0 = shallow_copy_rtx (op0);
2987 set_mem_attributes (op0, exp, 0);
2988 }
2989
2990 if (bitpos == 0 && mode == GET_MODE (op0))
2991 return op0;
2992
2d3fc6aa
JJ
2993 if (bitpos < 0)
2994 return NULL;
2995
88c04a5d
JJ
2996 if (GET_MODE (op0) == BLKmode)
2997 return NULL;
2998
b5b8b0ac
AO
2999 if ((bitpos % BITS_PER_UNIT) == 0
3000 && bitsize == GET_MODE_BITSIZE (mode1))
3001 {
3002 enum machine_mode opmode = GET_MODE (op0);
3003
b5b8b0ac 3004 if (opmode == VOIDmode)
9712cba0 3005 opmode = TYPE_MODE (TREE_TYPE (tem));
b5b8b0ac
AO
3006
3007 /* This condition may hold if we're expanding the address
3008 right past the end of an array that turned out not to
3009 be addressable (i.e., the address was only computed in
3010 debug stmts). The gen_subreg below would rightfully
3011 crash, and the address doesn't really exist, so just
3012 drop it. */
3013 if (bitpos >= GET_MODE_BITSIZE (opmode))
3014 return NULL;
3015
7d5d39bb
JJ
3016 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3017 return simplify_gen_subreg (mode, op0, opmode,
3018 bitpos / BITS_PER_UNIT);
b5b8b0ac
AO
3019 }
3020
3021 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3022 && TYPE_UNSIGNED (TREE_TYPE (exp))
3023 ? SIGN_EXTRACT
3024 : ZERO_EXTRACT, mode,
3025 GET_MODE (op0) != VOIDmode
9712cba0
JJ
3026 ? GET_MODE (op0)
3027 : TYPE_MODE (TREE_TYPE (tem)),
b5b8b0ac
AO
3028 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3029 }
3030
b5b8b0ac 3031 case ABS_EXPR:
2ba172e0 3032 return simplify_gen_unary (ABS, mode, op0, mode);
b5b8b0ac
AO
3033
3034 case NEGATE_EXPR:
2ba172e0 3035 return simplify_gen_unary (NEG, mode, op0, mode);
b5b8b0ac
AO
3036
3037 case BIT_NOT_EXPR:
2ba172e0 3038 return simplify_gen_unary (NOT, mode, op0, mode);
b5b8b0ac
AO
3039
3040 case FLOAT_EXPR:
2ba172e0
JJ
3041 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3042 0)))
3043 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3044 inner_mode);
b5b8b0ac
AO
3045
3046 case FIX_TRUNC_EXPR:
2ba172e0
JJ
3047 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3048 inner_mode);
b5b8b0ac
AO
3049
3050 case POINTER_PLUS_EXPR:
576319a7
DD
3051 /* For the rare target where pointers are not the same size as
 3052 size_t, we need to check for mismatched modes and correct
3053 the addend. */
3054 if (op0 && op1
3055 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3056 && GET_MODE (op0) != GET_MODE (op1))
3057 {
3058 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2ba172e0
JJ
3059 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3060 GET_MODE (op1));
576319a7
DD
3061 else
3062 /* We always sign-extend, regardless of the signedness of
3063 the operand, because the operand is always unsigned
3064 here even if the original C expression is signed. */
2ba172e0
JJ
3065 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3066 GET_MODE (op1));
576319a7
DD
3067 }
3068 /* Fall through. */
b5b8b0ac 3069 case PLUS_EXPR:
2ba172e0 3070 return simplify_gen_binary (PLUS, mode, op0, op1);
b5b8b0ac
AO
3071
3072 case MINUS_EXPR:
2ba172e0 3073 return simplify_gen_binary (MINUS, mode, op0, op1);
b5b8b0ac
AO
3074
3075 case MULT_EXPR:
2ba172e0 3076 return simplify_gen_binary (MULT, mode, op0, op1);
b5b8b0ac
AO
3077
3078 case RDIV_EXPR:
3079 case TRUNC_DIV_EXPR:
3080 case EXACT_DIV_EXPR:
3081 if (unsignedp)
2ba172e0 3082 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac 3083 else
2ba172e0 3084 return simplify_gen_binary (DIV, mode, op0, op1);
b5b8b0ac
AO
3085
3086 case TRUNC_MOD_EXPR:
2ba172e0 3087 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
b5b8b0ac
AO
3088
3089 case FLOOR_DIV_EXPR:
3090 if (unsignedp)
2ba172e0 3091 return simplify_gen_binary (UDIV, mode, op0, op1);
b5b8b0ac
AO
3092 else
3093 {
2ba172e0
JJ
3094 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3095 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3096 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0 3097 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3098 }
3099
3100 case FLOOR_MOD_EXPR:
3101 if (unsignedp)
2ba172e0 3102 return simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac
AO
3103 else
3104 {
2ba172e0 3105 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3106 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3107 adj = simplify_gen_unary (NEG, mode,
3108 simplify_gen_binary (MULT, mode, adj, op1),
3109 mode);
3110 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3111 }
3112
3113 case CEIL_DIV_EXPR:
3114 if (unsignedp)
3115 {
2ba172e0
JJ
3116 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3117 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3118 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0 3119 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3120 }
3121 else
3122 {
2ba172e0
JJ
3123 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3124 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3125 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0 3126 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3127 }
3128
3129 case CEIL_MOD_EXPR:
3130 if (unsignedp)
3131 {
2ba172e0 3132 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3133 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3134 adj = simplify_gen_unary (NEG, mode,
3135 simplify_gen_binary (MULT, mode, adj, op1),
3136 mode);
3137 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3138 }
3139 else
3140 {
2ba172e0 3141 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3142 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3143 adj = simplify_gen_unary (NEG, mode,
3144 simplify_gen_binary (MULT, mode, adj, op1),
3145 mode);
3146 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3147 }
3148
3149 case ROUND_DIV_EXPR:
3150 if (unsignedp)
3151 {
2ba172e0
JJ
3152 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3153 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3154 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0 3155 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3156 }
3157 else
3158 {
2ba172e0
JJ
3159 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3160 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3161 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0 3162 return simplify_gen_binary (PLUS, mode, div, adj);
b5b8b0ac
AO
3163 }
3164
3165 case ROUND_MOD_EXPR:
3166 if (unsignedp)
3167 {
2ba172e0 3168 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
b5b8b0ac 3169 rtx adj = round_udiv_adjust (mode, mod, op1);
2ba172e0
JJ
3170 adj = simplify_gen_unary (NEG, mode,
3171 simplify_gen_binary (MULT, mode, adj, op1),
3172 mode);
3173 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3174 }
3175 else
3176 {
2ba172e0 3177 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
b5b8b0ac 3178 rtx adj = round_sdiv_adjust (mode, mod, op1);
2ba172e0
JJ
3179 adj = simplify_gen_unary (NEG, mode,
3180 simplify_gen_binary (MULT, mode, adj, op1),
3181 mode);
3182 return simplify_gen_binary (PLUS, mode, mod, adj);
b5b8b0ac
AO
3183 }
3184
3185 case LSHIFT_EXPR:
2ba172e0 3186 return simplify_gen_binary (ASHIFT, mode, op0, op1);
b5b8b0ac
AO
3187
3188 case RSHIFT_EXPR:
3189 if (unsignedp)
2ba172e0 3190 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
b5b8b0ac 3191 else
2ba172e0 3192 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
b5b8b0ac
AO
3193
3194 case LROTATE_EXPR:
2ba172e0 3195 return simplify_gen_binary (ROTATE, mode, op0, op1);
b5b8b0ac
AO
3196
3197 case RROTATE_EXPR:
2ba172e0 3198 return simplify_gen_binary (ROTATERT, mode, op0, op1);
b5b8b0ac
AO
3199
3200 case MIN_EXPR:
2ba172e0 3201 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
b5b8b0ac
AO
3202
3203 case MAX_EXPR:
2ba172e0 3204 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
b5b8b0ac
AO
3205
3206 case BIT_AND_EXPR:
3207 case TRUTH_AND_EXPR:
2ba172e0 3208 return simplify_gen_binary (AND, mode, op0, op1);
b5b8b0ac
AO
3209
3210 case BIT_IOR_EXPR:
3211 case TRUTH_OR_EXPR:
2ba172e0 3212 return simplify_gen_binary (IOR, mode, op0, op1);
b5b8b0ac
AO
3213
3214 case BIT_XOR_EXPR:
3215 case TRUTH_XOR_EXPR:
2ba172e0 3216 return simplify_gen_binary (XOR, mode, op0, op1);
b5b8b0ac
AO
3217
3218 case TRUTH_ANDIF_EXPR:
3219 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3220
3221 case TRUTH_ORIF_EXPR:
3222 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3223
3224 case TRUTH_NOT_EXPR:
2ba172e0 3225 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
b5b8b0ac
AO
3226
3227 case LT_EXPR:
2ba172e0
JJ
3228 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3229 op0, op1);
b5b8b0ac
AO
3230
3231 case LE_EXPR:
2ba172e0
JJ
3232 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3233 op0, op1);
b5b8b0ac
AO
3234
3235 case GT_EXPR:
2ba172e0
JJ
3236 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3237 op0, op1);
b5b8b0ac
AO
3238
3239 case GE_EXPR:
2ba172e0
JJ
3240 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3241 op0, op1);
b5b8b0ac
AO
3242
3243 case EQ_EXPR:
2ba172e0 3244 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3245
3246 case NE_EXPR:
2ba172e0 3247 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3248
3249 case UNORDERED_EXPR:
2ba172e0 3250 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3251
3252 case ORDERED_EXPR:
2ba172e0 3253 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3254
3255 case UNLT_EXPR:
2ba172e0 3256 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3257
3258 case UNLE_EXPR:
2ba172e0 3259 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3260
3261 case UNGT_EXPR:
2ba172e0 3262 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3263
3264 case UNGE_EXPR:
2ba172e0 3265 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3266
3267 case UNEQ_EXPR:
2ba172e0 3268 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3269
3270 case LTGT_EXPR:
2ba172e0 3271 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
b5b8b0ac
AO
3272
3273 case COND_EXPR:
3274 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3275
3276 case COMPLEX_EXPR:
3277 gcc_assert (COMPLEX_MODE_P (mode));
3278 if (GET_MODE (op0) == VOIDmode)
3279 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3280 if (GET_MODE (op1) == VOIDmode)
3281 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3282 return gen_rtx_CONCAT (mode, op0, op1);
3283
d02a5a4b
JJ
3284 case CONJ_EXPR:
3285 if (GET_CODE (op0) == CONCAT)
3286 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2ba172e0
JJ
3287 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3288 XEXP (op0, 1),
3289 GET_MODE_INNER (mode)));
d02a5a4b
JJ
3290 else
3291 {
3292 enum machine_mode imode = GET_MODE_INNER (mode);
3293 rtx re, im;
3294
3295 if (MEM_P (op0))
3296 {
3297 re = adjust_address_nv (op0, imode, 0);
3298 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3299 }
3300 else
3301 {
3302 enum machine_mode ifmode = int_mode_for_mode (mode);
3303 enum machine_mode ihmode = int_mode_for_mode (imode);
3304 rtx halfsize;
3305 if (ifmode == BLKmode || ihmode == BLKmode)
3306 return NULL;
3307 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3308 re = op0;
3309 if (mode != ifmode)
3310 re = gen_rtx_SUBREG (ifmode, re, 0);
3311 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3312 if (imode != ihmode)
3313 re = gen_rtx_SUBREG (imode, re, 0);
3314 im = copy_rtx (op0);
3315 if (mode != ifmode)
3316 im = gen_rtx_SUBREG (ifmode, im, 0);
3317 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3318 if (imode != ihmode)
3319 im = gen_rtx_SUBREG (imode, im, 0);
3320 }
3321 im = gen_rtx_NEG (imode, im);
3322 return gen_rtx_CONCAT (mode, re, im);
3323 }
3324
b5b8b0ac
AO
3325 case ADDR_EXPR:
3326 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3327 if (!op0 || !MEM_P (op0))
c8a27c40
JJ
3328 {
3329 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3330 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3331 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
f8cca67b
JJ
3332 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3333 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
c8a27c40
JJ
3334 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3335
3336 if (handled_component_p (TREE_OPERAND (exp, 0)))
3337 {
3338 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3339 tree decl
3340 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3341 &bitoffset, &bitsize, &maxsize);
3342 if ((TREE_CODE (decl) == VAR_DECL
3343 || TREE_CODE (decl) == PARM_DECL
3344 || TREE_CODE (decl) == RESULT_DECL)
f8cca67b
JJ
3345 && (!TREE_ADDRESSABLE (decl)
3346 || target_for_debug_bind (decl))
c8a27c40
JJ
3347 && (bitoffset % BITS_PER_UNIT) == 0
3348 && bitsize > 0
3349 && bitsize == maxsize)
3350 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3351 bitoffset / BITS_PER_UNIT);
3352 }
3353
3354 return NULL;
3355 }
b5b8b0ac 3356
f61c6f34
JJ
3357 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3358 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
dda2da58
AO
3359
3360 return op0;
b5b8b0ac
AO
3361
3362 case VECTOR_CST:
d2a12ae7
RG
3363 {
3364 unsigned i;
3365
3366 op0 = gen_rtx_CONCATN
3367 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3368
3369 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3370 {
3371 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3372 if (!op1)
3373 return NULL;
3374 XVECEXP (op0, 0, i) = op1;
3375 }
3376
3377 return op0;
3378 }
b5b8b0ac
AO
3379
3380 case CONSTRUCTOR:
47598145
MM
3381 if (TREE_CLOBBER_P (exp))
3382 return NULL;
3383 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
b5b8b0ac
AO
3384 {
3385 unsigned i;
3386 tree val;
3387
3388 op0 = gen_rtx_CONCATN
3389 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3390
3391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3392 {
3393 op1 = expand_debug_expr (val);
3394 if (!op1)
3395 return NULL;
3396 XVECEXP (op0, 0, i) = op1;
3397 }
3398
3399 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3400 {
3401 op1 = expand_debug_expr
e8160c9a 3402 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
b5b8b0ac
AO
3403
3404 if (!op1)
3405 return NULL;
3406
3407 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3408 XVECEXP (op0, 0, i) = op1;
3409 }
3410
3411 return op0;
3412 }
3413 else
3414 goto flag_unsupported;
3415
3416 case CALL_EXPR:
3417 /* ??? Maybe handle some builtins? */
3418 return NULL;
3419
3420 case SSA_NAME:
3421 {
2a8e30fb
MM
3422 gimple g = get_gimple_for_ssa_name (exp);
3423 if (g)
3424 {
3425 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3426 if (!op0)
3427 return NULL;
3428 }
3429 else
3430 {
3431 int part = var_to_partition (SA.map, exp);
b5b8b0ac 3432
2a8e30fb 3433 if (part == NO_PARTITION)
a58a8e4b
JJ
3434 {
 3435 /* If this is a reference to the incoming value of a parameter
 3436 that is never used in the code, or where the incoming
 3437 value itself is never used in the code, use the PARM_DECL's
 3438 DECL_RTL if set. */
3439 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3440 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3441 {
12c5ffe5
EB
3442 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3443 if (op0)
3444 goto adjust_mode;
a58a8e4b 3445 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
12c5ffe5
EB
3446 if (op0)
3447 goto adjust_mode;
a58a8e4b
JJ
3448 }
3449 return NULL;
3450 }
b5b8b0ac 3451
2a8e30fb 3452 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
b5b8b0ac 3453
abfea58d 3454 op0 = copy_rtx (SA.partition_to_pseudo[part]);
2a8e30fb 3455 }
b5b8b0ac
AO
3456 goto adjust_mode;
3457 }
3458
3459 case ERROR_MARK:
3460 return NULL;
3461
7ece48b1
JJ
3462 /* Vector stuff. For most of the codes we don't have rtl codes. */
3463 case REALIGN_LOAD_EXPR:
3464 case REDUC_MAX_EXPR:
3465 case REDUC_MIN_EXPR:
3466 case REDUC_PLUS_EXPR:
3467 case VEC_COND_EXPR:
7ece48b1
JJ
3468 case VEC_LSHIFT_EXPR:
3469 case VEC_PACK_FIX_TRUNC_EXPR:
3470 case VEC_PACK_SAT_EXPR:
3471 case VEC_PACK_TRUNC_EXPR:
3472 case VEC_RSHIFT_EXPR:
3473 case VEC_UNPACK_FLOAT_HI_EXPR:
3474 case VEC_UNPACK_FLOAT_LO_EXPR:
3475 case VEC_UNPACK_HI_EXPR:
3476 case VEC_UNPACK_LO_EXPR:
3477 case VEC_WIDEN_MULT_HI_EXPR:
3478 case VEC_WIDEN_MULT_LO_EXPR:
36ba4aae
IR
3479 case VEC_WIDEN_LSHIFT_HI_EXPR:
3480 case VEC_WIDEN_LSHIFT_LO_EXPR:
3f3af9df 3481 case VEC_PERM_EXPR:
7ece48b1
JJ
3482 return NULL;
3483
3484 /* Misc codes. */
3485 case ADDR_SPACE_CONVERT_EXPR:
3486 case FIXED_CONVERT_EXPR:
3487 case OBJ_TYPE_REF:
3488 case WITH_SIZE_EXPR:
3489 return NULL;
3490
3491 case DOT_PROD_EXPR:
3492 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3493 && SCALAR_INT_MODE_P (mode))
3494 {
2ba172e0
JJ
3495 op0
3496 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3497 0)))
3498 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3499 inner_mode);
3500 op1
3501 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3502 1)))
3503 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3504 inner_mode);
3505 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3506 return simplify_gen_binary (PLUS, mode, op0, op2);
7ece48b1
JJ
3507 }
3508 return NULL;
3509
3510 case WIDEN_MULT_EXPR:
0354c0c7
BS
3511 case WIDEN_MULT_PLUS_EXPR:
3512 case WIDEN_MULT_MINUS_EXPR:
7ece48b1
JJ
3513 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3514 && SCALAR_INT_MODE_P (mode))
3515 {
2ba172e0 3516 inner_mode = GET_MODE (op0);
7ece48b1 3517 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5b58b39b 3518 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
7ece48b1 3519 else
5b58b39b 3520 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
7ece48b1 3521 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5b58b39b 3522 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
7ece48b1 3523 else
5b58b39b 3524 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
2ba172e0 3525 op0 = simplify_gen_binary (MULT, mode, op0, op1);
0354c0c7
BS
3526 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3527 return op0;
3528 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
2ba172e0 3529 return simplify_gen_binary (PLUS, mode, op0, op2);
0354c0c7 3530 else
2ba172e0 3531 return simplify_gen_binary (MINUS, mode, op2, op0);
7ece48b1
JJ
3532 }
3533 return NULL;
3534
3535 case WIDEN_SUM_EXPR:
3f3af9df 3536 case WIDEN_LSHIFT_EXPR:
7ece48b1
JJ
3537 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3538 && SCALAR_INT_MODE_P (mode))
3539 {
2ba172e0
JJ
3540 op0
3541 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3542 0)))
3543 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3544 inner_mode);
3f3af9df
JJ
3545 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3546 ? ASHIFT : PLUS, mode, op0, op1);
7ece48b1
JJ
3547 }
3548 return NULL;
3549
0f59b812 3550 case FMA_EXPR:
2ba172e0 3551 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
0f59b812 3552
b5b8b0ac
AO
3553 default:
3554 flag_unsupported:
3555#ifdef ENABLE_CHECKING
3556 debug_tree (exp);
3557 gcc_unreachable ();
3558#else
3559 return NULL;
3560#endif
3561 }
3562}
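
A minimal, hedged illustration (not part of this file) of the COMPLEX_EXPR/CONJ_EXPR handling above: the debug expression for a conjugate is built as CONCAT (re, NEG (im)), i.e. the real half is kept and the imaginary half negated, matching what a source-level conjugate computes. The example below is standalone and purely illustrative.

/* Standalone C99 example; prints "3 -4", the CONCAT (re, NEG (im)) shape.  */
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double _Complex c = conj (z);   /* real part kept, imaginary part negated */
  printf ("%g %g\n", creal (c), cimag (c));
  return 0;
}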
3563
ddb555ed
JJ
3564/* Return an RTX equivalent to the source bind value of the tree expression
3565 EXP. */
3566
3567static rtx
3568expand_debug_source_expr (tree exp)
3569{
3570 rtx op0 = NULL_RTX;
3571 enum machine_mode mode = VOIDmode, inner_mode;
3572
3573 switch (TREE_CODE (exp))
3574 {
3575 case PARM_DECL:
3576 {
ddb555ed 3577 mode = DECL_MODE (exp);
12c5ffe5
EB
3578 op0 = expand_debug_parm_decl (exp);
3579 if (op0)
3580 break;
ddb555ed
JJ
3581 /* See if this isn't an argument that has been completely
3582 optimized out. */
3583 if (!DECL_RTL_SET_P (exp)
12c5ffe5 3584 && !DECL_INCOMING_RTL (exp)
ddb555ed
JJ
3585 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3586 {
3587 tree aexp = exp;
3588 if (DECL_ABSTRACT_ORIGIN (exp))
3589 aexp = DECL_ABSTRACT_ORIGIN (exp);
3590 if (DECL_CONTEXT (aexp)
3591 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3592 {
3593 VEC(tree, gc) **debug_args;
3594 unsigned int ix;
3595 tree ddecl;
3596#ifdef ENABLE_CHECKING
3597 tree parm;
3598 for (parm = DECL_ARGUMENTS (current_function_decl);
3599 parm; parm = DECL_CHAIN (parm))
3600 gcc_assert (parm != exp
3601 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3602#endif
3603 debug_args = decl_debug_args_lookup (current_function_decl);
3604 if (debug_args != NULL)
3605 {
3606 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3607 ix += 2)
3608 if (ddecl == aexp)
3609 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3610 }
3611 }
3612 }
3613 break;
3614 }
3615 default:
3616 break;
3617 }
3618
3619 if (op0 == NULL_RTX)
3620 return NULL_RTX;
3621
3622 inner_mode = GET_MODE (op0);
3623 if (mode == inner_mode)
3624 return op0;
3625
3626 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3627 {
3628 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3629 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3630 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3631 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3632 else
3633 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3634 }
3635 else if (FLOAT_MODE_P (mode))
3636 gcc_unreachable ();
3637 else if (FLOAT_MODE_P (inner_mode))
3638 {
3639 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3640 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3641 else
3642 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3643 }
3644 else if (CONSTANT_P (op0)
3645 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3646 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3647 subreg_lowpart_offset (mode, inner_mode));
3648 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3649 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3650 else
3651 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3652
3653 return op0;
3654}
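
A hedged sketch (not part of this file) of the situation expand_debug_source_expr exists for: an argument that has been completely optimized out of the function actually emitted, for instance when IPA transformations produce a clone without it, so the debug-args lookup above may be the only way left to describe it. The function names below are illustrative only.

/* With optimization, a clone of "callee" may drop the unused "x"
   entirely; a source bind for it then has no DECL_RTL and no
   DECL_INCOMING_RTL, which is the case handled above.  */
static int
callee (int x, int y)
{
  (void) x;                     /* incoming value never used */
  return y + 1;
}

int
caller (int y)
{
  return callee (123, y);
}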
3655
b5b8b0ac
AO
3656/* Expand the _LOCs in debug insns. We run this after expanding all
3657 regular insns, so that any variables referenced in the function
3658 will have their DECL_RTLs set. */
3659
3660static void
3661expand_debug_locations (void)
3662{
3663 rtx insn;
3664 rtx last = get_last_insn ();
3665 int save_strict_alias = flag_strict_aliasing;
3666
 3667 /* New alias sets created while setting up memory attributes cause
 3668 -fcompare-debug failures, even though they don't bring about any
3669 codegen changes. */
3670 flag_strict_aliasing = 0;
3671
3672 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3673 if (DEBUG_INSN_P (insn))
3674 {
3675 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3676 rtx val;
3677 enum machine_mode mode;
3678
3679 if (value == NULL_TREE)
3680 val = NULL_RTX;
3681 else
3682 {
ddb555ed
JJ
3683 if (INSN_VAR_LOCATION_STATUS (insn)
3684 == VAR_INIT_STATUS_UNINITIALIZED)
3685 val = expand_debug_source_expr (value);
3686 else
3687 val = expand_debug_expr (value);
b5b8b0ac
AO
3688 gcc_assert (last == get_last_insn ());
3689 }
3690
3691 if (!val)
3692 val = gen_rtx_UNKNOWN_VAR_LOC ();
3693 else
3694 {
3695 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3696
3697 gcc_assert (mode == GET_MODE (val)
3698 || (GET_MODE (val) == VOIDmode
3699 && (CONST_INT_P (val)
3700 || GET_CODE (val) == CONST_FIXED
3701 || GET_CODE (val) == CONST_DOUBLE
3702 || GET_CODE (val) == LABEL_REF)));
3703 }
3704
3705 INSN_VAR_LOCATION_LOC (insn) = val;
3706 }
3707
3708 flag_strict_aliasing = save_strict_alias;
3709}
3710
242229bb
JH
3711/* Expand basic block BB from GIMPLE trees to RTL. */
3712
3713static basic_block
10d22567 3714expand_gimple_basic_block (basic_block bb)
242229bb 3715{
726a989a
RB
3716 gimple_stmt_iterator gsi;
3717 gimple_seq stmts;
3718 gimple stmt = NULL;
242229bb
JH
3719 rtx note, last;
3720 edge e;
628f6a4e 3721 edge_iterator ei;
8b11009b 3722 void **elt;
242229bb
JH
3723
3724 if (dump_file)
726a989a
RB
3725 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3726 bb->index);
3727
3728 /* Note that since we are now transitioning from GIMPLE to RTL, we
3729 cannot use the gsi_*_bb() routines because they expect the basic
3730 block to be in GIMPLE, instead of RTL. Therefore, we need to
3731 access the BB sequence directly. */
3732 stmts = bb_seq (bb);
3e8b732e
MM
3733 bb->il.gimple.seq = NULL;
3734 bb->il.gimple.phi_nodes = NULL;
bf08ebeb 3735 rtl_profile_for_bb (bb);
5e2d947c
JH
3736 init_rtl_bb_info (bb);
3737 bb->flags |= BB_RTL;
3738
a9b77cd1
ZD
 3739 /* Remove the RETURN_EXPR if we may fall through to the exit
3740 instead. */
726a989a
RB
3741 gsi = gsi_last (stmts);
3742 if (!gsi_end_p (gsi)
3743 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 3744 {
726a989a 3745 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
3746
3747 gcc_assert (single_succ_p (bb));
3748 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3749
3750 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 3751 && !gimple_return_retval (ret_stmt))
a9b77cd1 3752 {
726a989a 3753 gsi_remove (&gsi, false);
a9b77cd1
ZD
3754 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3755 }
3756 }
3757
726a989a
RB
3758 gsi = gsi_start (stmts);
3759 if (!gsi_end_p (gsi))
8b11009b 3760 {
726a989a
RB
3761 stmt = gsi_stmt (gsi);
3762 if (gimple_code (stmt) != GIMPLE_LABEL)
3763 stmt = NULL;
8b11009b 3764 }
242229bb 3765
8b11009b
ZD
3766 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3767
3768 if (stmt || elt)
242229bb
JH
3769 {
3770 last = get_last_insn ();
3771
8b11009b
ZD
3772 if (stmt)
3773 {
28ed065e 3774 expand_gimple_stmt (stmt);
726a989a 3775 gsi_next (&gsi);
8b11009b
ZD
3776 }
3777
3778 if (elt)
ae50c0cb 3779 emit_label ((rtx) *elt);
242229bb 3780
caf93cb0 3781 /* Java emits line number notes at the top of labels.
c22cacf3 3782 ??? Make this go away once line number notes are obsoleted. */
242229bb 3783 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 3784 if (NOTE_P (BB_HEAD (bb)))
242229bb 3785 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 3786 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 3787
726a989a 3788 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
3789 }
3790 else
3791 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3792
3793 NOTE_BASIC_BLOCK (note) = bb;
3794
726a989a 3795 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 3796 {
cea49550 3797 basic_block new_bb;
242229bb 3798
b5b8b0ac 3799 stmt = gsi_stmt (gsi);
2a8e30fb
MM
3800
3801 /* If this statement is a non-debug one, and we generate debug
3802 insns, then this one might be the last real use of a TERed
 3803 SSA_NAME that still has some debug uses further
 3804 down. Expanding the current SSA name in those further debug
 3805 uses by its RHS might lead to wrong debug info, as coalescing
3806 might make the operands of such RHS be placed into the same
3807 pseudo as something else. Like so:
3808 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3809 use(a_1);
3810 a_2 = ...
3811 #DEBUG ... => a_1
3812 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
 3813 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3814 the write to a_2 would actually have clobbered the place which
3815 formerly held a_0.
3816
3817 So, instead of that, we recognize the situation, and generate
3818 debug temporaries at the last real use of TERed SSA names:
3819 a_1 = a_0 + 1;
3820 #DEBUG #D1 => a_1
3821 use(a_1);
3822 a_2 = ...
3823 #DEBUG ... => #D1
3824 */
3825 if (MAY_HAVE_DEBUG_INSNS
3826 && SA.values
3827 && !is_gimple_debug (stmt))
3828 {
3829 ssa_op_iter iter;
3830 tree op;
3831 gimple def;
3832
3833 location_t sloc = get_curr_insn_source_location ();
3834 tree sblock = get_curr_insn_block ();
3835
3836 /* Look for SSA names that have their last use here (TERed
3837 names always have only one real use). */
3838 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3839 if ((def = get_gimple_for_ssa_name (op)))
3840 {
3841 imm_use_iterator imm_iter;
3842 use_operand_p use_p;
3843 bool have_debug_uses = false;
3844
3845 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3846 {
3847 if (gimple_debug_bind_p (USE_STMT (use_p)))
3848 {
3849 have_debug_uses = true;
3850 break;
3851 }
3852 }
3853
3854 if (have_debug_uses)
3855 {
 3856 /* OP is a TERed SSA name, with DEF its defining
3857 statement, and where OP is used in further debug
3858 instructions. Generate a debug temporary, and
3859 replace all uses of OP in debug insns with that
3860 temporary. */
3861 gimple debugstmt;
3862 tree value = gimple_assign_rhs_to_tree (def);
3863 tree vexpr = make_node (DEBUG_EXPR_DECL);
3864 rtx val;
3865 enum machine_mode mode;
3866
3867 set_curr_insn_source_location (gimple_location (def));
3868 set_curr_insn_block (gimple_block (def));
3869
3870 DECL_ARTIFICIAL (vexpr) = 1;
3871 TREE_TYPE (vexpr) = TREE_TYPE (value);
3872 if (DECL_P (value))
3873 mode = DECL_MODE (value);
3874 else
3875 mode = TYPE_MODE (TREE_TYPE (value));
3876 DECL_MODE (vexpr) = mode;
3877
3878 val = gen_rtx_VAR_LOCATION
3879 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3880
e8c6bb74 3881 emit_debug_insn (val);
2a8e30fb
MM
3882
3883 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3884 {
3885 if (!gimple_debug_bind_p (debugstmt))
3886 continue;
3887
3888 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3889 SET_USE (use_p, vexpr);
3890
3891 update_stmt (debugstmt);
3892 }
3893 }
3894 }
3895 set_curr_insn_source_location (sloc);
3896 set_curr_insn_block (sblock);
3897 }
3898
a5883ba0 3899 currently_expanding_gimple_stmt = stmt;
b5b8b0ac 3900
242229bb
JH
3901 /* Expand this statement, then evaluate the resulting RTL and
3902 fixup the CFG accordingly. */
726a989a 3903 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 3904 {
726a989a 3905 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
3906 if (new_bb)
3907 return new_bb;
3908 }
b5b8b0ac
AO
3909 else if (gimple_debug_bind_p (stmt))
3910 {
3911 location_t sloc = get_curr_insn_source_location ();
3912 tree sblock = get_curr_insn_block ();
3913 gimple_stmt_iterator nsi = gsi;
3914
3915 for (;;)
3916 {
3917 tree var = gimple_debug_bind_get_var (stmt);
3918 tree value;
3919 rtx val;
3920 enum machine_mode mode;
3921
ec8c1492
JJ
3922 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3923 && TREE_CODE (var) != LABEL_DECL
3924 && !target_for_debug_bind (var))
3925 goto delink_debug_stmt;
3926
b5b8b0ac
AO
3927 if (gimple_debug_bind_has_value_p (stmt))
3928 value = gimple_debug_bind_get_value (stmt);
3929 else
3930 value = NULL_TREE;
3931
3932 last = get_last_insn ();
3933
3934 set_curr_insn_source_location (gimple_location (stmt));
3935 set_curr_insn_block (gimple_block (stmt));
3936
3937 if (DECL_P (var))
3938 mode = DECL_MODE (var);
3939 else
3940 mode = TYPE_MODE (TREE_TYPE (var));
3941
3942 val = gen_rtx_VAR_LOCATION
3943 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3944
e16b6fd0 3945 emit_debug_insn (val);
b5b8b0ac
AO
3946
3947 if (dump_file && (dump_flags & TDF_DETAILS))
3948 {
3949 /* We can't dump the insn with a TREE where an RTX
3950 is expected. */
e8c6bb74 3951 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
b5b8b0ac 3952 maybe_dump_rtl_for_gimple_stmt (stmt, last);
e8c6bb74 3953 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
b5b8b0ac
AO
3954 }
3955
ec8c1492 3956 delink_debug_stmt:
2a8e30fb
MM
3957 /* In order not to generate too many debug temporaries,
3958 we delink all uses of debug statements we already expanded.
3959 Therefore debug statements between definition and real
3960 use of TERed SSA names will continue to use the SSA name,
3961 and not be replaced with debug temps. */
3962 delink_stmt_imm_use (stmt);
3963
b5b8b0ac
AO
3964 gsi = nsi;
3965 gsi_next (&nsi);
3966 if (gsi_end_p (nsi))
3967 break;
3968 stmt = gsi_stmt (nsi);
3969 if (!gimple_debug_bind_p (stmt))
3970 break;
3971 }
3972
ddb555ed
JJ
3973 set_curr_insn_source_location (sloc);
3974 set_curr_insn_block (sblock);
3975 }
3976 else if (gimple_debug_source_bind_p (stmt))
3977 {
3978 location_t sloc = get_curr_insn_source_location ();
3979 tree sblock = get_curr_insn_block ();
3980 tree var = gimple_debug_source_bind_get_var (stmt);
3981 tree value = gimple_debug_source_bind_get_value (stmt);
3982 rtx val;
3983 enum machine_mode mode;
3984
3985 last = get_last_insn ();
3986
3987 set_curr_insn_source_location (gimple_location (stmt));
3988 set_curr_insn_block (gimple_block (stmt));
3989
3990 mode = DECL_MODE (var);
3991
3992 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3993 VAR_INIT_STATUS_UNINITIALIZED);
3994
3995 emit_debug_insn (val);
3996
3997 if (dump_file && (dump_flags & TDF_DETAILS))
3998 {
3999 /* We can't dump the insn with a TREE where an RTX
4000 is expected. */
4001 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4002 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4003 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4004 }
4005
b5b8b0ac
AO
4006 set_curr_insn_source_location (sloc);
4007 set_curr_insn_block (sblock);
4008 }
80c7a9eb 4009 else
242229bb 4010 {
726a989a 4011 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
4012 {
4013 bool can_fallthru;
4014 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4015 if (new_bb)
4016 {
4017 if (can_fallthru)
4018 bb = new_bb;
4019 else
4020 return new_bb;
4021 }
4022 }
4d7a65ea 4023 else
b7211528 4024 {
4e3825db 4025 def_operand_p def_p;
4e3825db
MM
4026 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4027
4028 if (def_p != NULL)
4029 {
4030 /* Ignore this stmt if it is in the list of
4031 replaceable expressions. */
4032 if (SA.values
b8698a0f 4033 && bitmap_bit_p (SA.values,
e97809c6 4034 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
4035 continue;
4036 }
28ed065e 4037 last = expand_gimple_stmt (stmt);
726a989a 4038 maybe_dump_rtl_for_gimple_stmt (stmt, last);
b7211528 4039 }
242229bb
JH
4040 }
4041 }
4042
a5883ba0
MM
4043 currently_expanding_gimple_stmt = NULL;
4044
7241571e 4045 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
4046 FOR_EACH_EDGE (e, ei, bb->succs)
4047 {
7241571e
JJ
4048 if (e->goto_locus && e->goto_block)
4049 {
4050 set_curr_insn_source_location (e->goto_locus);
4051 set_curr_insn_block (e->goto_block);
4052 e->goto_locus = curr_insn_locator ();
4053 }
4054 e->goto_block = NULL;
4055 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4056 {
4057 emit_jump (label_rtx_for_bb (e->dest));
4058 e->flags &= ~EDGE_FALLTHRU;
4059 }
a9b77cd1
ZD
4060 }
4061
ae761c45
AH
 4062 /* Expanded RTL can create a jump in the last instruction of the block.
 4063 This jump might later be assumed to be a jump to the successor and break edge insertion.
 4064 We need to insert a dummy move to prevent this. PR41440. */
4065 if (single_succ_p (bb)
4066 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4067 && (last = get_last_insn ())
4068 && JUMP_P (last))
4069 {
4070 rtx dummy = gen_reg_rtx (SImode);
4071 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4072 }
4073
242229bb
JH
4074 do_pending_stack_adjust ();
4075
3f117656 4076 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
4077 before a barrier and/or table jump insn. */
4078 last = get_last_insn ();
4b4bf941 4079 if (BARRIER_P (last))
242229bb
JH
4080 last = PREV_INSN (last);
4081 if (JUMP_TABLE_DATA_P (last))
4082 last = PREV_INSN (PREV_INSN (last));
4083 BB_END (bb) = last;
caf93cb0 4084
242229bb 4085 update_bb_for_insn (bb);
80c7a9eb 4086
242229bb
JH
4087 return bb;
4088}
4089
4090
4091/* Create a basic block for initialization code. */
4092
4093static basic_block
4094construct_init_block (void)
4095{
4096 basic_block init_block, first_block;
fd44f634
JH
4097 edge e = NULL;
4098 int flags;
275a4187 4099
fd44f634
JH
4100 /* Multiple entry points not supported yet. */
4101 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
4102 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4103 init_rtl_bb_info (EXIT_BLOCK_PTR);
4104 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4105 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 4106
fd44f634 4107 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 4108
fd44f634
JH
4109 /* When entry edge points to first basic block, we don't need jump,
4110 otherwise we have to jump into proper target. */
4111 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4112 {
726a989a 4113 tree label = gimple_block_label (e->dest);
fd44f634
JH
4114
4115 emit_jump (label_rtx (label));
4116 flags = 0;
275a4187 4117 }
fd44f634
JH
4118 else
4119 flags = EDGE_FALLTHRU;
242229bb
JH
4120
4121 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4122 get_last_insn (),
4123 ENTRY_BLOCK_PTR);
4124 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4125 init_block->count = ENTRY_BLOCK_PTR->count;
7d776ee2
RG
4126 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4127 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
242229bb
JH
4128 if (e)
4129 {
4130 first_block = e->dest;
4131 redirect_edge_succ (e, init_block);
fd44f634 4132 e = make_edge (init_block, first_block, flags);
242229bb
JH
4133 }
4134 else
4135 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4136 e->probability = REG_BR_PROB_BASE;
4137 e->count = ENTRY_BLOCK_PTR->count;
4138
4139 update_bb_for_insn (init_block);
4140 return init_block;
4141}
4142
55e092c4
JH
4143/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4144 found in the block tree. */
4145
4146static void
4147set_block_levels (tree block, int level)
4148{
4149 while (block)
4150 {
4151 BLOCK_NUMBER (block) = level;
4152 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4153 block = BLOCK_CHAIN (block);
4154 }
4155}
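
A hedged illustration (not part of this file) of the numbering set_block_levels assigns: sibling blocks share a level and each nesting step adds one.

/* For a lexical block tree shaped like:
     DECL_INITIAL                 -> BLOCK_NUMBER 0
       first subblock             -> BLOCK_NUMBER 1
       second subblock            -> BLOCK_NUMBER 1
         nested subblock          -> BLOCK_NUMBER 2
   a common ancestor of two blocks can then be found by walking up from
   the deeper block until the levels match.  */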
242229bb
JH
4156
4157/* Create a block containing landing pads and similar stuff. */
4158
4159static void
4160construct_exit_block (void)
4161{
4162 rtx head = get_last_insn ();
4163 rtx end;
4164 basic_block exit_block;
628f6a4e
BE
4165 edge e, e2;
4166 unsigned ix;
4167 edge_iterator ei;
071a42f9 4168 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 4169
bf08ebeb
JH
4170 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4171
caf93cb0 4172 /* Make sure the locus is set to the end of the function, so that
242229bb 4173 epilogue line numbers and warnings are set properly. */
6773e15f 4174 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
4175 input_location = cfun->function_end_locus;
4176
4177 /* The following insns belong to the top scope. */
55e092c4 4178 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 4179
242229bb
JH
4180 /* Generate rtl for function exit. */
4181 expand_function_end ();
4182
4183 end = get_last_insn ();
4184 if (head == end)
4185 return;
071a42f9
JH
 4186 /* While emitting the function end we could move the end of the last basic block.
4187 */
4188 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 4189 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 4190 head = NEXT_INSN (head);
80c7a9eb
RH
4191 exit_block = create_basic_block (NEXT_INSN (head), end,
4192 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
4193 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4194 exit_block->count = EXIT_BLOCK_PTR->count;
7d776ee2
RG
4195 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4196 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
628f6a4e
BE
4197
4198 ix = 0;
4199 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 4200 {
8fb790fd 4201 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 4202 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
4203 redirect_edge_succ (e, exit_block);
4204 else
4205 ix++;
242229bb 4206 }
628f6a4e 4207
242229bb
JH
4208 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4209 e->probability = REG_BR_PROB_BASE;
4210 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 4211 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
4212 if (e2 != e)
4213 {
c22cacf3 4214 e->count -= e2->count;
242229bb
JH
4215 exit_block->count -= e2->count;
4216 exit_block->frequency -= EDGE_FREQUENCY (e2);
4217 }
4218 if (e->count < 0)
4219 e->count = 0;
4220 if (exit_block->count < 0)
4221 exit_block->count = 0;
4222 if (exit_block->frequency < 0)
4223 exit_block->frequency = 0;
4224 update_bb_for_insn (exit_block);
4225}
4226
c22cacf3 4227/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
4228 Look for ARRAY_REF nodes with non-constant indexes and mark them
4229 addressable. */
4230
4231static tree
4232discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4233 void *data ATTRIBUTE_UNUSED)
4234{
4235 tree t = *tp;
4236
4237 if (IS_TYPE_OR_DECL_P (t))
4238 *walk_subtrees = 0;
4239 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4240 {
4241 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4242 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4243 && (!TREE_OPERAND (t, 2)
4244 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4245 || (TREE_CODE (t) == COMPONENT_REF
4246 && (!TREE_OPERAND (t,2)
4247 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4248 || TREE_CODE (t) == BIT_FIELD_REF
4249 || TREE_CODE (t) == REALPART_EXPR
4250 || TREE_CODE (t) == IMAGPART_EXPR
4251 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 4252 || CONVERT_EXPR_P (t))
a1b23b2f
UW
4253 t = TREE_OPERAND (t, 0);
4254
4255 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4256 {
4257 t = get_base_address (t);
6f11d690
RG
4258 if (t && DECL_P (t)
4259 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
4260 TREE_ADDRESSABLE (t) = 1;
4261 }
4262
4263 *walk_subtrees = 0;
4264 }
4265
4266 return NULL_TREE;
4267}
4268
4269/* RTL expansion is not able to compile array references with variable
 4270 offsets for arrays stored in a single register. Discover such
4271 expressions and mark variables as addressable to avoid this
4272 scenario. */
4273
4274static void
4275discover_nonconstant_array_refs (void)
4276{
4277 basic_block bb;
726a989a 4278 gimple_stmt_iterator gsi;
a1b23b2f
UW
4279
4280 FOR_EACH_BB (bb)
726a989a
RB
4281 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4282 {
4283 gimple stmt = gsi_stmt (gsi);
aa847cc8
JJ
4284 if (!is_gimple_debug (stmt))
4285 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
726a989a 4286 }
a1b23b2f
UW
4287}
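
A hedged example (not part of this file) of the pattern discover_nonconstant_array_refs guards against: an array small enough that, left non-addressable, it could be promoted into a single register, combined with a variable index that RTL expansion could not then handle. The function name below is illustrative.

/* Marking "v" TREE_ADDRESSABLE keeps it in memory so that v[i] can be
   expanded as a real memory reference; whether the promotion would
   otherwise happen depends on the target and optimization level.  */
int
pick (int i)
{
  int v[2] = { 10, 20 };
  return v[i];                  /* variable offset into a register-sized array */
}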
4288
2e3f842f
L
4289/* This function sets crtl->args.internal_arg_pointer to a virtual
4290 register if DRAP is needed. Local register allocator will replace
4291 virtual_incoming_args_rtx with the virtual register. */
4292
4293static void
4294expand_stack_alignment (void)
4295{
4296 rtx drap_rtx;
e939805b 4297 unsigned int preferred_stack_boundary;
2e3f842f
L
4298
4299 if (! SUPPORTS_STACK_ALIGNMENT)
4300 return;
b8698a0f 4301
2e3f842f
L
4302 if (cfun->calls_alloca
4303 || cfun->has_nonlocal_label
4304 || crtl->has_nonlocal_goto)
4305 crtl->need_drap = true;
4306
890b9b96
L
4307 /* Call update_stack_boundary here again to update incoming stack
4308 boundary. It may set incoming stack alignment to a different
4309 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4310 use the minimum incoming stack alignment to check if it is OK
4311 to perform sibcall optimization since sibcall optimization will
4312 only align the outgoing stack to incoming stack boundary. */
4313 if (targetm.calls.update_stack_boundary)
4314 targetm.calls.update_stack_boundary ();
4315
4316 /* The incoming stack frame has to be aligned at least at
4317 parm_stack_boundary. */
4318 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2e3f842f 4319
2e3f842f
L
4320 /* Update crtl->stack_alignment_estimated and use it later to align
4321 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4322 exceptions since callgraph doesn't collect incoming stack alignment
4323 in this case. */
8f4f502f 4324 if (cfun->can_throw_non_call_exceptions
2e3f842f
L
4325 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4326 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4327 else
4328 preferred_stack_boundary = crtl->preferred_stack_boundary;
4329 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4330 crtl->stack_alignment_estimated = preferred_stack_boundary;
4331 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4332 crtl->stack_alignment_needed = preferred_stack_boundary;
4333
890b9b96
L
4334 gcc_assert (crtl->stack_alignment_needed
4335 <= crtl->stack_alignment_estimated);
4336
2e3f842f 4337 crtl->stack_realign_needed
e939805b 4338 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 4339 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
4340
4341 crtl->stack_realign_processed = true;
4342
4343 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4344 alignment. */
4345 gcc_assert (targetm.calls.get_drap_rtx != NULL);
b8698a0f 4346 drap_rtx = targetm.calls.get_drap_rtx ();
2e3f842f 4347
d015f7cc
L
4348 /* stack_realign_drap and drap_rtx must match. */
4349 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4350
2e3f842f
L
4351 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4352 if (NULL != drap_rtx)
4353 {
4354 crtl->args.internal_arg_pointer = drap_rtx;
4355
4356 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4357 needed. */
4358 fixup_tail_calls ();
4359 }
4360}
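
A hedged example (not GCC code) of what drives the logic above: a local object requiring more alignment than the incoming stack guarantees raises stack_alignment_estimated, and on targets with SUPPORTS_STACK_ALIGNMENT (such as i?86) this is what can end up requesting a DRAP register. The typedef and function name are illustrative.

/* Uses the GCC vector_size extension; a 32-byte aligned slot typically
   exceeds the default incoming stack boundary on 32-bit x86.  */
typedef float v8sf __attribute__ ((vector_size (32)));

float
use_aligned_local (float x)
{
  v8sf v = { x, x, x, x, x, x, x, x };  /* needs a 32-byte aligned slot */
  return v[0];
}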
4361
242229bb
JH
4362/* Translate the intermediate representation contained in the CFG
4363 from GIMPLE trees to RTL.
4364
4365 We do conversion per basic block and preserve/update the tree CFG.
4366 This implies we have to do some magic as the CFG can simultaneously
4367 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 4368 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
4369 the expansion. */
4370
c2924966 4371static unsigned int
726a989a 4372gimple_expand_cfg (void)
242229bb
JH
4373{
4374 basic_block bb, init_block;
4375 sbitmap blocks;
0ef90296
ZD
4376 edge_iterator ei;
4377 edge e;
3a42502d 4378 rtx var_seq;
4e3825db
MM
4379 unsigned i;
4380
f029db69 4381 timevar_push (TV_OUT_OF_SSA);
4e3825db 4382 rewrite_out_of_ssa (&SA);
f029db69 4383 timevar_pop (TV_OUT_OF_SSA);
4e3825db
MM
4384 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4385 sizeof (rtx));
242229bb 4386
be147e84
RG
4387 /* Make sure all values used by the optimization passes have sane
4388 defaults. */
4389 reg_renumber = 0;
4390
4586b4ca
SB
4391 /* Some backends want to know that we are expanding to RTL. */
4392 currently_expanding_to_rtl = 1;
cd7d9fd7
RG
4393 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4394 free_dominance_info (CDI_DOMINATORS);
4586b4ca 4395
bf08ebeb
JH
4396 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4397
55e092c4 4398 insn_locators_alloc ();
fe8a7779 4399 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
4400 {
4401 /* Eventually, all FEs should explicitly set function_start_locus. */
4402 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4403 set_curr_insn_source_location
4404 (DECL_SOURCE_LOCATION (current_function_decl));
4405 else
4406 set_curr_insn_source_location (cfun->function_start_locus);
4407 }
9ff70652
JJ
4408 else
4409 set_curr_insn_source_location (UNKNOWN_LOCATION);
55e092c4
JH
4410 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4411 prologue_locator = curr_insn_locator ();
4412
2b21299c
JJ
4413#ifdef INSN_SCHEDULING
4414 init_sched_attrs ();
4415#endif
4416
55e092c4
JH
4417 /* Make sure first insn is a note even if we don't want linenums.
4418 This makes sure the first insn will never be deleted.
4419 Also, final expects a note to appear there. */
4420 emit_note (NOTE_INSN_DELETED);
6429e3be 4421
a1b23b2f
UW
4422 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4423 discover_nonconstant_array_refs ();
4424
e41b2a33 4425 targetm.expand_to_rtl_hook ();
cb91fab0 4426 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f 4427 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
890b9b96 4428 crtl->stack_alignment_estimated = 0;
cb91fab0
JH
4429 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4430 cfun->cfg->max_jumptable_ents = 0;
4431
ae9fd6b7
JH
 4432 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
 4433 of the function section at expansion time to predict the distance of calls. */
4434 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4435
727a31fa 4436 /* Expand the variables recorded during gimple lowering. */
f029db69 4437 timevar_push (TV_VAR_EXPAND);
3a42502d
RH
4438 start_sequence ();
4439
242229bb 4440 expand_used_vars ();
3a42502d
RH
4441
4442 var_seq = get_insns ();
4443 end_sequence ();
f029db69 4444 timevar_pop (TV_VAR_EXPAND);
242229bb 4445
7d69de61
RH
4446 /* Honor stack protection warnings. */
4447 if (warn_stack_protect)
4448 {
e3b5732b 4449 if (cfun->calls_alloca)
b8698a0f 4450 warning (OPT_Wstack_protector,
3b123595
SB
4451 "stack protector not protecting local variables: "
4452 "variable length buffer");
cb91fab0 4453 if (has_short_buffer && !crtl->stack_protect_guard)
b8698a0f 4454 warning (OPT_Wstack_protector,
3b123595
SB
4455 "stack protector not protecting function: "
4456 "all local arrays are less than %d bytes long",
7d69de61
RH
4457 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4458 }
4459
242229bb 4460 /* Set up parameters and prepare for return, for the function. */
b79c5284 4461 expand_function_start (current_function_decl);
242229bb 4462
3a42502d
RH
4463 /* If we emitted any instructions for setting up the variables,
4464 emit them before the FUNCTION_START note. */
4465 if (var_seq)
4466 {
4467 emit_insn_before (var_seq, parm_birth_insn);
4468
4469 /* In expand_function_end we'll insert the alloca save/restore
 4470 before parm_birth_insn. We've just inserted an alloca call.
4471 Adjust the pointer to match. */
4472 parm_birth_insn = var_seq;
4473 }
4474
4e3825db
MM
4475 /* Now that we also have the parameter RTXs, copy them over to our
4476 partitions. */
4477 for (i = 0; i < SA.map->num_partitions; i++)
4478 {
4479 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4480
4481 if (TREE_CODE (var) != VAR_DECL
4482 && !SA.partition_to_pseudo[i])
4483 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4484 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
4485
4486 /* If this decl was marked as living in multiple places, reset
4487 this now to NULL. */
4488 if (DECL_RTL_IF_SET (var) == pc_rtx)
4489 SET_DECL_RTL (var, NULL);
4490
4e3825db
MM
4491 /* Some RTL parts really want to look at DECL_RTL(x) when x
4492 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4493 SET_DECL_RTL here making this available, but that would mean
4494 to select one of the potentially many RTLs for one DECL. Instead
4495 of doing that we simply reset the MEM_EXPR of the RTL in question,
4496 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4497 if (!DECL_RTL_SET_P (var))
4498 {
4499 if (MEM_P (SA.partition_to_pseudo[i]))
4500 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4501 }
4502 }
4503
d466b407
MM
4504 /* If we have a class containing differently aligned pointers
4505 we need to merge those into the corresponding RTL pointer
4506 alignment. */
4507 for (i = 1; i < num_ssa_names; i++)
4508 {
4509 tree name = ssa_name (i);
4510 int part;
4511 rtx r;
4512
4513 if (!name
4514 || !POINTER_TYPE_P (TREE_TYPE (name))
4515 /* We might have generated new SSA names in
 4516 update_alias_info_with_stack_vars. They will have NULL
4517 defining statements, and won't be part of the partitioning,
4518 so ignore those. */
4519 || !SSA_NAME_DEF_STMT (name))
4520 continue;
4521 part = var_to_partition (SA.map, name);
4522 if (part == NO_PARTITION)
4523 continue;
4524 r = SA.partition_to_pseudo[part];
4525 if (REG_P (r))
4526 mark_reg_pointer (r, get_pointer_alignment (name));
4527 }
4528
242229bb
JH
4529 /* If this function is `main', emit a call to `__main'
4530 to run global initializers, etc. */
4531 if (DECL_NAME (current_function_decl)
4532 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4533 && DECL_FILE_SCOPE_P (current_function_decl))
4534 expand_main_function ();
4535
7d69de61
RH
4536 /* Initialize the stack_protect_guard field. This must happen after the
4537 call to __main (if any) so that the external decl is initialized. */
cb91fab0 4538 if (crtl->stack_protect_guard)
7d69de61
RH
4539 stack_protect_prologue ();
4540
4e3825db
MM
4541 expand_phi_nodes (&SA);
4542
3fbd86b1 4543 /* Register rtl specific functions for cfg. */
242229bb
JH
4544 rtl_register_cfg_hooks ();
4545
4546 init_block = construct_init_block ();
4547
0ef90296 4548 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 4549 remaining edges later. */
0ef90296
ZD
4550 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4551 e->flags &= ~EDGE_EXECUTABLE;
4552
8b11009b 4553 lab_rtx_for_bb = pointer_map_create ();
242229bb 4554 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 4555 bb = expand_gimple_basic_block (bb);
bf08ebeb 4556
b5b8b0ac
AO
4557 if (MAY_HAVE_DEBUG_INSNS)
4558 expand_debug_locations ();
4559
452aa9c5
RG
4560 /* Free stuff we no longer need after GIMPLE optimizations. */
4561 free_dominance_info (CDI_DOMINATORS);
4562 free_dominance_info (CDI_POST_DOMINATORS);
4563 delete_tree_cfg_annotations ();
4564
f029db69 4565 timevar_push (TV_OUT_OF_SSA);
4e3825db 4566 finish_out_of_ssa (&SA);
f029db69 4567 timevar_pop (TV_OUT_OF_SSA);
4e3825db 4568
f029db69 4569 timevar_push (TV_POST_EXPAND);
91753e21
RG
4570 /* We are no longer in SSA form. */
4571 cfun->gimple_df->in_ssa_p = false;
7d776ee2
RG
4572 if (current_loops)
4573 loops_state_clear (LOOP_CLOSED_SSA);
91753e21 4574
bf08ebeb
JH
 4575 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4576 conservatively to true until they are all profile aware. */
8b11009b 4577 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 4578 free_histograms ();
242229bb
JH
4579
4580 construct_exit_block ();
55e092c4
JH
4581 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4582 insn_locators_finalize ();
242229bb 4583
1d65f45c 4584 /* Zap the tree EH table. */
e8a2a782 4585 set_eh_throw_stmt_table (cfun, NULL);
242229bb 4586
42821aff
MM
4587 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
4588 split edges which edge insertions might do. */
242229bb 4589 rebuild_jump_labels (get_insns ());
242229bb 4590
4e3825db
MM
4591 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4592 {
4593 edge e;
4594 edge_iterator ei;
4595 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4596 {
4597 if (e->insns.r)
bc470c24 4598 {
42821aff 4599 rebuild_jump_labels_chain (e->insns.r);
bc470c24
JJ
4600 /* Avoid putting insns before parm_birth_insn. */
4601 if (e->src == ENTRY_BLOCK_PTR
4602 && single_succ_p (ENTRY_BLOCK_PTR)
4603 && parm_birth_insn)
4604 {
4605 rtx insns = e->insns.r;
4606 e->insns.r = NULL_RTX;
4607 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4608 }
4609 else
4610 commit_one_edge_insertion (e);
4611 }
4e3825db
MM
4612 else
4613 ei_next (&ei);
4614 }
4615 }
4616
4617 /* We're done expanding trees to RTL. */
4618 currently_expanding_to_rtl = 0;
4619
4620 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4621 {
4622 edge e;
4623 edge_iterator ei;
4624 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4625 {
4626 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4627 e->flags &= ~EDGE_EXECUTABLE;
4628
4629 /* At the moment not all abnormal edges match the RTL
4630 representation. It is safe to remove them here as
4631 find_many_sub_basic_blocks will rediscover them.
4632 In the future we should get this fixed properly. */
4633 if ((e->flags & EDGE_ABNORMAL)
4634 && !(e->flags & EDGE_SIBCALL))
4635 remove_edge (e);
4636 else
4637 ei_next (&ei);
4638 }
4639 }
4640
242229bb
JH
4641 blocks = sbitmap_alloc (last_basic_block);
4642 sbitmap_ones (blocks);
4643 find_many_sub_basic_blocks (blocks);
242229bb 4644 sbitmap_free (blocks);
4e3825db 4645 purge_all_dead_edges ();
242229bb 4646
2e3f842f
L
4647 expand_stack_alignment ();
4648
be147e84
RG
4649 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4650 function. */
4651 if (crtl->tail_call_emit)
4652 fixup_tail_calls ();
4653
dac1fbf8
RG
4654 /* After initial rtl generation, call back to finish generating
4655 exception support code. We need to do this before cleaning up
4656 the CFG as the code does not expect dead landing pads. */
4657 if (cfun->eh->region_tree != NULL)
4658 finish_eh_generation ();
4659
4660 /* Remove unreachable blocks, otherwise we cannot compute dominators
4661 which are needed for loop state verification. As a side-effect
4662 this also compacts blocks.
4663 ??? We cannot remove trivially dead insns here as for example
4664 the DRAP reg on i?86 is not magically live at this point.
4665 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4666 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4667
242229bb 4668#ifdef ENABLE_CHECKING
62e5bf5d 4669 verify_flow_info ();
242229bb 4670#endif
9f8628ba 4671
be147e84
RG
4672 /* Initialize pseudos allocated for hard registers. */
4673 emit_initial_value_sets ();
4674
4675 /* And finally unshare all RTL. */
4676 unshare_all_rtl ();
4677
9f8628ba
PB
4678 /* There's no need to defer outputting this function any more; we
4679 know we want to output it. */
4680 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4681
4682 /* Now that we're done expanding trees to RTL, we shouldn't have any
4683 more CONCATs anywhere. */
4684 generating_concat_p = 0;
4685
b7211528
SB
4686 if (dump_file)
4687 {
4688 fprintf (dump_file,
4689 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4690 /* And the pass manager will dump RTL for us. */
4691 }
ef330312
PB
4692
4693 /* If we're emitting a nested function, make sure its parent gets
4694 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 4695 {
ef330312
PB
4696 tree parent;
4697 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
4698 parent != NULL_TREE;
4699 parent = get_containing_scope (parent))
ef330312 4700 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 4701 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 4702 }
c22cacf3 4703
ef330312
PB
4704 /* We are now committed to emitting code for this function. Do any
4705 preparation, such as emitting abstract debug info for the inline
4706 before it gets mangled by optimization. */
4707 if (cgraph_function_possibly_inlined_p (current_function_decl))
4708 (*debug_hooks->outlining_inline_function) (current_function_decl);
4709
4710 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
4711
4712 /* After expanding, the return labels are no longer needed. */
4713 return_label = NULL;
4714 naked_return_label = NULL;
0a35513e
AH
4715
4716 /* After expanding, the tm_restart map is no longer needed. */
4717 if (cfun->gimple_df->tm_restart)
4718 {
4719 htab_delete (cfun->gimple_df->tm_restart);
4720 cfun->gimple_df->tm_restart = NULL;
4721 }
4722
55e092c4
JH
4723 /* Tag the blocks with a depth number so that change_scope can find
4724 the common parent easily. */
4725 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 4726 default_rtl_profile ();
be147e84 4727
f029db69 4728 timevar_pop (TV_POST_EXPAND);
be147e84 4729
c2924966 4730 return 0;
242229bb
JH
4731}
4732
e3b5732b 4733struct rtl_opt_pass pass_expand =
242229bb 4734{
8ddbbcae 4735 {
e3b5732b 4736 RTL_PASS,
c22cacf3 4737 "expand", /* name */
242229bb 4738 NULL, /* gate */
726a989a 4739 gimple_expand_cfg, /* execute */
242229bb
JH
4740 NULL, /* sub */
4741 NULL, /* next */
4742 0, /* static_pass_number */
c22cacf3 4743 TV_EXPAND, /* tv_id */
688a482d
RG
4744 PROP_ssa | PROP_gimple_leh | PROP_cfg
4745 | PROP_gimple_lcx, /* properties_required */
242229bb 4746 PROP_rtl, /* properties_provided */
4e3825db
MM
4747 PROP_ssa | PROP_trees, /* properties_destroyed */
4748 TODO_verify_ssa | TODO_verify_flow
4749 | TODO_verify_stmts, /* todo_flags_start */
22c5fa5f 4750 TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 4751 }
242229bb 4752};