/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stringpool.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "diagnostic-core.h"
#include "target.h"
#include "splay-tree.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"

#include "langhooks-def.h"   /* FIXME: for lhd_set_decl_assembler_name */
#include "tree-pass.h"       /* FIXME: only for PROP_gimple_any */
#include "builtins.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
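
/* Illustrative sketch, not part of the original file: the GOVD_* values are
   single-bit flags, so a variable's data-sharing state is the bitwise OR of
   several of them and is queried with masks such as GOVD_DATA_SHARE_CLASS.
   The hypothetical helper below only demonstrates the idiom used later in
   this file, e.g. omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN).  */

static inline bool
govd_has_sharing_class_p (unsigned int flags)
{
  /* True when an explicit data-sharing class bit is already set.  */
  return (flags & GOVD_DATA_SHARE_CLASS) != 0;
}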

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
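
/* Illustrative sketch, not part of the original file: some ORT_* checks are
   also done bitwise; new_omp_context below, for instance, tests
   (region_type & ORT_TASK) == 0 to tell task regions from the rest.  The
   hypothetical predicate here just restates that test.  */

static inline bool
ort_is_task_p (enum omp_region_type region_type)
{
  return (region_type & ORT_TASK) != 0;
}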

/* Gimplify hashtable helper.  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
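
/* The hash and equal members above are only declared here; their definitions
   live elsewhere in the file.  As a rough sketch of what such a hasher
   usually does (an assumption for illustration, not the actual definitions),
   hash () could hash the cached expression and equal () could compare two
   entries structurally:

     inline hashval_t
     gimplify_hasher::hash (const value_type *p)
     {
       return iterative_hash_expr (p->val, 0);
     }

     inline bool
     gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
     {
       return operand_equal_p (p1->val, p2->val, 0);
     }  */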

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gimple> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  bool save_stack;
  bool into_ssa;
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gimple
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gimple>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
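
/* Illustrative sketch, not part of the original file: a hypothetical helper
   showing the typical calling pattern.  The initializing statement is
   appended to *PRE_P, and the returned temporary can then stand in for VAL
   wherever a simple gimple value is required.  */

static tree ATTRIBUTE_UNUSED
example_force_into_tmp (tree val, gimple_seq *pre_p)
{
  return get_initialized_tmp_var (val, pre_p, NULL);
}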

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;
  location_t start_locus = 0, end_locus = 0;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  gimple ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      gimple ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
1288
ad19c4be
EB
1289/* Gimplify a variable-length array DECL. */
1290
786025ea 1291static void
726a989a 1292gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1293{
1294 /* This is a variable-sized decl. Simplify its size and mark it
98906124 1295 for deferred expansion. */
786025ea
JJ
1296 tree t, addr, ptr_type;
1297
726a989a
RB
1298 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1299 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea 1300
0138d6b2
JM
1301 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1302 if (DECL_HAS_VALUE_EXPR_P (decl))
1303 return;
1304
786025ea
JJ
1305 /* All occurrences of this decl in final gimplified code will be
1306 replaced by indirection. Setting DECL_VALUE_EXPR does two
1307 things: First, it lets the rest of the gimplifier know what
1308 replacement to use. Second, it lets the debug info know
1309 where to find the value. */
1310 ptr_type = build_pointer_type (TREE_TYPE (decl));
1311 addr = create_tmp_var (ptr_type, get_name (decl));
1312 DECL_IGNORED_P (addr) = 0;
1313 t = build_fold_indirect_ref (addr);
31408f60 1314 TREE_THIS_NOTRAP (t) = 1;
786025ea
JJ
1315 SET_DECL_VALUE_EXPR (decl, t);
1316 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1317
e79983f4 1318 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13e49da9
TV
1319 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1320 size_int (DECL_ALIGN (decl)));
d3c12306 1321 /* The call has been built for a variable-sized object. */
63d2a353 1322 CALL_ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1323 t = fold_convert (ptr_type, t);
726a989a 1324 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1325
726a989a 1326 gimplify_and_add (t, seq_p);
786025ea
JJ
1327
1328 /* Indicate that we need to restore the stack level when the
1329 enclosing BIND_EXPR is exited. */
1330 gimplify_ctxp->save_stack = true;
1331}
1332
45b0be94
AM
1333/* A helper function to be called via walk_tree. Mark all labels under *TP
1334 as being forced. To be called for DECL_INITIAL of static variables. */
1335
1336static tree
1337force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1338{
1339 if (TYPE_P (*tp))
1340 *walk_subtrees = 0;
1341 if (TREE_CODE (*tp) == LABEL_DECL)
1342 FORCED_LABEL (*tp) = 1;
1343
1344 return NULL_TREE;
1345}
1346
ad19c4be 1347/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
350fae66
RK
1348 and initialization explicit. */
1349
1350static enum gimplify_status
726a989a 1351gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1352{
1353 tree stmt = *stmt_p;
1354 tree decl = DECL_EXPR_DECL (stmt);
1355
1356 *stmt_p = NULL_TREE;
1357
1358 if (TREE_TYPE (decl) == error_mark_node)
1359 return GS_ERROR;
1360
8e0a600b
JJ
1361 if ((TREE_CODE (decl) == TYPE_DECL
1362 || TREE_CODE (decl) == VAR_DECL)
1363 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
726a989a 1364 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
350fae66 1365
d400d17e
EB
1366 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1367 in case its size expressions contain problematic nodes like CALL_EXPR. */
1368 if (TREE_CODE (decl) == TYPE_DECL
1369 && DECL_ORIGINAL_TYPE (decl)
1370 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1371 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1372
8e0a600b 1373 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1374 {
1375 tree init = DECL_INITIAL (decl);
1376
b38f3813
EB
1377 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1378 || (!TREE_STATIC (decl)
1379 && flag_stack_check == GENERIC_STACK_CHECK
1380 && compare_tree_int (DECL_SIZE_UNIT (decl),
1381 STACK_CHECK_MAX_VAR_SIZE) > 0))
726a989a 1382 gimplify_vla_decl (decl, seq_p);
350fae66 1383
22192559
JM
1384 /* Some front ends do not explicitly declare all anonymous
1385 artificial variables. We compensate here by declaring the
1386 variables, though it would be better if the front ends would
1387 explicitly declare them. */
1388 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1389 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1390 gimple_add_tmp_var (decl);
1391
350fae66
RK
1392 if (init && init != error_mark_node)
1393 {
1394 if (!TREE_STATIC (decl))
1395 {
1396 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1397 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1398 gimplify_and_add (init, seq_p);
1399 ggc_free (init);
350fae66
RK
1400 }
1401 else
1402 /* We must still examine initializers for static variables
1403 as they may contain a label address. */
1404 walk_tree (&init, force_labels_r, NULL, NULL);
1405 }
350fae66
RK
1406 }
1407
1408 return GS_ALL_DONE;
1409}
1410
6de9cd9a
DN
1411/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1412 and replacing the LOOP_EXPR with goto, but if the loop contains an
1413 EXIT_EXPR, we need to append a label for it to jump to. */
1414
1415static enum gimplify_status
726a989a 1416gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1417{
1418 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1419 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1420
726a989a 1421 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1422
1423 gimplify_ctxp->exit_label = NULL_TREE;
1424
fff34d35 1425 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1426
726a989a
RB
1427 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1428
6de9cd9a 1429 if (gimplify_ctxp->exit_label)
ad19c4be
EB
1430 gimplify_seq_add_stmt (pre_p,
1431 gimple_build_label (gimplify_ctxp->exit_label));
726a989a
RB
1432
1433 gimplify_ctxp->exit_label = saved_label;
1434
1435 *expr_p = NULL;
1436 return GS_ALL_DONE;
1437}
1438
ad19c4be 1439/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1440 by an enlightened front-end, or by shortcut_cond_expr. */
1441
1442static enum gimplify_status
1443gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1444{
1445 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1446
1447 tree_stmt_iterator i = tsi_start (*expr_p);
1448
1449 while (!tsi_end_p (i))
6de9cd9a 1450 {
726a989a
RB
1451 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1452 tsi_delink (&i);
6de9cd9a 1453 }
6de9cd9a 1454
726a989a
RB
1455 if (temp)
1456 {
1457 *expr_p = temp;
1458 return GS_OK;
1459 }
6de9cd9a
DN
1460
1461 return GS_ALL_DONE;
1462}
0f1f6967 1463
68e72840
SB
1464\f
1465/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
1466 branch to. */
1467
1468static enum gimplify_status
726a989a 1469gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1470{
1471 tree switch_expr = *expr_p;
726a989a 1472 gimple_seq switch_body_seq = NULL;
6de9cd9a 1473 enum gimplify_status ret;
0cd2402d
SB
1474 tree index_type = TREE_TYPE (switch_expr);
1475 if (index_type == NULL_TREE)
1476 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 1477
726a989a
RB
1478 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1479 fb_rvalue);
1480 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1481 return ret;
6de9cd9a
DN
1482
1483 if (SWITCH_BODY (switch_expr))
1484 {
9771b263
DN
1485 vec<tree> labels;
1486 vec<tree> saved_labels;
726a989a 1487 tree default_case = NULL_TREE;
726a989a 1488 gimple gimple_switch;
b8698a0f 1489
6de9cd9a
DN
1490 /* If someone can be bothered to fill in the labels, they can
1491 be bothered to null out the body too. */
282899df 1492 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1493
0cd2402d 1494 /* Save old labels, get new ones from body, then restore the old
726a989a 1495 labels. Save all the things from the switch body to append after. */
6de9cd9a 1496 saved_labels = gimplify_ctxp->case_labels;
9771b263 1497 gimplify_ctxp->case_labels.create (8);
6de9cd9a 1498
726a989a 1499 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
6de9cd9a
DN
1500 labels = gimplify_ctxp->case_labels;
1501 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1502
68e72840
SB
1503 preprocess_case_label_vec_for_gimple (labels, index_type,
1504 &default_case);
32f579f6 1505
726a989a 1506 if (!default_case)
6de9cd9a 1507 {
68e72840 1508 gimple new_default;
6de9cd9a 1509
68e72840
SB
1510 default_case
1511 = build_case_label (NULL_TREE, NULL_TREE,
1512 create_artificial_label (UNKNOWN_LOCATION));
1513 new_default = gimple_build_label (CASE_LABEL (default_case));
1514 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 1515 }
f667741c 1516
fd8d363e
SB
1517 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1518 default_case, labels);
726a989a
RB
1519 gimplify_seq_add_stmt (pre_p, gimple_switch);
1520 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 1521 labels.release ();
6de9cd9a 1522 }
282899df
NS
1523 else
1524 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1525
726a989a 1526 return GS_ALL_DONE;
6de9cd9a
DN
1527}
1528
ad19c4be 1529/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 1530
6de9cd9a 1531static enum gimplify_status
726a989a 1532gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1533{
953ff289 1534 struct gimplify_ctx *ctxp;
726a989a 1535 gimple gimple_label;
953ff289
DN
1536
1537 /* Invalid OpenMP programs can play Duff's Device type games with
1538 #pragma omp parallel. At least in the C front end, we don't
1539 detect such invalid branches until after gimplification. */
1540 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 1541 if (ctxp->case_labels.exists ())
953ff289 1542 break;
282899df 1543
726a989a 1544 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
9771b263 1545 ctxp->case_labels.safe_push (*expr_p);
726a989a
RB
1546 gimplify_seq_add_stmt (pre_p, gimple_label);
1547
6de9cd9a
DN
1548 return GS_ALL_DONE;
1549}
1550
6de9cd9a
DN
1551/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1552 if necessary. */
1553
1554tree
1555build_and_jump (tree *label_p)
1556{
1557 if (label_p == NULL)
1558 /* If there's nowhere to jump, just fall through. */
65355d53 1559 return NULL_TREE;
6de9cd9a
DN
1560
1561 if (*label_p == NULL_TREE)
1562 {
c2255bc4 1563 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1564 *label_p = label;
1565 }
1566
1567 return build1 (GOTO_EXPR, void_type_node, *label_p);
1568}
1569
1570/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1571 This also involves building a label to jump to and communicating it to
1572 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1573
1574static enum gimplify_status
1575gimplify_exit_expr (tree *expr_p)
1576{
1577 tree cond = TREE_OPERAND (*expr_p, 0);
1578 tree expr;
1579
1580 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1581 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1582 *expr_p = expr;
1583
1584 return GS_OK;
1585}
1586
26d44ae2
RH
1587/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1588 different from its canonical type, wrap the whole thing inside a
1589 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1590 type.
6de9cd9a 1591
26d44ae2
RH
1592 The canonical type of a COMPONENT_REF is the type of the field being
1593 referenced--unless the field is a bit-field which can be read directly
1594 in a smaller mode, in which case the canonical type is the
1595 sign-appropriate type corresponding to that mode. */
6de9cd9a 1596
26d44ae2
RH
1597static void
1598canonicalize_component_ref (tree *expr_p)
6de9cd9a 1599{
26d44ae2
RH
1600 tree expr = *expr_p;
1601 tree type;
6de9cd9a 1602
282899df 1603 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1604
26d44ae2
RH
1605 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1606 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1607 else
1608 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1609
b26c6d55
RG
1610 /* One could argue that all the stuff below is not necessary for
1611 the non-bitfield case and declare it a FE error if type
1612 adjustment would be needed. */
26d44ae2 1613 if (TREE_TYPE (expr) != type)
6de9cd9a 1614 {
b26c6d55 1615#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1616 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1617#endif
1618 int type_quals;
1619
1620 /* We need to preserve qualifiers and propagate them from
1621 operand 0. */
1622 type_quals = TYPE_QUALS (type)
1623 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1624 if (TYPE_QUALS (type) != type_quals)
1625 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1626
26d44ae2
RH
1627 /* Set the type of the COMPONENT_REF to the underlying type. */
1628 TREE_TYPE (expr) = type;
6de9cd9a 1629
b26c6d55
RG
1630#ifdef ENABLE_TYPES_CHECKING
1631 /* It is now a FE error, if the conversion from the canonical
1632 type to the original expression type is not useless. */
1633 gcc_assert (useless_type_conversion_p (old_type, type));
1634#endif
26d44ae2
RH
1635 }
1636}
6de9cd9a 1637
26d44ae2 1638/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1639 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1640 T array[U];
1641 (T *)&array
1642 ==>
1643 &array[L]
1644 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1645 lower bounds.
1646 The constraint is that the type of &array[L] is trivially convertible
1647 to T *. */
6de9cd9a 1648
26d44ae2
RH
1649static void
1650canonicalize_addr_expr (tree *expr_p)
1651{
1652 tree expr = *expr_p;
26d44ae2 1653 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1654 tree datype, ddatype, pddatype;
6de9cd9a 1655
04d86531
RG
1656 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1657 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1658 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1659 return;
6de9cd9a 1660
26d44ae2 1661 /* The addr_expr type should be a pointer to an array. */
04d86531 1662 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1663 if (TREE_CODE (datype) != ARRAY_TYPE)
1664 return;
6de9cd9a 1665
04d86531
RG
1666 /* The pointer to element type shall be trivially convertible to
1667 the expression pointer type. */
26d44ae2 1668 ddatype = TREE_TYPE (datype);
04d86531 1669 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1670 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1671 pddatype))
26d44ae2 1672 return;
6de9cd9a 1673
26d44ae2 1674 /* The lower bound and element sizes must be constant. */
04d86531
RG
1675 if (!TYPE_SIZE_UNIT (ddatype)
1676 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1677 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1678 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1679 return;
6de9cd9a 1680
26d44ae2 1681 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1682 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1683 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1684 NULL_TREE, NULL_TREE);
04d86531 1685 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1686
1687 /* We can have stripped a required restrict qualifier above. */
1688 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1689 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1690}
6de9cd9a 1691
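/* A small sketch of the canonicalization above (hypothetical names): both
   functions below compute the same address, and the second shows the form
   canonicalize_addr_expr produces, an ARRAY_REF at the constant lower
   bound wrapped in a fresh ADDR_EXPR.  */

extern int example_array[8];

int *
example_addr_cast (void)
{
  return (int *) &example_array;	/* (T *)&array */
}

int *
example_addr_canonical (void)
{
  return &example_array[0];		/* &array[L], L == 0 in C */
}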
26d44ae2
RH
1692/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1693 underneath as appropriate. */
6de9cd9a 1694
26d44ae2
RH
1695static enum gimplify_status
1696gimplify_conversion (tree *expr_p)
d3147f64 1697{
db3927fb 1698 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1699 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1700
0710ccff
NS
1701 /* Then strip away all but the outermost conversion. */
1702 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1703
1704 /* And remove the outermost conversion if it's useless. */
1705 if (tree_ssa_useless_type_conversion (*expr_p))
1706 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1707
26d44ae2
RH
1708 /* If we still have a conversion at the toplevel,
1709 then canonicalize some constructs. */
1043771b 1710 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1711 {
1712 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1713
26d44ae2
RH
1714 /* If a NOP conversion is changing the type of a COMPONENT_REF
1715 expression, then canonicalize its type now in order to expose more
1716 redundant conversions. */
1717 if (TREE_CODE (sub) == COMPONENT_REF)
1718 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1719
26d44ae2
RH
1720 /* If a NOP conversion is changing a pointer to array of foo
1721 to a pointer to foo, embed that change in the ADDR_EXPR. */
1722 else if (TREE_CODE (sub) == ADDR_EXPR)
1723 canonicalize_addr_expr (expr_p);
1724 }
6de9cd9a 1725
8b17cc05
RG
1726 /* If we have a conversion to a non-register type force the
1727 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1728 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1729 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1730 TREE_OPERAND (*expr_p, 0));
8b17cc05 1731
6de9cd9a
DN
1732 return GS_OK;
1733}
1734
77f2a970
JJ
1735/* Nonlocal VLAs seen in the current function. */
1736static struct pointer_set_t *nonlocal_vlas;
1737
96ddb7ec
JJ
1738/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1739static tree nonlocal_vla_vars;
1740
ad19c4be 1741/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
1742 DECL_VALUE_EXPR, and it's worth re-examining things. */
1743
1744static enum gimplify_status
1745gimplify_var_or_parm_decl (tree *expr_p)
1746{
1747 tree decl = *expr_p;
1748
1749 /* ??? If this is a local variable, and it has not been seen in any
1750 outer BIND_EXPR, then it's probably the result of a duplicate
1751 declaration, for which we've already issued an error. It would
1752 be really nice if the front end wouldn't leak these at all.
1753 Currently the only known culprit is C++ destructors, as seen
1754 in g++.old-deja/g++.jason/binding.C. */
1755 if (TREE_CODE (decl) == VAR_DECL
1756 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1757 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1758 && decl_function_context (decl) == current_function_decl)
1759 {
1da2ed5f 1760 gcc_assert (seen_error ());
a9f7c570
RH
1761 return GS_ERROR;
1762 }
1763
953ff289
DN
1764 /* When within an OpenMP context, notice uses of variables. */
1765 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1766 return GS_ALL_DONE;
1767
a9f7c570
RH
1768 /* If the decl is an alias for another expression, substitute it now. */
1769 if (DECL_HAS_VALUE_EXPR_P (decl))
1770 {
77f2a970
JJ
1771 tree value_expr = DECL_VALUE_EXPR (decl);
1772
1773 /* For referenced nonlocal VLAs add a decl for debugging purposes
1774 to the current function. */
1775 if (TREE_CODE (decl) == VAR_DECL
1776 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1777 && nonlocal_vlas != NULL
1778 && TREE_CODE (value_expr) == INDIRECT_REF
1779 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1780 && decl_function_context (decl) != current_function_decl)
1781 {
1782 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
1783 while (ctx
1784 && (ctx->region_type == ORT_WORKSHARE
1785 || ctx->region_type == ORT_SIMD))
77f2a970
JJ
1786 ctx = ctx->outer_context;
1787 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1788 {
96ddb7ec 1789 tree copy = copy_node (decl);
77f2a970
JJ
1790
1791 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1792 SET_DECL_RTL (copy, 0);
77f2a970 1793 TREE_USED (copy) = 1;
96ddb7ec
JJ
1794 DECL_CHAIN (copy) = nonlocal_vla_vars;
1795 nonlocal_vla_vars = copy;
77f2a970
JJ
1796 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1797 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1798 }
1799 }
1800
1801 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1802 return GS_OK;
1803 }
1804
1805 return GS_ALL_DONE;
1806}
1807
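/* A sketch of the nonlocal VLA case above, using the GNU C nested
   function extension (hypothetical names): "a" belongs to example_outer
   but is referenced from example_inner, so while gimplifying the nested
   function its DECL_VALUE_EXPR (an INDIRECT_REF of a pointer) is
   substituted and a debug copy is chained onto nonlocal_vla_vars.  */

int
example_outer (int n)
{
  int a[n];

  int example_inner (int i)		/* GNU C nested function */
  {
    return a[i];			/* use of the nonlocal VLA */
  }

  a[0] = n;
  return example_inner (0);
}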
66c14933
EB
1808/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1809
1810static void
2fb9a547
AM
1811recalculate_side_effects (tree t)
1812{
1813 enum tree_code code = TREE_CODE (t);
1814 int len = TREE_OPERAND_LENGTH (t);
1815 int i;
1816
1817 switch (TREE_CODE_CLASS (code))
1818 {
1819 case tcc_expression:
1820 switch (code)
1821 {
1822 case INIT_EXPR:
1823 case MODIFY_EXPR:
1824 case VA_ARG_EXPR:
1825 case PREDECREMENT_EXPR:
1826 case PREINCREMENT_EXPR:
1827 case POSTDECREMENT_EXPR:
1828 case POSTINCREMENT_EXPR:
1829 /* All of these have side-effects, no matter what their
1830 operands are. */
1831 return;
1832
1833 default:
1834 break;
1835 }
1836 /* Fall through. */
1837
1838 case tcc_comparison: /* a comparison expression */
1839 case tcc_unary: /* a unary arithmetic expression */
1840 case tcc_binary: /* a binary arithmetic expression */
1841 case tcc_reference: /* a reference */
1842 case tcc_vl_exp: /* a function call */
1843 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1844 for (i = 0; i < len; ++i)
1845 {
1846 tree op = TREE_OPERAND (t, i);
1847 if (op && TREE_SIDE_EFFECTS (op))
1848 TREE_SIDE_EFFECTS (t) = 1;
1849 }
1850 break;
1851
1852 case tcc_constant:
1853 /* No side-effects. */
1854 return;
1855
1856 default:
1857 gcc_unreachable ();
1858 }
1859}
1860
6de9cd9a 1861/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 1862 node *EXPR_P.
6de9cd9a
DN
1863
1864 compound_lval
1865 : min_lval '[' val ']'
1866 | min_lval '.' ID
1867 | compound_lval '[' val ']'
1868 | compound_lval '.' ID
1869
1870 This is not part of the original SIMPLE definition, which separates
1871 array and member references, but it seems reasonable to handle them
1872 together. Also, this way we don't run into problems with union
1873 aliasing; gcc requires that for accesses through a union to alias, the
1874 union reference must be explicit, which was not always the case when we
1875 were splitting up array and member refs.
1876
726a989a 1877 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1878 *EXPR_P should be stored.
1879
726a989a 1880 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
1881 *EXPR_P should be stored. */
1882
1883static enum gimplify_status
726a989a
RB
1884gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1885 fallback_t fallback)
6de9cd9a
DN
1886{
1887 tree *p;
941f78d1 1888 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 1889 int i;
db3927fb 1890 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 1891 tree expr = *expr_p;
6de9cd9a 1892
6de9cd9a 1893 /* Create a stack of the subexpressions so later we can walk them in
ec234842 1894 order from inner to outer. */
00f96dc9 1895 auto_vec<tree, 10> expr_stack;
6de9cd9a 1896
afe84921 1897 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
1898 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1899 {
a9f7c570 1900 restart:
6a720599
JM
1901 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1902 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 1903 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
1904
1905 if (handled_component_p (*p))
1906 ;
1907 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1908 additional COMPONENT_REFs. */
1909 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1910 && gimplify_var_or_parm_decl (p) == GS_OK)
1911 goto restart;
1912 else
6a720599 1913 break;
b8698a0f 1914
9771b263 1915 expr_stack.safe_push (*p);
6a720599 1916 }
6de9cd9a 1917
9771b263 1918 gcc_assert (expr_stack.length ());
9e51aaf5 1919
0823efed
DN
1920 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1921 walked through and P points to the innermost expression.
6de9cd9a 1922
af72267c
RK
1923 Java requires that we elaborate nodes in source order. That
1924 means we must gimplify the inner expression followed by each of
1925 the indices, in order. But we can't gimplify the inner
1926 expression until we deal with any variable bounds, sizes, or
1927 positions in order to deal with PLACEHOLDER_EXPRs.
1928
1929 So we do this in three steps. First we deal with the annotations
1930 for any variables in the components, then we gimplify the base,
1931 then we gimplify any indices, from left to right. */
9771b263 1932 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 1933 {
9771b263 1934 tree t = expr_stack[i];
44de5aeb
RK
1935
1936 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 1937 {
44de5aeb
RK
1938 /* Gimplify the low bound and element type size and put them into
1939 the ARRAY_REF. If these values are set, they have already been
1940 gimplified. */
726a989a 1941 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 1942 {
a7cc468a
RH
1943 tree low = unshare_expr (array_ref_low_bound (t));
1944 if (!is_gimple_min_invariant (low))
44de5aeb 1945 {
726a989a
RB
1946 TREE_OPERAND (t, 2) = low;
1947 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 1948 post_p, is_gimple_reg,
726a989a 1949 fb_rvalue);
44de5aeb
RK
1950 ret = MIN (ret, tret);
1951 }
1952 }
19c44640
JJ
1953 else
1954 {
1955 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1956 is_gimple_reg, fb_rvalue);
1957 ret = MIN (ret, tret);
1958 }
44de5aeb 1959
19c44640 1960 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
1961 {
1962 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1963 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 1964 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
1965
1966 /* Divide the element size by the alignment of the element
1967 type (above). */
ad19c4be
EB
1968 elmt_size
1969 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 1970
a7cc468a 1971 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 1972 {
726a989a
RB
1973 TREE_OPERAND (t, 3) = elmt_size;
1974 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 1975 post_p, is_gimple_reg,
726a989a 1976 fb_rvalue);
44de5aeb
RK
1977 ret = MIN (ret, tret);
1978 }
6de9cd9a 1979 }
19c44640
JJ
1980 else
1981 {
1982 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1983 is_gimple_reg, fb_rvalue);
1984 ret = MIN (ret, tret);
1985 }
6de9cd9a 1986 }
44de5aeb
RK
1987 else if (TREE_CODE (t) == COMPONENT_REF)
1988 {
1989 /* Set the field offset into T and gimplify it. */
19c44640 1990 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
1991 {
1992 tree offset = unshare_expr (component_ref_field_offset (t));
1993 tree field = TREE_OPERAND (t, 1);
1994 tree factor
1995 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1996
1997 /* Divide the offset by its alignment. */
db3927fb 1998 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 1999
a7cc468a 2000 if (!is_gimple_min_invariant (offset))
44de5aeb 2001 {
726a989a
RB
2002 TREE_OPERAND (t, 2) = offset;
2003 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2004 post_p, is_gimple_reg,
726a989a 2005 fb_rvalue);
44de5aeb
RK
2006 ret = MIN (ret, tret);
2007 }
2008 }
19c44640
JJ
2009 else
2010 {
2011 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2012 is_gimple_reg, fb_rvalue);
2013 ret = MIN (ret, tret);
2014 }
44de5aeb 2015 }
af72267c
RK
2016 }
2017
a9f7c570
RH
2018 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2019 so as to match the min_lval predicate. Failure to do so may result
2020 in the creation of large aggregate temporaries. */
2021 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2022 fallback | fb_lvalue);
af72267c
RK
2023 ret = MIN (ret, tret);
2024
ea814c66 2025 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 2026 loop we also remove any useless conversions. */
9771b263 2027 for (; expr_stack.length () > 0; )
af72267c 2028 {
9771b263 2029 tree t = expr_stack.pop ();
af72267c
RK
2030
2031 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2032 {
ba4d8f9d 2033 /* Gimplify the dimension. */
af72267c
RK
2034 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2035 {
2036 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2037 is_gimple_val, fb_rvalue);
af72267c
RK
2038 ret = MIN (ret, tret);
2039 }
2040 }
48eb4e53
RK
2041
2042 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2043
726a989a
RB
2044 /* The innermost expression P may have originally had
2045 TREE_SIDE_EFFECTS set which would have caused all the outer
2046 expressions in *EXPR_P leading to P to also have had
2047 TREE_SIDE_EFFECTS set. */
6de9cd9a 2048 recalculate_side_effects (t);
6de9cd9a
DN
2049 }
2050
2051 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2052 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2053 {
2054 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2055 }
2056
9771b263 2057 expr_stack.release ();
07724022 2058
941f78d1
JM
2059 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2060
6de9cd9a
DN
2061 return ret;
2062}
2063
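/* A sketch of step 3 above (hypothetical struct and names): the variable
   index feeding the ARRAY_REF is gimplified to an is_gimple_val
   temporary before the reference itself is emitted.  */

struct example_s { int v[10]; };

int
example_compound_lval (struct example_s *s, int i, int j)
{
  return s->v[i + j];
  /* roughly:  D.1 = i + j;  D.2 = s->v[D.1];  return D.2;  */
}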
206048bd
VR
2064/* Gimplify the self modifying expression pointed to by EXPR_P
2065 (++, --, +=, -=).
6de9cd9a
DN
2066
2067 PRE_P points to the list where side effects that must happen before
2068 *EXPR_P should be stored.
2069
2070 POST_P points to the list where side effects that must happen after
2071 *EXPR_P should be stored.
2072
2073 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2074 in another expression.
6de9cd9a 2075
cc3c4f62
RB
2076 ARITH_TYPE is the type the computation should be performed in. */
2077
2078enum gimplify_status
726a989a 2079gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2080 bool want_value, tree arith_type)
6de9cd9a
DN
2081{
2082 enum tree_code code;
726a989a
RB
2083 tree lhs, lvalue, rhs, t1;
2084 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2085 bool postfix;
2086 enum tree_code arith_code;
2087 enum gimplify_status ret;
db3927fb 2088 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2089
2090 code = TREE_CODE (*expr_p);
2091
282899df
NS
2092 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2093 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2094
2095 /* Prefix or postfix? */
2096 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2097 /* Faster to treat as prefix if result is not used. */
2098 postfix = want_value;
2099 else
2100 postfix = false;
2101
82181741
JJ
2102 /* For postfix, make sure the inner expression's post side effects
2103 are executed after side effects from this expression. */
2104 if (postfix)
2105 post_p = &post;
2106
6de9cd9a
DN
2107 /* Add or subtract? */
2108 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2109 arith_code = PLUS_EXPR;
2110 else
2111 arith_code = MINUS_EXPR;
2112
2113 /* Gimplify the LHS into a GIMPLE lvalue. */
2114 lvalue = TREE_OPERAND (*expr_p, 0);
2115 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2116 if (ret == GS_ERROR)
2117 return ret;
2118
2119 /* Extract the operands to the arithmetic operation. */
2120 lhs = lvalue;
2121 rhs = TREE_OPERAND (*expr_p, 1);
2122
2123 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2124 that both as the result value and in the postqueue operation. */
6de9cd9a
DN
2125 if (postfix)
2126 {
2127 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2128 if (ret == GS_ERROR)
2129 return ret;
6de9cd9a 2130
d97c9b22
JJ
2131 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2132 }
cc3c4f62 2133
5be014d5
AP
2134 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2135 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2136 {
0d82a1c8 2137 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2138 if (arith_code == MINUS_EXPR)
db3927fb 2139 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2140 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2141 }
cc3c4f62
RB
2142 else
2143 t1 = fold_convert (TREE_TYPE (*expr_p),
2144 fold_build2 (arith_code, arith_type,
2145 fold_convert (arith_type, lhs),
2146 fold_convert (arith_type, rhs)));
5be014d5 2147
6de9cd9a
DN
2148 if (postfix)
2149 {
cf1867a0 2150 gimplify_assign (lvalue, t1, pre_p);
726a989a 2151 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 2152 *expr_p = lhs;
6de9cd9a
DN
2153 return GS_ALL_DONE;
2154 }
2155 else
2156 {
726a989a 2157 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2158 return GS_OK;
2159 }
2160}
2161
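/* A sketch of the postfix case above (hypothetical names): because
   want_value is true, the old value is captured via
   get_initialized_tmp_var, the increment is queued in the pre-sequence,
   and the temporary becomes the value of the whole expression.  */

int
example_postincrement (int *counter)
{
  return (*counter)++;
  /* roughly:  old.0 = *counter;  *counter = old.0 + 1;  return old.0;  */
}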
d25cee4d
RH
2162/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2163
2164static void
2165maybe_with_size_expr (tree *expr_p)
2166{
61025d1b
RK
2167 tree expr = *expr_p;
2168 tree type = TREE_TYPE (expr);
2169 tree size;
d25cee4d 2170
61025d1b
RK
2171 /* If we've already wrapped this or the type is error_mark_node, we can't do
2172 anything. */
2173 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2174 || type == error_mark_node)
d25cee4d
RH
2175 return;
2176
61025d1b 2177 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2178 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2179 if (!size || TREE_CODE (size) == INTEGER_CST)
2180 return;
2181
2182 /* Otherwise, make a WITH_SIZE_EXPR. */
2183 size = unshare_expr (size);
2184 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2185 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2186}
2187
726a989a 2188/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
2189 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2190 the CALL_EXPR. */
e4f78bd4 2191
fe6ebcf1 2192enum gimplify_status
1282697f 2193gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2194{
2195 bool (*test) (tree);
2196 fallback_t fb;
2197
2198 /* In general, we allow lvalues for function arguments to avoid
2199 extra overhead of copying large aggregates out of even larger
2200 aggregates into temporaries only to copy the temporaries to
2201 the argument list. Make optimizers happy by pulling out to
2202 temporaries those types that fit in registers. */
726a989a 2203 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2204 test = is_gimple_val, fb = fb_rvalue;
2205 else
b4ef8aac
JM
2206 {
2207 test = is_gimple_lvalue, fb = fb_either;
2208 /* Also strip a TARGET_EXPR that would force an extra copy. */
2209 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2210 {
2211 tree init = TARGET_EXPR_INITIAL (*arg_p);
2212 if (init
2213 && !VOID_TYPE_P (TREE_TYPE (init)))
2214 *arg_p = init;
2215 }
2216 }
e4f78bd4 2217
d25cee4d 2218 /* If this is a variable sized type, we must remember the size. */
726a989a 2219 maybe_with_size_expr (arg_p);
d25cee4d 2220
c2255bc4 2221 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2222 /* Make sure arguments have the same location as the function call
2223 itself. */
2224 protected_set_expr_location (*arg_p, call_location);
2225
e4f78bd4
JM
2226 /* There is a sequence point before a function call. Side effects in
2227 the argument list must occur before the actual call. So, when
2228 gimplifying arguments, force gimplify_expr to use an internal
2229 post queue which is then appended to the end of PRE_P. */
726a989a 2230 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2231}
2232
88ac13da
TS
2233/* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl
2234 references that weren't in the source. We'll do it during the omplower pass
2235 instead. */
2236
2237static bool
2238maybe_fold_stmt (gimple_stmt_iterator *gsi)
2239{
2240 struct gimplify_omp_ctx *ctx;
2241 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2242 if (ctx->region_type == ORT_TARGET)
2243 return false;
2244 return fold_stmt (gsi);
2245}
2246
726a989a 2247/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2248 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2249
2250static enum gimplify_status
726a989a 2251gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2252{
f20ca725 2253 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2254 enum gimplify_status ret;
5039610b 2255 int i, nargs;
726a989a 2256 gimple call;
ed9c79e1 2257 bool builtin_va_start_p = false;
db3927fb 2258 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2259
282899df 2260 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2261
d3147f64 2262 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2263 every call_expr be annotated with file and line. */
a281759f
PB
2264 if (! EXPR_HAS_LOCATION (*expr_p))
2265 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 2266
0e37a2f3
MP
2267 /* Gimplify internal functions created in the FEs. */
2268 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2269 {
2270 nargs = call_expr_nargs (*expr_p);
2271 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2272 auto_vec<tree> vargs (nargs);
2273
2274 for (i = 0; i < nargs; i++)
2275 {
2276 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2277 EXPR_LOCATION (*expr_p));
2278 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2279 }
2280 gimple call = gimple_build_call_internal_vec (ifn, vargs);
2281 gimplify_seq_add_stmt (pre_p, call);
2282 return GS_ALL_DONE;
2283 }
2284
6de9cd9a
DN
2285 /* This may be a call to a builtin function.
2286
2287 Builtin function calls may be transformed into different
2288 (and more efficient) builtin function calls under certain
2289 circumstances. Unfortunately, gimplification can muck things
2290 up enough that the builtin expanders are not aware that certain
2291 transformations are still valid.
2292
2293 So we attempt transformation/gimplification of the call before
2294 we gimplify the CALL_EXPR. At this time we do not manage to
2295 transform all calls in the same manner as the expanders do, but
2296 we do transform most of them. */
726a989a 2297 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
2298 if (fndecl
2299 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2300 switch (DECL_FUNCTION_CODE (fndecl))
2301 {
2302 case BUILT_IN_VA_START:
2efcfa4e 2303 {
726a989a 2304 builtin_va_start_p = TRUE;
5039610b 2305 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2306 {
2307 error ("too few arguments to function %<va_start%>");
c2255bc4 2308 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2309 return GS_OK;
2310 }
b8698a0f 2311
5039610b 2312 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2313 {
c2255bc4 2314 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2315 return GS_OK;
2316 }
3537a0cd
RG
2317 break;
2318 }
2319 case BUILT_IN_LINE:
2320 {
2321 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2322 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2323 return GS_OK;
2324 }
2325 case BUILT_IN_FILE:
2326 {
2327 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2328 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2329 return GS_OK;
2330 }
2331 case BUILT_IN_FUNCTION:
2332 {
2333 const char *function;
2334 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2335 *expr_p = build_string_literal (strlen (function) + 1, function);
2336 return GS_OK;
2337 }
2338 default:
2339 ;
2340 }
2341 if (fndecl && DECL_BUILT_IN (fndecl))
2342 {
2343 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2344 if (new_tree && new_tree != *expr_p)
2345 {
2346 /* There was a transformation of this call which computes the
2347 same value, but in a more efficient way. Return and try
2348 again. */
2349 *expr_p = new_tree;
2350 return GS_OK;
2efcfa4e 2351 }
6de9cd9a
DN
2352 }
2353
f20ca725
RG
2354 /* Remember the original function pointer type. */
2355 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2356
6de9cd9a
DN
2357 /* There is a sequence point before the call, so any side effects in
2358 the calling expression must occur before the actual call. Force
2359 gimplify_expr to use an internal post queue. */
5039610b 2360 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2361 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2362
5039610b
SL
2363 nargs = call_expr_nargs (*expr_p);
2364
e36711f3 2365 /* Get argument types for verification. */
726a989a 2366 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2367 parms = NULL_TREE;
726a989a
RB
2368 if (fndecl)
2369 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
a96c6a62
RB
2370 else
2371 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
e36711f3 2372
726a989a 2373 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2374 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2375 else if (parms)
f9487002 2376 p = parms;
6ef5231b 2377 else
498e51ca 2378 p = NULL_TREE;
f9487002
JJ
2379 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2380 ;
6ef5231b
JJ
2381
2382 /* If the last argument is __builtin_va_arg_pack () and it is not
2383 passed as a named argument, decrease the number of CALL_EXPR
2384 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2385 if (!p
2386 && i < nargs
2387 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2388 {
2389 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2390 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2391
2392 if (last_arg_fndecl
2393 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2394 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2395 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2396 {
2397 tree call = *expr_p;
2398
2399 --nargs;
db3927fb
AH
2400 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2401 CALL_EXPR_FN (call),
2402 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2403
2404 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2405 CALL_EXPR_VA_ARG_PACK flag. */
2406 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2407 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2408 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2409 = CALL_EXPR_RETURN_SLOT_OPT (call);
2410 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2411 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 2412
6ef5231b
JJ
2413 /* Set CALL_EXPR_VA_ARG_PACK. */
2414 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2415 }
2416 }
e36711f3
RG
2417
2418 /* Finally, gimplify the function arguments. */
726a989a 2419 if (nargs > 0)
6de9cd9a 2420 {
726a989a
RB
2421 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2422 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2423 PUSH_ARGS_REVERSED ? i-- : i++)
2424 {
2425 enum gimplify_status t;
6de9cd9a 2426
726a989a
RB
2427 /* Avoid gimplifying the second argument to va_start, which needs to
2428 be the plain PARM_DECL. */
2429 if ((i != 1) || !builtin_va_start_p)
2430 {
1282697f
AH
2431 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2432 EXPR_LOCATION (*expr_p));
6de9cd9a 2433
726a989a
RB
2434 if (t == GS_ERROR)
2435 ret = GS_ERROR;
2436 }
2437 }
6de9cd9a 2438 }
6de9cd9a 2439
33922890
RG
2440 /* Verify the function result. */
2441 if (want_value && fndecl
f20ca725 2442 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2443 {
2444 error_at (loc, "using result of function returning %<void%>");
2445 ret = GS_ERROR;
2446 }
2447
6de9cd9a 2448 /* Try this again in case gimplification exposed something. */
6f538523 2449 if (ret != GS_ERROR)
6de9cd9a 2450 {
db3927fb 2451 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2452
82d6e6fc 2453 if (new_tree && new_tree != *expr_p)
5039610b
SL
2454 {
2455 /* There was a transformation of this call which computes the
2456 same value, but in a more efficient way. Return and try
2457 again. */
82d6e6fc 2458 *expr_p = new_tree;
5039610b 2459 return GS_OK;
6de9cd9a
DN
2460 }
2461 }
726a989a
RB
2462 else
2463 {
df8fa700 2464 *expr_p = error_mark_node;
726a989a
RB
2465 return GS_ERROR;
2466 }
6de9cd9a
DN
2467
2468 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2469 decl. This allows us to eliminate redundant or useless
2470 calls to "const" functions. */
becfd6e5
KZ
2471 if (TREE_CODE (*expr_p) == CALL_EXPR)
2472 {
2473 int flags = call_expr_flags (*expr_p);
2474 if (flags & (ECF_CONST | ECF_PURE)
2475 /* An infinite loop is considered a side effect. */
2476 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2477 TREE_SIDE_EFFECTS (*expr_p) = 0;
2478 }
726a989a
RB
2479
2480 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2481 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2482 form and delegate the creation of a GIMPLE_CALL to
2483 gimplify_modify_expr. This is always possible because when
2484 WANT_VALUE is true, the caller wants the result of this call into
2485 a temporary, which means that we will emit an INIT_EXPR in
2486 internal_get_tmp_var which will then be handled by
2487 gimplify_modify_expr. */
2488 if (!want_value)
2489 {
2490 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2491 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2492 gimple_stmt_iterator gsi;
726a989a 2493 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2494 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 2495 notice_special_calls (call);
726a989a 2496 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 2497 gsi = gsi_last (*pre_p);
88ac13da 2498 maybe_fold_stmt (&gsi);
726a989a
RB
2499 *expr_p = NULL_TREE;
2500 }
f20ca725
RG
2501 else
2502 /* Remember the original function type. */
2503 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2504 CALL_EXPR_FN (*expr_p));
726a989a 2505
6de9cd9a
DN
2506 return ret;
2507}
2508
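/* A small sketch of the builtin handling above (hypothetical function):
   calls to __builtin_LINE, __builtin_FILE and __builtin_FUNCTION do not
   survive gimplification; they are replaced by constants derived from
   the call's expanded location, as in the BUILT_IN_LINE case above.  */

int
example_builtin_line (void)
{
  return __builtin_LINE ();	/* folded to an INTEGER_CST for this line */
}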
2509/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2510 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2511
2512 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2513 condition is true or false, respectively. If null, we should generate
2514 our own to skip over the evaluation of this specific expression.
2515
ca80e52b
EB
2516 LOCUS is the source location of the COND_EXPR.
2517
6de9cd9a
DN
2518 This function is the tree equivalent of do_jump.
2519
2520 shortcut_cond_r should only be called by shortcut_cond_expr. */
2521
2522static tree
ca80e52b
EB
2523shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2524 location_t locus)
6de9cd9a
DN
2525{
2526 tree local_label = NULL_TREE;
2527 tree t, expr = NULL;
2528
2529 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2530 retain the shortcut semantics. Just insert the gotos here;
2531 shortcut_cond_expr will append the real blocks later. */
2532 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2533 {
ca80e52b
EB
2534 location_t new_locus;
2535
6de9cd9a
DN
2536 /* Turn if (a && b) into
2537
2538 if (a); else goto no;
2539 if (b) goto yes; else goto no;
2540 (no:) */
2541
2542 if (false_label_p == NULL)
2543 false_label_p = &local_label;
2544
ca80e52b
EB
2545 /* Keep the original source location on the first 'if'. */
2546 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2547 append_to_statement_list (t, &expr);
2548
ca80e52b
EB
2549 /* Set the source location of the && on the second 'if'. */
2550 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2551 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2552 new_locus);
6de9cd9a
DN
2553 append_to_statement_list (t, &expr);
2554 }
2555 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2556 {
ca80e52b
EB
2557 location_t new_locus;
2558
6de9cd9a
DN
2559 /* Turn if (a || b) into
2560
2561 if (a) goto yes;
2562 if (b) goto yes; else goto no;
2563 (yes:) */
2564
2565 if (true_label_p == NULL)
2566 true_label_p = &local_label;
2567
ca80e52b
EB
2568 /* Keep the original source location on the first 'if'. */
2569 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2570 append_to_statement_list (t, &expr);
2571
ca80e52b
EB
2572 /* Set the source location of the || on the second 'if'. */
2573 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2574 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2575 new_locus);
6de9cd9a
DN
2576 append_to_statement_list (t, &expr);
2577 }
1537737f
JJ
2578 else if (TREE_CODE (pred) == COND_EXPR
2579 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2580 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2581 {
ca80e52b
EB
2582 location_t new_locus;
2583
6de9cd9a
DN
2584 /* As long as we're messing with gotos, turn if (a ? b : c) into
2585 if (a)
2586 if (b) goto yes; else goto no;
2587 else
1537737f
JJ
2588 if (c) goto yes; else goto no;
2589
2590 Don't do this if one of the arms has void type, which can happen
2591 in C++ when the arm is throw. */
ca80e52b
EB
2592
2593 /* Keep the original source location on the first 'if'. Set the source
2594 location of the ? on the second 'if'. */
2595 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2596 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2597 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2598 false_label_p, locus),
b4257cfc 2599 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2600 false_label_p, new_locus));
6de9cd9a
DN
2601 }
2602 else
2603 {
b4257cfc
RG
2604 expr = build3 (COND_EXPR, void_type_node, pred,
2605 build_and_jump (true_label_p),
2606 build_and_jump (false_label_p));
ca80e52b 2607 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2608 }
2609
2610 if (local_label)
2611 {
2612 t = build1 (LABEL_EXPR, void_type_node, local_label);
2613 append_to_statement_list (t, &expr);
2614 }
2615
2616 return expr;
2617}
2618
726a989a
RB
2619/* Given a conditional expression EXPR with short-circuit boolean
2620 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 2621 predicate apart into the equivalent sequence of conditionals. */
726a989a 2622
6de9cd9a
DN
2623static tree
2624shortcut_cond_expr (tree expr)
2625{
2626 tree pred = TREE_OPERAND (expr, 0);
2627 tree then_ = TREE_OPERAND (expr, 1);
2628 tree else_ = TREE_OPERAND (expr, 2);
2629 tree true_label, false_label, end_label, t;
2630 tree *true_label_p;
2631 tree *false_label_p;
089efaa4 2632 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2633 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2634 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2635
2636 /* First do simple transformations. */
65355d53 2637 if (!else_se)
6de9cd9a 2638 {
ca80e52b
EB
2639 /* If there is no 'else', turn
2640 if (a && b) then c
2641 into
2642 if (a) if (b) then c. */
6de9cd9a
DN
2643 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2644 {
ca80e52b 2645 /* Keep the original source location on the first 'if'. */
8400e75e 2646 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2647 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2648 /* Set the source location of the && on the second 'if'. */
2649 if (EXPR_HAS_LOCATION (pred))
2650 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2651 then_ = shortcut_cond_expr (expr);
4356a1bf 2652 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2653 pred = TREE_OPERAND (pred, 0);
b4257cfc 2654 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2655 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2656 }
2657 }
726a989a 2658
65355d53 2659 if (!then_se)
6de9cd9a
DN
2660 {
2661 /* If there is no 'then', turn
2662 if (a || b); else d
2663 into
2664 if (a); else if (b); else d. */
2665 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2666 {
ca80e52b 2667 /* Keep the original source location on the first 'if'. */
8400e75e 2668 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2669 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2670 /* Set the source location of the || on the second 'if'. */
2671 if (EXPR_HAS_LOCATION (pred))
2672 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2673 else_ = shortcut_cond_expr (expr);
4356a1bf 2674 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2675 pred = TREE_OPERAND (pred, 0);
b4257cfc 2676 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2677 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2678 }
2679 }
2680
2681 /* If we're done, great. */
2682 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2683 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2684 return expr;
2685
2686 /* Otherwise we need to mess with gotos. Change
2687 if (a) c; else d;
2688 to
2689 if (a); else goto no;
2690 c; goto end;
2691 no: d; end:
2692 and recursively gimplify the condition. */
2693
2694 true_label = false_label = end_label = NULL_TREE;
2695
2696 /* If our arms just jump somewhere, hijack those labels so we don't
2697 generate jumps to jumps. */
2698
65355d53
RH
2699 if (then_
2700 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2701 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2702 {
2703 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2704 then_ = NULL;
2705 then_se = false;
6de9cd9a
DN
2706 }
2707
65355d53
RH
2708 if (else_
2709 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2710 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2711 {
2712 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2713 else_ = NULL;
2714 else_se = false;
6de9cd9a
DN
2715 }
2716
9cf737f8 2717 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2718 if (true_label)
2719 true_label_p = &true_label;
2720 else
2721 true_label_p = NULL;
2722
2723 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2724 if (false_label || else_se)
6de9cd9a
DN
2725 false_label_p = &false_label;
2726 else
2727 false_label_p = NULL;
2728
2729 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2730 if (!then_se && !else_se)
ca80e52b 2731 return shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2732 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2733
2734 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2735 if (else_se)
ca80e52b 2736 t = expr_last (else_);
65355d53 2737 else if (then_se)
ca80e52b 2738 t = expr_last (then_);
65355d53 2739 else
ca80e52b
EB
2740 t = NULL;
2741 if (t && TREE_CODE (t) == LABEL_EXPR)
2742 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2743
2744 /* If we don't care about jumping to the 'else' branch, jump to the end
2745 if the condition is false. */
2746 if (!false_label_p)
2747 false_label_p = &end_label;
2748
2749 /* We only want to emit these labels if we aren't hijacking them. */
2750 emit_end = (end_label == NULL_TREE);
2751 emit_false = (false_label == NULL_TREE);
2752
089efaa4
ILT
2753 /* We only emit the jump over the else clause if we have to--if the
2754 then clause may fall through. Otherwise we can wind up with a
2755 useless jump and a useless label at the end of gimplified code,
2756 which will cause us to think that this conditional as a whole
2757 falls through even if it doesn't. If we then inline a function
2758 which ends with such a condition, that can cause us to issue an
2759 inappropriate warning about control reaching the end of a
2760 non-void function. */
2761 jump_over_else = block_may_fallthru (then_);
2762
ca80e52b 2763 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2764 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2765
2766 expr = NULL;
2767 append_to_statement_list (pred, &expr);
2768
2769 append_to_statement_list (then_, &expr);
65355d53 2770 if (else_se)
6de9cd9a 2771 {
089efaa4
ILT
2772 if (jump_over_else)
2773 {
ca80e52b 2774 tree last = expr_last (expr);
089efaa4 2775 t = build_and_jump (&end_label);
ca80e52b
EB
2776 if (EXPR_HAS_LOCATION (last))
2777 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2778 append_to_statement_list (t, &expr);
2779 }
6de9cd9a
DN
2780 if (emit_false)
2781 {
2782 t = build1 (LABEL_EXPR, void_type_node, false_label);
2783 append_to_statement_list (t, &expr);
2784 }
2785 append_to_statement_list (else_, &expr);
2786 }
2787 if (emit_end && end_label)
2788 {
2789 t = build1 (LABEL_EXPR, void_type_node, end_label);
2790 append_to_statement_list (t, &expr);
2791 }
2792
2793 return expr;
2794}
2795
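/* A sketch of the shortcut expansion (hypothetical helpers): the second
   function is roughly what shortcut_cond_expr, with shortcut_cond_r
   applied to the && predicate, produces for the first one.  */

extern int example_a (void), example_b (void);
extern void example_c (void), example_d (void);

void
example_shortcut_source (void)
{
  if (example_a () && example_b ())
    example_c ();
  else
    example_d ();
}

void
example_shortcut_expanded (void)
{
  if (example_a ()) ; else goto no;	/* if (a); else goto no; */
  if (example_b ()) ; else goto no;	/* if (b); else goto no; */
  example_c ();
  goto end;				/* jump over the else clause */
 no:
  example_d ();
 end:;
}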
2796/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2797
50674e96 2798tree
6de9cd9a
DN
2799gimple_boolify (tree expr)
2800{
2801 tree type = TREE_TYPE (expr);
db3927fb 2802 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2803
554cf330
JJ
2804 if (TREE_CODE (expr) == NE_EXPR
2805 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2806 && integer_zerop (TREE_OPERAND (expr, 1)))
2807 {
2808 tree call = TREE_OPERAND (expr, 0);
2809 tree fn = get_callee_fndecl (call);
2810
d53c73e0
JJ
2811 /* For __builtin_expect ((long) (x), y) recurse into x as well
2812 if x is truth_value_p. */
554cf330
JJ
2813 if (fn
2814 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2815 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2816 && call_expr_nargs (call) == 2)
2817 {
2818 tree arg = CALL_EXPR_ARG (call, 0);
2819 if (arg)
2820 {
2821 if (TREE_CODE (arg) == NOP_EXPR
2822 && TREE_TYPE (arg) == TREE_TYPE (call))
2823 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
2824 if (truth_value_p (TREE_CODE (arg)))
2825 {
2826 arg = gimple_boolify (arg);
2827 CALL_EXPR_ARG (call, 0)
2828 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2829 }
554cf330
JJ
2830 }
2831 }
2832 }
2833
6de9cd9a
DN
2834 switch (TREE_CODE (expr))
2835 {
2836 case TRUTH_AND_EXPR:
2837 case TRUTH_OR_EXPR:
2838 case TRUTH_XOR_EXPR:
2839 case TRUTH_ANDIF_EXPR:
2840 case TRUTH_ORIF_EXPR:
2841 /* Also boolify the arguments of truth exprs. */
2842 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2843 /* FALLTHRU */
2844
2845 case TRUTH_NOT_EXPR:
2846 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 2847
6de9cd9a 2848 /* These expressions always produce boolean results. */
7f3ff782
KT
2849 if (TREE_CODE (type) != BOOLEAN_TYPE)
2850 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 2851 return expr;
d3147f64 2852
8170608b 2853 case ANNOTATE_EXPR:
718c4601 2854 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 2855 {
718c4601
EB
2856 case annot_expr_ivdep_kind:
2857 case annot_expr_no_vector_kind:
2858 case annot_expr_vector_kind:
8170608b
TB
2859 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2860 if (TREE_CODE (type) != BOOLEAN_TYPE)
2861 TREE_TYPE (expr) = boolean_type_node;
2862 return expr;
718c4601
EB
2863 default:
2864 gcc_unreachable ();
8170608b 2865 }
8170608b 2866
6de9cd9a 2867 default:
7f3ff782
KT
2868 if (COMPARISON_CLASS_P (expr))
2869 {
2870 /* These expressions always produce boolean results. */
2871 if (TREE_CODE (type) != BOOLEAN_TYPE)
2872 TREE_TYPE (expr) = boolean_type_node;
2873 return expr;
2874 }
6de9cd9a
DN
2875 /* Other expressions that get here must have boolean values, but
2876 might need to be converted to the appropriate mode. */
7f3ff782 2877 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 2878 return expr;
db3927fb 2879 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
2880 }
2881}
2882
aea74440
JJ
2883/* Given a conditional expression *EXPR_P without side effects, gimplify
2884 its operands. New statements are inserted to PRE_P. */
2885
2886static enum gimplify_status
726a989a 2887gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
2888{
2889 tree expr = *expr_p, cond;
2890 enum gimplify_status ret, tret;
2891 enum tree_code code;
2892
2893 cond = gimple_boolify (COND_EXPR_COND (expr));
2894
2895 /* We need to handle && and || specially, as their gimplification
2896 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2897 code = TREE_CODE (cond);
2898 if (code == TRUTH_ANDIF_EXPR)
2899 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2900 else if (code == TRUTH_ORIF_EXPR)
2901 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 2902 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
2903 COND_EXPR_COND (*expr_p) = cond;
2904
2905 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2906 is_gimple_val, fb_rvalue);
2907 ret = MIN (ret, tret);
2908 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2909 is_gimple_val, fb_rvalue);
2910
2911 return MIN (ret, tret);
2912}
2913
ad19c4be 2914/* Return true if evaluating EXPR could trap.
aea74440
JJ
2915 EXPR is GENERIC, while tree_could_trap_p can be called
2916 only on GIMPLE. */
2917
2918static bool
2919generic_expr_could_trap_p (tree expr)
2920{
2921 unsigned i, n;
2922
2923 if (!expr || is_gimple_val (expr))
2924 return false;
2925
2926 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2927 return true;
2928
2929 n = TREE_OPERAND_LENGTH (expr);
2930 for (i = 0; i < n; i++)
2931 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2932 return true;
2933
2934 return false;
2935}
2936
206048bd 2937/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
2938 into
2939
2940 if (p) if (p)
2941 t1 = a; a;
2942 else or else
2943 t1 = b; b;
2944 t1;
2945
2946 The second form is used when *EXPR_P is of type void.
2947
2948 PRE_P points to the list where side effects that must happen before
dae7ec87 2949 *EXPR_P should be stored. */
6de9cd9a
DN
2950
2951static enum gimplify_status
726a989a 2952gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
2953{
2954 tree expr = *expr_p;
06ec59e6
EB
2955 tree type = TREE_TYPE (expr);
2956 location_t loc = EXPR_LOCATION (expr);
2957 tree tmp, arm1, arm2;
6de9cd9a 2958 enum gimplify_status ret;
726a989a
RB
2959 tree label_true, label_false, label_cont;
2960 bool have_then_clause_p, have_else_clause_p;
2961 gimple gimple_cond;
2962 enum tree_code pred_code;
2963 gimple_seq seq = NULL;
26d44ae2
RH
2964
2965 /* If this COND_EXPR has a value, copy the values into a temporary within
2966 the arms. */
06ec59e6 2967 if (!VOID_TYPE_P (type))
26d44ae2 2968 {
06ec59e6 2969 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
2970 tree result;
2971
06ec59e6
EB
2972 /* If either an rvalue is ok or we do not require an lvalue, create the
2973 temporary. But we cannot do that if the type is addressable. */
2974 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 2975 && !TREE_ADDRESSABLE (type))
aff98faf 2976 {
aea74440
JJ
2977 if (gimplify_ctxp->allow_rhs_cond_expr
2978 /* If either branch has side effects or could trap, it can't be
2979 evaluated unconditionally. */
06ec59e6
EB
2980 && !TREE_SIDE_EFFECTS (then_)
2981 && !generic_expr_could_trap_p (then_)
2982 && !TREE_SIDE_EFFECTS (else_)
2983 && !generic_expr_could_trap_p (else_))
aea74440
JJ
2984 return gimplify_pure_cond_expr (expr_p, pre_p);
2985
06ec59e6
EB
2986 tmp = create_tmp_var (type, "iftmp");
2987 result = tmp;
aff98faf 2988 }
06ec59e6
EB
2989
2990 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
2991 else
2992 {
06ec59e6 2993 type = build_pointer_type (type);
aff98faf 2994
06ec59e6
EB
2995 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2996 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 2997
06ec59e6
EB
2998 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2999 else_ = build_fold_addr_expr_loc (loc, else_);
3000
3001 expr
3002 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3003
726a989a 3004 tmp = create_tmp_var (type, "iftmp");
70f34814 3005 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3006 }
3007
06ec59e6
EB
3008 /* Build the new then clause, `tmp = then_;'. But don't build the
3009 assignment if the value is void; in C++ it can be if it's a throw. */
3010 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3011 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3012
06ec59e6
EB
3013 /* Similarly, build the new else clause, `tmp = else_;'. */
3014 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3015 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3016
3017 TREE_TYPE (expr) = void_type_node;
3018 recalculate_side_effects (expr);
3019
d91ba7b0 3020 /* Move the COND_EXPR to the prequeue. */
726a989a 3021 gimplify_stmt (&expr, pre_p);
26d44ae2 3022
aff98faf 3023 *expr_p = result;
726a989a 3024 return GS_ALL_DONE;
26d44ae2
RH
3025 }
3026
f2f81d57
EB
3027 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3028 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3029 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3030 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3031
26d44ae2
RH
3032 /* Make sure the condition has BOOLEAN_TYPE. */
3033 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3034
3035 /* Break apart && and || conditions. */
3036 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3037 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3038 {
3039 expr = shortcut_cond_expr (expr);
3040
3041 if (expr != *expr_p)
3042 {
3043 *expr_p = expr;
3044
3045 /* We can't rely on gimplify_expr to re-gimplify the expanded
3046 form properly, as cleanups might cause the target labels to be
3047 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3048 set up a conditional context. */
3049 gimple_push_condition ();
726a989a 3050 gimplify_stmt (expr_p, &seq);
26d44ae2 3051 gimple_pop_condition (pre_p);
726a989a 3052 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3053
3054 return GS_ALL_DONE;
3055 }
3056 }
3057
3058 /* Now do the normal gimplification. */
26d44ae2 3059
726a989a
RB
3060 /* Gimplify condition. */
3061 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3062 fb_rvalue);
26d44ae2 3063 if (ret == GS_ERROR)
726a989a
RB
3064 return GS_ERROR;
3065 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3066
3067 gimple_push_condition ();
26d44ae2 3068
726a989a
RB
3069 have_then_clause_p = have_else_clause_p = false;
3070 if (TREE_OPERAND (expr, 1) != NULL
3071 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3072 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3073 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3074 == current_function_decl)
3075 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3076 have different locations, otherwise we end up with incorrect
3077 location information on the branches. */
3078 && (optimize
3079 || !EXPR_HAS_LOCATION (expr)
3080 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3081 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3082 {
3083 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3084 have_then_clause_p = true;
26d44ae2
RH
3085 }
3086 else
c2255bc4 3087 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3088 if (TREE_OPERAND (expr, 2) != NULL
3089 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3090 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3091 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3092 == current_function_decl)
3093 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3094 have different locations, otherwise we end up with incorrect
3095 location information on the branches. */
3096 && (optimize
3097 || !EXPR_HAS_LOCATION (expr)
3098 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3099 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3100 {
3101 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3102 have_else_clause_p = true;
3103 }
3104 else
c2255bc4 3105 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3106
726a989a
RB
3107 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3108 &arm2);
26d44ae2 3109
726a989a
RB
3110 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3111 label_false);
26d44ae2 3112
726a989a
RB
3113 gimplify_seq_add_stmt (&seq, gimple_cond);
3114 label_cont = NULL_TREE;
3115 if (!have_then_clause_p)
3116 {
3117 /* For if (...) {} else { code; } put label_true after
3118 the else block. */
3119 if (TREE_OPERAND (expr, 1) == NULL_TREE
3120 && !have_else_clause_p
3121 && TREE_OPERAND (expr, 2) != NULL_TREE)
3122 label_cont = label_true;
3123 else
3124 {
3125 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3126 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3127 /* For if (...) { code; } else {} or
3128 if (...) { code; } else goto label; or
3129 if (...) { code; return; } else { ... }
3130 label_cont isn't needed. */
3131 if (!have_else_clause_p
3132 && TREE_OPERAND (expr, 2) != NULL_TREE
3133 && gimple_seq_may_fallthru (seq))
3134 {
3135 gimple g;
c2255bc4 3136 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3137
3138 g = gimple_build_goto (label_cont);
3139
3140 /* GIMPLE_COND's are very low level; they have embedded
3141 gotos. This particular embedded goto should not be marked
3142 with the location of the original COND_EXPR, as it would
3143 correspond to the COND_EXPR's condition, not the ELSE or the
3144 THEN arms. To avoid marking it with the wrong location, flag
3145 it as "no location". */
3146 gimple_set_do_not_emit_location (g);
3147
3148 gimplify_seq_add_stmt (&seq, g);
3149 }
3150 }
3151 }
3152 if (!have_else_clause_p)
3153 {
3154 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3155 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3156 }
3157 if (label_cont)
3158 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3159
3160 gimple_pop_condition (pre_p);
3161 gimple_seq_add_seq (pre_p, seq);
3162
3163 if (ret == GS_ERROR)
3164 ; /* Do nothing. */
3165 else if (have_then_clause_p || have_else_clause_p)
3166 ret = GS_ALL_DONE;
3167 else
3168 {
3169 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3170 expr = TREE_OPERAND (expr, 0);
3171 gimplify_stmt (&expr, pre_p);
3172 }
3173
3174 *expr_p = NULL;
3175 return ret;
3176}
3177
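/* A sketch of the value-producing COND_EXPR case above (hypothetical
   names): the arms are copied into the "iftmp" temporary created by
   create_tmp_var and the temporary replaces the expression.  */

int
example_cond_value (int p, int a, int b)
{
  int x = p ? a : b;
  /* roughly:  if (p != 0) iftmp.0 = a; else iftmp.0 = b;  x = iftmp.0;  */
  return x;
}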
f76d6e6f
EB
3178/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3179 to be marked addressable.
3180
3181 We cannot rely on such an expression being directly markable if a temporary
3182 has been created by the gimplification. In this case, we create another
3183 temporary and initialize it with a copy, which will become a store after we
3184 mark it addressable. This can happen if the front-end passed us something
3185 that it could not mark addressable yet, such as a Fortran pass-by-reference
3186 parameter wrapped in a conversion like (int) floatvar. */
3187
3188static void
3189prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3190{
3191 while (handled_component_p (*expr_p))
3192 expr_p = &TREE_OPERAND (*expr_p, 0);
3193 if (is_gimple_reg (*expr_p))
947ca6a0
RB
3194 {
3195 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3196 DECL_GIMPLE_REG_P (var) = 0;
3197 *expr_p = var;
3198 }
f76d6e6f
EB
3199}
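/* Illustrative sketch (hypothetical temporaries): if the RHS was gimplified
   into a register temporary, e.g.

       D.1 = (int) floatvar;

   the routine above emits another initialized temporary

       D.2 = D.1;

   and clears DECL_GIMPLE_REG_P on D.2, so that D.2 can safely be marked
   addressable and the copy becomes a store to memory.  */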
3200
726a989a
RB
3201/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3202 a call to __builtin_memcpy. */
3203
3204static enum gimplify_status
3205gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3206 gimple_seq *seq_p)
26d44ae2 3207{
5039610b 3208 tree t, to, to_ptr, from, from_ptr;
726a989a 3209 gimple gs;
db3927fb 3210 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3211
726a989a
RB
3212 to = TREE_OPERAND (*expr_p, 0);
3213 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3214
f76d6e6f
EB
3215 /* Mark the RHS addressable. Beware that it may not be possible to do so
3216 directly if a temporary has been created by the gimplification. */
3217 prepare_gimple_addressable (&from, seq_p);
3218
628c189e 3219 mark_addressable (from);
db3927fb
AH
3220 from_ptr = build_fold_addr_expr_loc (loc, from);
3221 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3222
628c189e 3223 mark_addressable (to);
db3927fb
AH
3224 to_ptr = build_fold_addr_expr_loc (loc, to);
3225 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3226
e79983f4 3227 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3228
3229 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3230
3231 if (want_value)
3232 {
726a989a
RB
3233 /* tmp = memcpy() */
3234 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3235 gimple_call_set_lhs (gs, t);
3236 gimplify_seq_add_stmt (seq_p, gs);
3237
70f34814 3238 *expr_p = build_simple_mem_ref (t);
726a989a 3239 return GS_ALL_DONE;
26d44ae2
RH
3240 }
3241
726a989a
RB
3242 gimplify_seq_add_stmt (seq_p, gs);
3243 *expr_p = NULL;
3244 return GS_ALL_DONE;
26d44ae2
RH
3245}
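/* Illustrative sketch (assumed names TO, FROM, SIZE): a variable-sized
   assignment

       TO = FROM;            with SIZE taken from the enclosing WITH_SIZE_EXPR

   is emitted by the routine above as

       __builtin_memcpy (&TO, &FROM, SIZE);

   and, when the value of the assignment is needed, the call's return value
   is captured in a temporary and re-read through a MEM_REF.  */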
3246
3247/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3248 a call to __builtin_memset. In this case we know that the RHS is
3249 a CONSTRUCTOR with an empty element list. */
3250
3251static enum gimplify_status
726a989a
RB
3252gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3253 gimple_seq *seq_p)
26d44ae2 3254{
1a13360e 3255 tree t, from, to, to_ptr;
726a989a 3256 gimple gs;
db3927fb 3257 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3258
1a13360e
OH
3259 /* Assert our assumptions, to abort instead of producing wrong code
3260 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3261 not be immediately exposed. */
b8698a0f 3262 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3263 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3264 from = TREE_OPERAND (from, 0);
3265
3266 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3267 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3268
3269 /* Now proceed. */
726a989a 3270 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3271
db3927fb
AH
3272 to_ptr = build_fold_addr_expr_loc (loc, to);
3273 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3274 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3275
3276 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3277
3278 if (want_value)
3279 {
726a989a
RB
3280 /* tmp = memset() */
3281 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3282 gimple_call_set_lhs (gs, t);
3283 gimplify_seq_add_stmt (seq_p, gs);
3284
3285 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3286 return GS_ALL_DONE;
26d44ae2
RH
3287 }
3288
726a989a
RB
3289 gimplify_seq_add_stmt (seq_p, gs);
3290 *expr_p = NULL;
3291 return GS_ALL_DONE;
26d44ae2
RH
3292}
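/* Illustrative sketch (assumed names TO, SIZE): an assignment from an empty
   CONSTRUCTOR of variable size,

       TO = {};              with SIZE taken from the enclosing WITH_SIZE_EXPR

   is emitted by the routine above as

       __builtin_memset (&TO, 0, SIZE);

   the assertion guaranteeing that there are no element values that would
   otherwise be lost.  */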
3293
57d1dd87
RH
3294/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3295 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3296 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3297
3298struct gimplify_init_ctor_preeval_data
3299{
3300 /* The base decl of the lhs object. May be NULL, in which case we
3301 have to assume the lhs is indirect. */
3302 tree lhs_base_decl;
3303
3304 /* The alias set of the lhs object. */
4862826d 3305 alias_set_type lhs_alias_set;
57d1dd87
RH
3306};
3307
3308static tree
3309gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3310{
3311 struct gimplify_init_ctor_preeval_data *data
3312 = (struct gimplify_init_ctor_preeval_data *) xdata;
3313 tree t = *tp;
3314
3315 /* If we find the base object, obviously we have overlap. */
3316 if (data->lhs_base_decl == t)
3317 return t;
3318
3319 /* If the constructor component is indirect, determine if we have a
3320 potential overlap with the lhs. The only bits of information we
3321 have to go on at this point are addressability and alias sets. */
70f34814
RG
3322 if ((INDIRECT_REF_P (t)
3323 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3324 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3325 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3326 return t;
3327
df10ee2a 3328 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3329 potential overlap with the lhs through an INDIRECT_REF like above.
3330 ??? Ugh - this is completely broken. In fact this whole analysis
3331 doesn't look conservative. */
df10ee2a
EB
3332 if (TREE_CODE (t) == CALL_EXPR)
3333 {
3334 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3335
3336 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3337 if (POINTER_TYPE_P (TREE_VALUE (type))
3338 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3339 && alias_sets_conflict_p (data->lhs_alias_set,
3340 get_alias_set
3341 (TREE_TYPE (TREE_VALUE (type)))))
3342 return t;
3343 }
3344
6615c446 3345 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3346 *walk_subtrees = 0;
3347 return NULL;
3348}
3349
726a989a 3350/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3351 force values that overlap with the lhs (as described by *DATA)
3352 into temporaries. */
3353
3354static void
726a989a 3355gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3356 struct gimplify_init_ctor_preeval_data *data)
3357{
3358 enum gimplify_status one;
3359
51eed280
PB
3360 /* If the value is constant, then there's nothing to pre-evaluate. */
3361 if (TREE_CONSTANT (*expr_p))
3362 {
3363 /* Ensure it does not have side effects, it might contain a reference to
3364 the object we're initializing. */
3365 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3366 return;
3367 }
57d1dd87
RH
3368
3369 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3370 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3371 return;
3372
3373 /* Recurse for nested constructors. */
3374 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3375 {
4038c495
GB
3376 unsigned HOST_WIDE_INT ix;
3377 constructor_elt *ce;
9771b263 3378 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 3379
9771b263 3380 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 3381 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3382
57d1dd87
RH
3383 return;
3384 }
3385
0461b801
EB
3386 /* If this is a variable sized type, we must remember the size. */
3387 maybe_with_size_expr (expr_p);
57d1dd87
RH
3388
3389 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3390 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3391 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
3392 gimplification now means that we won't have to deal with complicated
3393 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3394 exponential search behavior. */
57d1dd87
RH
3395 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3396 if (one == GS_ERROR)
3397 {
3398 *expr_p = NULL;
3399 return;
3400 }
3401
3402 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3403 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3404 always be true for all scalars, since is_gimple_mem_rhs insists on a
3405 temporary variable for them. */
3406 if (DECL_P (*expr_p))
3407 return;
3408
3409 /* If this is of variable size, we have no choice but to assume it doesn't
3410 overlap since we can't make a temporary for it. */
4c923c28 3411 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3412 return;
3413
3414 /* Otherwise, we must search for overlap ... */
3415 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3416 return;
3417
3418 /* ... and if found, force the value into a temporary. */
3419 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3420}
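/* Illustrative sketch (hypothetical): for a constructor assignment of the
   shape discussed above,

       a = { .x = a.y, .y = 0 };

   the element a.y overlaps the LHS base decl, so the walk above detects the
   conflict and the value is pre-evaluated into a formal temporary first:

       D.1 = a.y;
       a = { .x = D.1, .y = 0 };  */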
3421
6fa91b48
SB
3422/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3423 a RANGE_EXPR in a CONSTRUCTOR for an array.
3424
3425 var = lower;
3426 loop_entry:
3427 object[var] = value;
3428 if (var == upper)
3429 goto loop_exit;
3430 var = var + 1;
3431 goto loop_entry;
3432 loop_exit:
3433
3434 We increment var _after_ the loop exit check because we might otherwise
3435 fail if upper == TYPE_MAX_VALUE of upper's type (the increment would wrap).
3436
3437 Note that we never have to deal with SAVE_EXPRs here, because this has
3438 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3439
9771b263 3440static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 3441 gimple_seq *, bool);
6fa91b48
SB
3442
3443static void
3444gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3445 tree value, tree array_elt_type,
726a989a 3446 gimple_seq *pre_p, bool cleared)
6fa91b48 3447{
726a989a 3448 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3449 tree var, var_type, cref, tmp;
6fa91b48 3450
c2255bc4
AH
3451 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3452 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3453 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3454
3455 /* Create and initialize the index variable. */
3456 var_type = TREE_TYPE (upper);
3457 var = create_tmp_var (var_type, NULL);
726a989a 3458 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3459
3460 /* Add the loop entry label. */
726a989a 3461 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3462
3463 /* Build the reference. */
3464 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3465 var, NULL_TREE, NULL_TREE);
3466
3467 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3468 the store. Otherwise just assign value to the reference. */
3469
3470 if (TREE_CODE (value) == CONSTRUCTOR)
3471 /* NB we might have to call ourself recursively through
3472 gimplify_init_ctor_eval if the value is a constructor. */
3473 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3474 pre_p, cleared);
3475 else
726a989a 3476 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3477
3478 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3479 gimplify_seq_add_stmt (pre_p,
3480 gimple_build_cond (EQ_EXPR, var, upper,
3481 loop_exit_label, fall_thru_label));
3482
3483 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3484
3485 /* Otherwise, increment the index var... */
b56b9fe3
RS
3486 tmp = build2 (PLUS_EXPR, var_type, var,
3487 fold_convert (var_type, integer_one_node));
726a989a 3488 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3489
3490 /* ...and jump back to the loop entry. */
726a989a 3491 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3492
3493 /* Add the loop exit label. */
726a989a 3494 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3495}
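/* Illustrative instance (GNU C designated range, hypothetical): for

       int a[4] = { [0 ... 3] = 5 };

   the RANGE_EXPR [0, 3] is expanded by the routine above into

       var = 0;
     L.entry:
       a[var] = 5;
       if (var == 3) goto L.exit;
       var = var + 1;
       goto L.entry;
     L.exit:

   matching the loop shape documented before the forward declaration above.  */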
3496
292a398f 3497/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3498
292a398f 3499static bool
22ea9ec0 3500zero_sized_field_decl (const_tree fdecl)
292a398f 3501{
b8698a0f 3502 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3503 && integer_zerop (DECL_SIZE (fdecl)))
3504 return true;
3505 return false;
3506}
3507
d06526b7 3508/* Return true if TYPE is zero sized. */
b8698a0f 3509
d06526b7 3510static bool
22ea9ec0 3511zero_sized_type (const_tree type)
d06526b7
AP
3512{
3513 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3514 && integer_zerop (TYPE_SIZE (type)))
3515 return true;
3516 return false;
3517}
3518
57d1dd87
RH
3519/* A subroutine of gimplify_init_constructor. Generate individual
3520 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3521 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3522 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3523 zeroed first. */
3524
3525static void
9771b263 3526gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 3527 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3528{
3529 tree array_elt_type = NULL;
4038c495
GB
3530 unsigned HOST_WIDE_INT ix;
3531 tree purpose, value;
57d1dd87
RH
3532
3533 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3534 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3535
4038c495 3536 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3537 {
726a989a 3538 tree cref;
57d1dd87
RH
3539
3540 /* NULL values are created above for gimplification errors. */
3541 if (value == NULL)
3542 continue;
3543
3544 if (cleared && initializer_zerop (value))
3545 continue;
3546
6fa91b48
SB
3547 /* ??? Here's to hoping the front end fills in all of the indices,
3548 so we don't have to figure out what's missing ourselves. */
3549 gcc_assert (purpose);
3550
816fa80a
OH
3551 /* Skip zero-sized fields, unless value has side-effects. This can
3552 happen with calls to functions returning a zero-sized type, which
3553 we shouldn't discard. As a number of downstream passes don't
3554 expect sets of zero-sized fields, we rely on the gimplification of
3555 the MODIFY_EXPR we make below to drop the assignment statement. */
3556 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3557 continue;
3558
6fa91b48
SB
3559 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3560 whole range. */
3561 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3562 {
6fa91b48
SB
3563 tree lower = TREE_OPERAND (purpose, 0);
3564 tree upper = TREE_OPERAND (purpose, 1);
3565
3566 /* If the lower bound is equal to upper, just treat it as if
3567 upper was the index. */
3568 if (simple_cst_equal (lower, upper))
3569 purpose = upper;
3570 else
3571 {
3572 gimplify_init_ctor_eval_range (object, lower, upper, value,
3573 array_elt_type, pre_p, cleared);
3574 continue;
3575 }
3576 }
57d1dd87 3577
6fa91b48
SB
3578 if (array_elt_type)
3579 {
1a1640db
RG
3580 /* Do not use bitsizetype for ARRAY_REF indices. */
3581 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
3582 purpose
3583 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3584 purpose);
b4257cfc
RG
3585 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3586 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3587 }
3588 else
cf0efa6a
ILT
3589 {
3590 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3591 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3592 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3593 }
57d1dd87 3594
cf0efa6a
ILT
3595 if (TREE_CODE (value) == CONSTRUCTOR
3596 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3597 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3598 pre_p, cleared);
3599 else
3600 {
726a989a 3601 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3602 gimplify_and_add (init, pre_p);
726a989a 3603 ggc_free (init);
57d1dd87
RH
3604 }
3605 }
3606}
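/* Illustrative sketch (hypothetical): once the object has been block
   cleared, an initializer such as

       struct S { int a, b, c; } s = { .b = 1 };

   is reduced by the routine above to the single element assignment

       s.b = 1;

   the zero-valued and missing elements being skipped because CLEARED
   is true.  */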
3607
ad19c4be 3608/* Return the appropriate RHS predicate for this LHS. */
726a989a 3609
18f429e2 3610gimple_predicate
726a989a
RB
3611rhs_predicate_for (tree lhs)
3612{
ba4d8f9d
RG
3613 if (is_gimple_reg (lhs))
3614 return is_gimple_reg_rhs_or_call;
726a989a 3615 else
ba4d8f9d 3616 return is_gimple_mem_rhs_or_call;
726a989a
RB
3617}
3618
2ec5deb5
PB
3619/* Gimplify a C99 compound literal expression. This just means adding
3620 the DECL_EXPR before the current statement and using its anonymous
3621 decl instead. */
3622
3623static enum gimplify_status
a845a7f5 3624gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 3625 bool (*gimple_test_f) (tree),
a845a7f5 3626 fallback_t fallback)
2ec5deb5
PB
3627{
3628 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3629 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 3630 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
3631 /* Mark the decl as addressable if the compound literal
3632 expression is addressable now, otherwise it is marked too late
3633 after we gimplify the initialization expression. */
3634 if (TREE_ADDRESSABLE (*expr_p))
3635 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
3636 /* Otherwise, if we don't need an lvalue and have a literal directly
3637 substitute it. Check if it matches the gimple predicate, as
3638 otherwise we'd generate a new temporary, and we can as well just
3639 use the decl we already have. */
3640 else if (!TREE_ADDRESSABLE (decl)
3641 && init
3642 && (fallback & fb_lvalue) == 0
3643 && gimple_test_f (init))
3644 {
3645 *expr_p = init;
3646 return GS_OK;
3647 }
2ec5deb5
PB
3648
3649 /* Preliminarily mark non-addressed complex variables as eligible
3650 for promotion to gimple registers. We'll transform their uses
3651 as we find them. */
3652 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3653 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3654 && !TREE_THIS_VOLATILE (decl)
3655 && !needs_to_live_in_memory (decl))
3656 DECL_GIMPLE_REG_P (decl) = 1;
3657
a845a7f5
ILT
3658 /* If the decl is not addressable, then it is being used in some
3659 expression or on the right hand side of a statement, and it can
3660 be put into a readonly data section. */
3661 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3662 TREE_READONLY (decl) = 1;
3663
2ec5deb5
PB
3664 /* This decl isn't mentioned in the enclosing block, so add it to the
3665 list of temps. FIXME it seems a bit of a kludge to say that
3666 anonymous artificial vars aren't pushed, but everything else is. */
3667 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3668 gimple_add_tmp_var (decl);
3669
3670 gimplify_and_add (decl_s, pre_p);
3671 *expr_p = decl;
3672 return GS_OK;
3673}
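/* Illustrative sketch (C99, hypothetical): for

       int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR of the anonymous array created by the front end is
   gimplified into PRE_P and the compound literal expression itself is
   replaced by that decl, which then initializes p.  */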
3674
3675/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3676 return a new CONSTRUCTOR if something changed. */
3677
3678static tree
3679optimize_compound_literals_in_ctor (tree orig_ctor)
3680{
3681 tree ctor = orig_ctor;
9771b263
DN
3682 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3683 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
3684
3685 for (idx = 0; idx < num; idx++)
3686 {
9771b263 3687 tree value = (*elts)[idx].value;
2ec5deb5
PB
3688 tree newval = value;
3689 if (TREE_CODE (value) == CONSTRUCTOR)
3690 newval = optimize_compound_literals_in_ctor (value);
3691 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3692 {
3693 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3694 tree decl = DECL_EXPR_DECL (decl_s);
3695 tree init = DECL_INITIAL (decl);
3696
3697 if (!TREE_ADDRESSABLE (value)
3698 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
3699 && init
3700 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
3701 newval = optimize_compound_literals_in_ctor (init);
3702 }
3703 if (newval == value)
3704 continue;
3705
3706 if (ctor == orig_ctor)
3707 {
3708 ctor = copy_node (orig_ctor);
9771b263 3709 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
3710 elts = CONSTRUCTOR_ELTS (ctor);
3711 }
9771b263 3712 (*elts)[idx].value = newval;
2ec5deb5
PB
3713 }
3714 return ctor;
3715}
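/* Illustrative sketch (hypothetical): inside an initializer, a compound
   literal whose address is never taken is flattened, e.g.

       struct T t = { .u = (struct U) { 1, 2 } };

   is rewritten as if it had been written

       struct T t = { .u = { 1, 2 } };

   by substituting the literal's DECL_INITIAL constructor for the
   COMPOUND_LITERAL_EXPR.  */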
3716
26d44ae2
RH
3717/* A subroutine of gimplify_modify_expr. Break out elements of a
3718 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3719
3720 Note that we still need to clear any elements that don't have explicit
3721 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3722 original MODIFY_EXPR, we just remove all of the constructor elements.
3723
3724 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3725 GS_ERROR if we would have to create a temporary when gimplifying
3726 this constructor. Otherwise, return GS_OK.
3727
3728 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
26d44ae2
RH
3729
3730static enum gimplify_status
726a989a
RB
3731gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3732 bool want_value, bool notify_temp_creation)
26d44ae2 3733{
f5a1f0d0 3734 tree object, ctor, type;
26d44ae2 3735 enum gimplify_status ret;
9771b263 3736 vec<constructor_elt, va_gc> *elts;
26d44ae2 3737
f5a1f0d0 3738 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3739
ffed8a01
AH
3740 if (!notify_temp_creation)
3741 {
726a989a 3742 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3743 is_gimple_lvalue, fb_lvalue);
3744 if (ret == GS_ERROR)
3745 return ret;
3746 }
57d1dd87 3747
726a989a 3748 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3749 ctor = TREE_OPERAND (*expr_p, 1) =
3750 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3751 type = TREE_TYPE (ctor);
3752 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3753 ret = GS_ALL_DONE;
726a989a 3754
26d44ae2
RH
3755 switch (TREE_CODE (type))
3756 {
3757 case RECORD_TYPE:
3758 case UNION_TYPE:
3759 case QUAL_UNION_TYPE:
3760 case ARRAY_TYPE:
3761 {
57d1dd87 3762 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
3763 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3764 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
3765
3766 /* Aggregate types must lower constructors to initialization of
3767 individual elements. The exception is that a CONSTRUCTOR node
3768 with no elements indicates zero-initialization of the whole. */
9771b263 3769 if (vec_safe_is_empty (elts))
ffed8a01
AH
3770 {
3771 if (notify_temp_creation)
3772 return GS_OK;
3773 break;
3774 }
b8698a0f 3775
fe24d485
OH
3776 /* Fetch information about the constructor to direct later processing.
3777 We might want to make static versions of it in various cases, and
3778 can only do so if it known to be a valid constant initializer. */
3779 valid_const_initializer
3780 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3781 &num_ctor_elements, &complete_p);
26d44ae2
RH
3782
3783 /* If a const aggregate variable is being initialized, then it
3784 should never be a loss to promote the variable to be static. */
fe24d485 3785 if (valid_const_initializer
6f642f98 3786 && num_nonzero_elements > 1
26d44ae2 3787 && TREE_READONLY (object)
d0ea0759
SE
3788 && TREE_CODE (object) == VAR_DECL
3789 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3790 {
ffed8a01
AH
3791 if (notify_temp_creation)
3792 return GS_ERROR;
26d44ae2
RH
3793 DECL_INITIAL (object) = ctor;
3794 TREE_STATIC (object) = 1;
3795 if (!DECL_NAME (object))
3796 DECL_NAME (object) = create_tmp_var_name ("C");
3797 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3798
3799 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 3800 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
3801 data structures to figure out what that number should be,
3802 which are not set for this variable. I suppose this is
3803 important for local statics for inline functions, which aren't
3804 "local" in the object file sense. So in order to get a unique
3805 TU-local symbol, we must invoke the lhd version now. */
3806 lhd_set_decl_assembler_name (object);
3807
3808 *expr_p = NULL_TREE;
3809 break;
3810 }
3811
cce70747
JC
3812 /* If there are "lots" of initialized elements, even discounting
3813 those that are not address constants (and thus *must* be
3814 computed at runtime), then partition the constructor into
3815 constant and non-constant parts. Block copy the constant
3816 parts in, then generate code for the non-constant parts. */
3817 /* TODO. There's code in cp/typeck.c to do this. */
3818
953d0c90
RS
3819 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3820 /* store_constructor will ignore the clearing of variable-sized
3821 objects. Initializers for such objects must explicitly set
3822 every field that needs to be set. */
3823 cleared = false;
d368135f 3824 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
953d0c90 3825 /* If the constructor isn't complete, clear the whole object
d368135f 3826 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
953d0c90
RS
3827
3828 ??? This ought not to be needed. For any element not present
3829 in the initializer, we should simply set them to zero. Except
3830 we'd need to *find* the elements that are not present, and that
3831 requires trickery to avoid quadratic compile-time behavior in
3832 large cases or excessive memory use in small cases. */
73ed17ff 3833 cleared = true;
953d0c90 3834 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 3835 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
3836 && num_nonzero_elements < num_ctor_elements / 4)
3837 /* If there are "lots" of zeros, it's more efficient to clear
3838 the memory and then set the nonzero elements. */
cce70747 3839 cleared = true;
953d0c90
RS
3840 else
3841 cleared = false;
cce70747 3842
26d44ae2
RH
3843 /* If there are "lots" of initialized elements, and all of them
3844 are valid address constants, then the entire initializer can
cce70747
JC
3845 be dropped to memory, and then memcpy'd out. Don't do this
3846 for sparse arrays, though, as it's more efficient to follow
3847 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
3848 individual element initialization. Also don't do this for small
3849 all-zero initializers (which aren't big enough to merit
3850 clearing), and don't try to make bitwise copies of
089d1227 3851 TREE_ADDRESSABLE types. */
8afd015a
JM
3852 if (valid_const_initializer
3853 && !(cleared || num_nonzero_elements == 0)
089d1227 3854 && !TREE_ADDRESSABLE (type))
26d44ae2
RH
3855 {
3856 HOST_WIDE_INT size = int_size_in_bytes (type);
3857 unsigned int align;
3858
3859 /* ??? We can still get unbounded array types, at least
3860 from the C++ front end. This seems wrong, but attempt
3861 to work around it for now. */
3862 if (size < 0)
3863 {
3864 size = int_size_in_bytes (TREE_TYPE (object));
3865 if (size >= 0)
3866 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3867 }
3868
3869 /* Find the maximum alignment we can assume for the object. */
3870 /* ??? Make use of DECL_OFFSET_ALIGN. */
3871 if (DECL_P (object))
3872 align = DECL_ALIGN (object);
3873 else
3874 align = TYPE_ALIGN (type);
3875
f301837e
EB
3876 /* Do a block move either if the size is so small as to make
3877 each individual move a sub-unit move on average, or if it
3878 is so large as to make individual moves inefficient. */
329ad380
JJ
3879 if (size > 0
3880 && num_nonzero_elements > 1
f301837e
EB
3881 && (size < num_nonzero_elements
3882 || !can_move_by_pieces (size, align)))
26d44ae2 3883 {
ffed8a01
AH
3884 if (notify_temp_creation)
3885 return GS_ERROR;
3886
46314d3e
EB
3887 walk_tree (&ctor, force_labels_r, NULL, NULL);
3888 ctor = tree_output_constant_def (ctor);
3889 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3890 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3891 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
3892
3893 /* This is no longer an assignment of a CONSTRUCTOR, but
3894 we still may have processing to do on the LHS. So
3895 pretend we didn't do anything here to let that happen. */
3896 return GS_UNHANDLED;
26d44ae2
RH
3897 }
3898 }
3899
558af7ca
EB
3900 /* If the target is volatile, we have non-zero elements and more than
3901 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
3902 if (TREE_THIS_VOLATILE (object)
3903 && !TREE_ADDRESSABLE (type)
558af7ca 3904 && num_nonzero_elements > 0
9771b263 3905 && vec_safe_length (elts) > 1)
61c7cbf8
RG
3906 {
3907 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3908 TREE_OPERAND (*expr_p, 0) = temp;
3909 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3910 *expr_p,
3911 build2 (MODIFY_EXPR, void_type_node,
3912 object, temp));
3913 return GS_OK;
3914 }
3915
ffed8a01
AH
3916 if (notify_temp_creation)
3917 return GS_OK;
3918
675c873b
EB
3919 /* If there are nonzero elements and if needed, pre-evaluate to capture
3920 elements overlapping with the lhs into temporaries. We must do this
3921 before clearing to fetch the values before they are zeroed-out. */
3922 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
3923 {
3924 preeval_data.lhs_base_decl = get_base_address (object);
3925 if (!DECL_P (preeval_data.lhs_base_decl))
3926 preeval_data.lhs_base_decl = NULL;
3927 preeval_data.lhs_alias_set = get_alias_set (object);
3928
726a989a 3929 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
3930 pre_p, post_p, &preeval_data);
3931 }
3932
26d44ae2
RH
3933 if (cleared)
3934 {
3935 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3936 Note that we still have to gimplify, in order to handle the
57d1dd87 3937 case of variable sized types. Avoid shared tree structures. */
4038c495 3938 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 3939 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 3940 object = unshare_expr (object);
726a989a 3941 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
3942 }
3943
6fa91b48
SB
3944 /* If we have not block cleared the object, or if there are nonzero
3945 elements in the constructor, add assignments to the individual
3946 scalar fields of the object. */
3947 if (!cleared || num_nonzero_elements > 0)
85d89e76 3948 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
3949
3950 *expr_p = NULL_TREE;
3951 }
3952 break;
3953
3954 case COMPLEX_TYPE:
3955 {
3956 tree r, i;
3957
ffed8a01
AH
3958 if (notify_temp_creation)
3959 return GS_OK;
3960
26d44ae2 3961 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
3962 gcc_assert (elts->length () == 2);
3963 r = (*elts)[0].value;
3964 i = (*elts)[1].value;
26d44ae2
RH
3965 if (r == NULL || i == NULL)
3966 {
e8160c9a 3967 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
3968 if (r == NULL)
3969 r = zero;
3970 if (i == NULL)
3971 i = zero;
3972 }
3973
3974 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3975 represent creation of a complex value. */
3976 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3977 {
3978 ctor = build_complex (type, r, i);
3979 TREE_OPERAND (*expr_p, 1) = ctor;
3980 }
3981 else
3982 {
b4257cfc 3983 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 3984 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
3985 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3986 pre_p,
3987 post_p,
17ad5b5e
RH
3988 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3989 fb_rvalue);
26d44ae2
RH
3990 }
3991 }
3992 break;
506e2710 3993
26d44ae2 3994 case VECTOR_TYPE:
4038c495
GB
3995 {
3996 unsigned HOST_WIDE_INT ix;
3997 constructor_elt *ce;
e89be13b 3998
ffed8a01
AH
3999 if (notify_temp_creation)
4000 return GS_OK;
4001
4038c495
GB
4002 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4003 if (TREE_CONSTANT (ctor))
4004 {
4005 bool constant_p = true;
4006 tree value;
4007
4008 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4009 elements, such as addresses or trapping values like
4010 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4011 in VECTOR_CST nodes. */
4038c495
GB
4012 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4013 if (!CONSTANT_CLASS_P (value))
4014 {
4015 constant_p = false;
4016 break;
4017 }
e89be13b 4018
4038c495
GB
4019 if (constant_p)
4020 {
4021 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4022 break;
4023 }
84816907 4024
9f1da821 4025 /* Don't reduce an initializer constant even if we can't
84816907
JM
4026 make a VECTOR_CST. It won't do anything for us, and it'll
4027 prevent us from representing it as a single constant. */
9f1da821
RS
4028 if (initializer_constant_valid_p (ctor, type))
4029 break;
4030
4031 TREE_CONSTANT (ctor) = 0;
4038c495 4032 }
e89be13b 4033
4038c495
GB
4034 /* Vector types use CONSTRUCTOR all the way through gimple
4035 compilation as a general initializer. */
9771b263 4036 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
4037 {
4038 enum gimplify_status tret;
726a989a
RB
4039 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4040 fb_rvalue);
4038c495
GB
4041 if (tret == GS_ERROR)
4042 ret = GS_ERROR;
4043 }
726a989a
RB
4044 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4045 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4046 }
26d44ae2 4047 break;
6de9cd9a 4048
26d44ae2
RH
4049 default:
4050 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4051 gcc_unreachable ();
26d44ae2 4052 }
6de9cd9a 4053
26d44ae2
RH
4054 if (ret == GS_ERROR)
4055 return GS_ERROR;
4056 else if (want_value)
4057 {
26d44ae2
RH
4058 *expr_p = object;
4059 return GS_OK;
6de9cd9a 4060 }
26d44ae2 4061 else
726a989a
RB
4062 {
4063 /* If we have gimplified both sides of the initializer but have
4064 not emitted an assignment, do so now. */
4065 if (*expr_p)
4066 {
4067 tree lhs = TREE_OPERAND (*expr_p, 0);
4068 tree rhs = TREE_OPERAND (*expr_p, 1);
4069 gimple init = gimple_build_assign (lhs, rhs);
4070 gimplify_seq_add_stmt (pre_p, init);
4071 *expr_p = NULL;
4072 }
4073
4074 return GS_ALL_DONE;
4075 }
26d44ae2 4076}
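/* Illustrative sketch of the strategies chosen above (hypothetical
   declarations; the exact thresholds depend on CLEAR_RATIO and
   can_move_by_pieces):

       int a[100] = { [3] = 1 };         mostly zeros: block-clear the
                                         object, then emit a[3] = 1;
       const int b[4] = { 1, 2, 3, 4 };  valid constant initializer of a
                                         read-only object: promote it to a
                                         TREE_STATIC initializer;
       int c[4] = { i, j, k, l };        no constant part: emit one element
                                         assignment per value.  */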
6de9cd9a 4077
de4af523
JJ
4078/* Given a pointer value OP0, return a simplified version of an
4079 indirection through OP0, or NULL_TREE if no simplification is
4080 possible. This may only be applied to the RHS of an expression.
4081 Note that the resulting type may differ from the type pointed to;
4082 it only needs to remain compatible with it from the langhooks
4083 point of view. */
4084
4085static tree
4086gimple_fold_indirect_ref_rhs (tree t)
4087{
4088 return gimple_fold_indirect_ref (t);
4089}
4090
4caa08da
AH
4091/* Subroutine of gimplify_modify_expr to do simplifications of
4092 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4093 something changes. */
6de9cd9a 4094
26d44ae2 4095static enum gimplify_status
726a989a
RB
4096gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4097 gimple_seq *pre_p, gimple_seq *post_p,
4098 bool want_value)
26d44ae2 4099{
6d729f28
JM
4100 enum gimplify_status ret = GS_UNHANDLED;
4101 bool changed;
6de9cd9a 4102
6d729f28
JM
4103 do
4104 {
4105 changed = false;
4106 switch (TREE_CODE (*from_p))
4107 {
4108 case VAR_DECL:
4109 /* If we're assigning from a read-only variable initialized with
4110 a constructor, do the direct assignment from the constructor,
4111 but only if neither source nor target are volatile since this
4112 latter assignment might end up being done on a per-field basis. */
4113 if (DECL_INITIAL (*from_p)
4114 && TREE_READONLY (*from_p)
4115 && !TREE_THIS_VOLATILE (*from_p)
4116 && !TREE_THIS_VOLATILE (*to_p)
4117 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4118 {
4119 tree old_from = *from_p;
4120 enum gimplify_status subret;
4121
4122 /* Move the constructor into the RHS. */
4123 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4124
4125 /* Let's see if gimplify_init_constructor will need to put
4126 it in memory. */
4127 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4128 false, true);
4129 if (subret == GS_ERROR)
4130 {
4131 /* If so, revert the change. */
4132 *from_p = old_from;
4133 }
4134 else
4135 {
4136 ret = GS_OK;
4137 changed = true;
4138 }
4139 }
4140 break;
4141 case INDIRECT_REF:
4caa08da 4142 {
6d729f28 4143 /* If we have code like
ffed8a01 4144
6d729f28 4145 *(const A*)(A*)&x
ffed8a01 4146
6d729f28
JM
4147 where the type of "x" is a (possibly cv-qualified variant
4148 of "A"), treat the entire expression as identical to "x".
4149 This kind of code arises in C++ when an object is bound
4150 to a const reference, and if "x" is a TARGET_EXPR we want
4151 to take advantage of the optimization below. */
06baaba3 4152 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4153 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4154 if (t)
ffed8a01 4155 {
06baaba3
RG
4156 if (TREE_THIS_VOLATILE (t) != volatile_p)
4157 {
4158 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4159 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4160 build_fold_addr_expr (t));
4161 if (REFERENCE_CLASS_P (t))
4162 TREE_THIS_VOLATILE (t) = volatile_p;
4163 }
6d729f28
JM
4164 *from_p = t;
4165 ret = GS_OK;
4166 changed = true;
ffed8a01 4167 }
6d729f28
JM
4168 break;
4169 }
4170
4171 case TARGET_EXPR:
4172 {
4173 /* If we are initializing something from a TARGET_EXPR, strip the
4174 TARGET_EXPR and initialize it directly, if possible. This can't
4175 be done if the initializer is void, since that implies that the
4176 temporary is set in some non-trivial way.
4177
4178 ??? What about code that pulls out the temp and uses it
4179 elsewhere? I think that such code never uses the TARGET_EXPR as
4180 an initializer. If I'm wrong, we'll die because the temp won't
4181 have any RTL. In that case, I guess we'll need to replace
4182 references somehow. */
4183 tree init = TARGET_EXPR_INITIAL (*from_p);
4184
4185 if (init
4186 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4187 {
6d729f28 4188 *from_p = init;
ffed8a01 4189 ret = GS_OK;
6d729f28 4190 changed = true;
ffed8a01 4191 }
4caa08da 4192 }
6d729f28 4193 break;
f98625f6 4194
6d729f28
JM
4195 case COMPOUND_EXPR:
4196 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4197 caught. */
4198 gimplify_compound_expr (from_p, pre_p, true);
4199 ret = GS_OK;
4200 changed = true;
4201 break;
f98625f6 4202
6d729f28 4203 case CONSTRUCTOR:
ce3beba3
JM
4204 /* If we already made some changes, let the front end have a
4205 crack at this before we break it down. */
4206 if (ret != GS_UNHANDLED)
4207 break;
6d729f28
JM
4208 /* If we're initializing from a CONSTRUCTOR, break this into
4209 individual MODIFY_EXPRs. */
4210 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4211 false);
4212
4213 case COND_EXPR:
4214 /* If we're assigning to a non-register type, push the assignment
4215 down into the branches. This is mandatory for ADDRESSABLE types,
4216 since we cannot generate temporaries for such, but it saves a
4217 copy in other cases as well. */
4218 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4219 {
6d729f28
JM
4220 /* This code should mirror the code in gimplify_cond_expr. */
4221 enum tree_code code = TREE_CODE (*expr_p);
4222 tree cond = *from_p;
4223 tree result = *to_p;
4224
4225 ret = gimplify_expr (&result, pre_p, post_p,
4226 is_gimple_lvalue, fb_lvalue);
4227 if (ret != GS_ERROR)
4228 ret = GS_OK;
4229
4230 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4231 TREE_OPERAND (cond, 1)
4232 = build2 (code, void_type_node, result,
4233 TREE_OPERAND (cond, 1));
4234 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4235 TREE_OPERAND (cond, 2)
4236 = build2 (code, void_type_node, unshare_expr (result),
4237 TREE_OPERAND (cond, 2));
4238
4239 TREE_TYPE (cond) = void_type_node;
4240 recalculate_side_effects (cond);
4241
4242 if (want_value)
4243 {
4244 gimplify_and_add (cond, pre_p);
4245 *expr_p = unshare_expr (result);
4246 }
4247 else
4248 *expr_p = cond;
4249 return ret;
f98625f6 4250 }
f98625f6 4251 break;
f98625f6 4252
6d729f28
JM
4253 case CALL_EXPR:
4254 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4255 return slot so that we don't generate a temporary. */
4256 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4257 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4258 {
6d729f28
JM
4259 bool use_target;
4260
4261 if (!(rhs_predicate_for (*to_p))(*from_p))
4262 /* If we need a temporary, *to_p isn't accurate. */
4263 use_target = false;
ad19c4be 4264 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4265 else if (TREE_CODE (*to_p) == RESULT_DECL
4266 && DECL_NAME (*to_p) == NULL_TREE
4267 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4268 use_target = true;
4269 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4270 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4271 /* Don't force regs into memory. */
4272 use_target = false;
4273 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4274 /* It's OK to use the target directly if it's being
4275 initialized. */
4276 use_target = true;
aabb90e5
RG
4277 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4278 /* Always use the target and thus RSO for variable-sized types.
4279 GIMPLE cannot deal with a variable-sized assignment
4280 embedded in a call statement. */
4281 use_target = true;
345ae177
AH
4282 else if (TREE_CODE (*to_p) != SSA_NAME
4283 && (!is_gimple_variable (*to_p)
4284 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4285 /* Don't use the original target if it's already addressable;
4286 if its address escapes, and the called function uses the
4287 NRV optimization, a conforming program could see *to_p
4288 change before the called function returns; see c++/19317.
4289 When optimizing, the return_slot pass marks more functions
4290 as safe after we have escape info. */
4291 use_target = false;
4292 else
4293 use_target = true;
4294
4295 if (use_target)
4296 {
4297 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4298 mark_addressable (*to_p);
4299 }
26d44ae2 4300 }
6d729f28 4301 break;
6de9cd9a 4302
6d729f28
JM
4303 case WITH_SIZE_EXPR:
4304 /* Likewise for calls that return an aggregate of non-constant size,
4305 since we would not be able to generate a temporary at all. */
4306 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4307 {
4308 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4309 /* We don't change ret in this case because the
4310 WITH_SIZE_EXPR might have been added in
4311 gimplify_modify_expr, so returning GS_OK would lead to an
4312 infinite loop. */
6d729f28
JM
4313 changed = true;
4314 }
4315 break;
6de9cd9a 4316
6d729f28
JM
4317 /* If we're initializing from a container, push the initialization
4318 inside it. */
4319 case CLEANUP_POINT_EXPR:
4320 case BIND_EXPR:
4321 case STATEMENT_LIST:
26d44ae2 4322 {
6d729f28
JM
4323 tree wrap = *from_p;
4324 tree t;
dae7ec87 4325
6d729f28
JM
4326 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4327 fb_lvalue);
dae7ec87
JM
4328 if (ret != GS_ERROR)
4329 ret = GS_OK;
4330
6d729f28
JM
4331 t = voidify_wrapper_expr (wrap, *expr_p);
4332 gcc_assert (t == *expr_p);
dae7ec87
JM
4333
4334 if (want_value)
4335 {
6d729f28
JM
4336 gimplify_and_add (wrap, pre_p);
4337 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4338 }
4339 else
6d729f28
JM
4340 *expr_p = wrap;
4341 return GS_OK;
26d44ae2 4342 }
6de9cd9a 4343
6d729f28 4344 case COMPOUND_LITERAL_EXPR:
fa47911c 4345 {
6d729f28
JM
4346 tree complit = TREE_OPERAND (*expr_p, 1);
4347 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4348 tree decl = DECL_EXPR_DECL (decl_s);
4349 tree init = DECL_INITIAL (decl);
4350
4351 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4352 into struct T x = { 0, 1, 2 } if the address of the
4353 compound literal has never been taken. */
4354 if (!TREE_ADDRESSABLE (complit)
4355 && !TREE_ADDRESSABLE (decl)
4356 && init)
fa47911c 4357 {
6d729f28
JM
4358 *expr_p = copy_node (*expr_p);
4359 TREE_OPERAND (*expr_p, 1) = init;
4360 return GS_OK;
fa47911c
JM
4361 }
4362 }
4363
6d729f28
JM
4364 default:
4365 break;
2ec5deb5 4366 }
6d729f28
JM
4367 }
4368 while (changed);
6de9cd9a 4369
6de9cd9a
DN
4370 return ret;
4371}
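/* Illustrative sketch (hypothetical): for an aggregate-returning call
   assigned to an acceptable destination,

       s = foo ();

   the CALL_EXPR case above sets CALL_EXPR_RETURN_SLOT_OPT and marks s
   addressable, so the call writes its result directly into s instead of
   into a temporary that would then have to be copied.  */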
4372
216820a4
RG
4373
4374/* Return true if T looks like a valid GIMPLE statement. */
4375
4376static bool
4377is_gimple_stmt (tree t)
4378{
4379 const enum tree_code code = TREE_CODE (t);
4380
4381 switch (code)
4382 {
4383 case NOP_EXPR:
4384 /* The only valid NOP_EXPR is the empty statement. */
4385 return IS_EMPTY_STMT (t);
4386
4387 case BIND_EXPR:
4388 case COND_EXPR:
4389 /* These are only valid if they're void. */
4390 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4391
4392 case SWITCH_EXPR:
4393 case GOTO_EXPR:
4394 case RETURN_EXPR:
4395 case LABEL_EXPR:
4396 case CASE_LABEL_EXPR:
4397 case TRY_CATCH_EXPR:
4398 case TRY_FINALLY_EXPR:
4399 case EH_FILTER_EXPR:
4400 case CATCH_EXPR:
4401 case ASM_EXPR:
4402 case STATEMENT_LIST:
4403 case OMP_PARALLEL:
4404 case OMP_FOR:
74bf76ed 4405 case OMP_SIMD:
c02065fc 4406 case CILK_SIMD:
acf0174b 4407 case OMP_DISTRIBUTE:
216820a4
RG
4408 case OMP_SECTIONS:
4409 case OMP_SECTION:
4410 case OMP_SINGLE:
4411 case OMP_MASTER:
acf0174b 4412 case OMP_TASKGROUP:
216820a4
RG
4413 case OMP_ORDERED:
4414 case OMP_CRITICAL:
4415 case OMP_TASK:
4416 /* These are always void. */
4417 return true;
4418
4419 case CALL_EXPR:
4420 case MODIFY_EXPR:
4421 case PREDICT_EXPR:
4422 /* These are valid regardless of their type. */
4423 return true;
4424
4425 default:
4426 return false;
4427 }
4428}
4429
4430
d9c2d296
AP
4431/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4432 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4433 DECL_GIMPLE_REG_P set.
4434
4435 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4436 other, unmodified part of the complex object just before the total store.
4437 As a consequence, if the object is still uninitialized, an undefined value
4438 will be loaded into a register, which may result in a spurious exception
4439 if the register is floating-point and the value happens to be a signaling
4440 NaN for example. Then the fully-fledged complex operations lowering pass
4441 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
4442
4443static enum gimplify_status
726a989a
RB
4444gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4445 bool want_value)
d9c2d296
AP
4446{
4447 enum tree_code code, ocode;
4448 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4449
726a989a
RB
4450 lhs = TREE_OPERAND (*expr_p, 0);
4451 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4452 code = TREE_CODE (lhs);
4453 lhs = TREE_OPERAND (lhs, 0);
4454
4455 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4456 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4457 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4458 other = get_formal_tmp_var (other, pre_p);
4459
4460 realpart = code == REALPART_EXPR ? rhs : other;
4461 imagpart = code == REALPART_EXPR ? other : rhs;
4462
4463 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4464 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4465 else
4466 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4467
726a989a
RB
4468 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4469 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4470
4471 return GS_ALL_DONE;
4472}
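/* Illustrative sketch (hypothetical): a partial store to a non-addressable
   complex variable,

       __real__ c = x;

   is promoted by the routine above to a total store

       D.1 = __imag__ c;
       c = COMPLEX_EXPR <x, D.1>;

   which lets c live in a register; see the note above about the load of the
   possibly uninitialized part.  */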
4473
206048bd 4474/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4475
4476 modify_expr
4477 : varname '=' rhs
4478 | '*' ID '=' rhs
4479
4480 PRE_P points to the list where side effects that must happen before
4481 *EXPR_P should be stored.
4482
4483 POST_P points to the list where side effects that must happen after
4484 *EXPR_P should be stored.
4485
4486 WANT_VALUE is nonzero iff we want to use the value of this expression
4487 in another expression. */
4488
4489static enum gimplify_status
726a989a
RB
4490gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4491 bool want_value)
6de9cd9a 4492{
726a989a
RB
4493 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4494 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4495 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4496 gimple assign;
db3927fb 4497 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4498 gimple_stmt_iterator gsi;
6de9cd9a 4499
282899df
NS
4500 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4501 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4502
d0ad58f9
JM
4503 /* Trying to simplify a clobber using normal logic doesn't work,
4504 so handle it here. */
4505 if (TREE_CLOBBER_P (*from_p))
4506 {
5d751b0c
JJ
4507 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4508 if (ret == GS_ERROR)
4509 return ret;
4510 gcc_assert (!want_value
4511 && (TREE_CODE (*to_p) == VAR_DECL
4512 || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
4513 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4514 *expr_p = NULL;
4515 return GS_ALL_DONE;
4516 }
4517
1b24a790
RG
4518 /* Insert pointer conversions required by the middle-end that are not
4519 required by the frontend. This fixes middle-end type checking for
4520 for example gcc.dg/redecl-6.c. */
daad0278 4521 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4522 {
4523 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4524 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4525 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4526 }
4527
83d7e8f0
JM
4528 /* See if any simplifications can be done based on what the RHS is. */
4529 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4530 want_value);
4531 if (ret != GS_UNHANDLED)
4532 return ret;
4533
4534 /* For zero sized types only gimplify the left hand side and right hand
4535 side as statements and throw away the assignment. Do this after
4536 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4537 types properly. */
753b34d7 4538 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4539 {
726a989a
RB
4540 gimplify_stmt (from_p, pre_p);
4541 gimplify_stmt (to_p, pre_p);
412f8986
AP
4542 *expr_p = NULL_TREE;
4543 return GS_ALL_DONE;
4544 }
6de9cd9a 4545
d25cee4d
RH
4546 /* If the value being copied is of variable width, compute the length
4547 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4548 before gimplifying any of the operands so that we can resolve any
4549 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4550 the size of the expression to be copied, not of the destination, so
726a989a 4551 that is what we must do here. */
d25cee4d 4552 maybe_with_size_expr (from_p);
6de9cd9a 4553
44de5aeb
RK
4554 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4555 if (ret == GS_ERROR)
4556 return ret;
6de9cd9a 4557
726a989a
RB
4558 /* As a special case, we have to temporarily allow for assignments
4559 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4560 a toplevel statement, when gimplifying the GENERIC expression
4561 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4562 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4563
4564 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4565 prevent gimplify_expr from trying to create a new temporary for
4566 foo's LHS, we tell it that it should only gimplify until it
4567 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4568 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4569 and all we need to do here is set 'a' to be its LHS. */
4570 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4571 fb_rvalue);
6de9cd9a
DN
4572 if (ret == GS_ERROR)
4573 return ret;
4574
44de5aeb
RK
4575 /* Now see if the above changed *from_p to something we handle specially. */
4576 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4577 want_value);
6de9cd9a
DN
4578 if (ret != GS_UNHANDLED)
4579 return ret;
4580
d25cee4d
RH
4581 /* If we've got a variable sized assignment between two lvalues (i.e. does
4582 not involve a call), then we can make things a bit more straightforward
4583 by converting the assignment to memcpy or memset. */
4584 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4585 {
4586 tree from = TREE_OPERAND (*from_p, 0);
4587 tree size = TREE_OPERAND (*from_p, 1);
4588
4589 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4590 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4591
e847cc68 4592 if (is_gimple_addressable (from))
d25cee4d
RH
4593 {
4594 *from_p = from;
726a989a
RB
4595 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4596 pre_p);
d25cee4d
RH
4597 }
4598 }
4599
e41d82f5
RH
4600 /* Transform partial stores to non-addressable complex variables into
4601 total stores. This allows us to use real instead of virtual operands
4602 for these variables, which improves optimization. */
4603 if ((TREE_CODE (*to_p) == REALPART_EXPR
4604 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4605 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4606 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4607
f173837a
EB
4608 /* Try to alleviate the effects of the gimplification creating artificial
4609 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4610 if (!gimplify_ctxp->into_ssa
f2896bc9 4611 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4612 && DECL_IGNORED_P (*from_p)
4613 && DECL_P (*to_p)
4614 && !DECL_IGNORED_P (*to_p))
f173837a
EB
4615 {
4616 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4617 DECL_NAME (*from_p)
4618 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4619 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4620 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4621 }
4622
8f0fe813
NS
4623 if (want_value && TREE_THIS_VOLATILE (*to_p))
4624 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4625
726a989a
RB
4626 if (TREE_CODE (*from_p) == CALL_EXPR)
4627 {
4628 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4629 instead of a GIMPLE_ASSIGN. */
f20ca725
RG
4630 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4631 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4632 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
ed9c79e1
JJ
4633 tree fndecl = get_callee_fndecl (*from_p);
4634 if (fndecl
4635 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4636 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4637 && call_expr_nargs (*from_p) == 3)
4638 assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4639 CALL_EXPR_ARG (*from_p, 0),
4640 CALL_EXPR_ARG (*from_p, 1),
4641 CALL_EXPR_ARG (*from_p, 2));
4642 else
4643 {
4644 assign = gimple_build_call_from_tree (*from_p);
4645 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4646 }
f6b64c35 4647 notice_special_calls (assign);
5de8da9b
AO
4648 if (!gimple_call_noreturn_p (assign))
4649 gimple_call_set_lhs (assign, *to_p);
f173837a 4650 }
726a989a 4651 else
c2255bc4
AH
4652 {
4653 assign = gimple_build_assign (*to_p, *from_p);
4654 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4655 }
f173837a 4656
726a989a 4657 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4658 {
2ad728d2
RG
4659 /* We should have got an SSA name from the start. */
4660 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
726a989a 4661 }
07beea0d 4662
6da8be89
MM
4663 gimplify_seq_add_stmt (pre_p, assign);
4664 gsi = gsi_last (*pre_p);
88ac13da 4665 maybe_fold_stmt (&gsi);
6da8be89 4666
726a989a
RB
4667 if (want_value)
4668 {
8f0fe813 4669 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4670 return GS_OK;
6de9cd9a 4671 }
726a989a
RB
4672 else
4673 *expr_p = NULL;
6de9cd9a 4674
17ad5b5e 4675 return GS_ALL_DONE;
6de9cd9a
DN
4676}
4677
ad19c4be
EB
4678/* Gimplify a comparison between two variable-sized objects. Do this
4679 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
4680
4681static enum gimplify_status
4682gimplify_variable_sized_compare (tree *expr_p)
4683{
692ad9aa 4684 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
4685 tree op0 = TREE_OPERAND (*expr_p, 0);
4686 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 4687 tree t, arg, dest, src, expr;
5039610b
SL
4688
4689 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4690 arg = unshare_expr (arg);
4691 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
4692 src = build_fold_addr_expr_loc (loc, op1);
4693 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 4694 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 4695 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
4696
4697 expr
b4257cfc 4698 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
4699 SET_EXPR_LOCATION (expr, loc);
4700 *expr_p = expr;
44de5aeb
RK
4701
4702 return GS_OK;
4703}
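/* Illustration only, not part of gimplify.c: a minimal C sketch of the
   source-level effect of the rewrite above.  Comparing two objects whose
   size is known only at run time becomes a memcmp over their storage,
   compared against zero with the original comparison code.  The function
   name and parameters below are purely illustrative.  */
#include <string.h>

static int
variable_sized_equal (const void *op0, const void *op1, size_t size_unit)
{
  /* The EQ_EXPR on the two objects becomes memcmp (...) == 0.  */
  return memcmp (op0, op1, size_unit) == 0;
}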
4704
ad19c4be
EB
4705/* Gimplify a comparison between two aggregate objects of integral scalar
4706 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
4707
4708static enum gimplify_status
4709gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4710{
db3927fb 4711 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
4712 tree op0 = TREE_OPERAND (*expr_p, 0);
4713 tree op1 = TREE_OPERAND (*expr_p, 1);
4714
4715 tree type = TREE_TYPE (op0);
4716 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4717
db3927fb
AH
4718 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4719 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
4720
4721 *expr_p
db3927fb 4722 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
4723
4724 return GS_OK;
4725}
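/* Illustration only, not part of gimplify.c: the source-level effect of the
   rewrite above.  A pair of aggregates whose TYPE_MODE is an integral
   scalar mode (here a struct that fits in 64 bits) is compared as the
   bitwise-equivalent scalar values; the memcpy calls stand in for the
   VIEW_CONVERT_EXPRs.  Everything below is illustrative, not compiler code.  */
#include <stdint.h>
#include <string.h>

struct two_words { uint32_t lo, hi; };

static int
two_words_equal (struct two_words a, struct two_words b)
{
  uint64_t wa, wb;
  memcpy (&wa, &a, sizeof wa);   /* VIEW_CONVERT_EXPR <uint64_t> (a) */
  memcpy (&wb, &b, sizeof wb);   /* VIEW_CONVERT_EXPR <uint64_t> (b) */
  return wa == wb;
}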
4726
ad19c4be
EB
4727/* Gimplify an expression sequence. This function gimplifies each
4728 expression and rewrites the original expression with the last
6de9cd9a
DN
4729 expression of the sequence in GIMPLE form.
4730
4731 PRE_P points to the list where the side effects for all the
4732 expressions in the sequence will be emitted.
d3147f64 4733
6de9cd9a 4734 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
4735
4736static enum gimplify_status
726a989a 4737gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4738{
4739 tree t = *expr_p;
4740
4741 do
4742 {
4743 tree *sub_p = &TREE_OPERAND (t, 0);
4744
4745 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4746 gimplify_compound_expr (sub_p, pre_p, false);
4747 else
726a989a 4748 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
4749
4750 t = TREE_OPERAND (t, 1);
4751 }
4752 while (TREE_CODE (t) == COMPOUND_EXPR);
4753
4754 *expr_p = t;
4755 if (want_value)
4756 return GS_OK;
4757 else
4758 {
726a989a 4759 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
4760 return GS_ALL_DONE;
4761 }
4762}
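/* Illustration only, not part of gimplify.c: the flattening performed by
   the loop above, shown at source level.  f, g and h are placeholder
   functions; "after" is roughly the statement list the gimplifier emits.  */
static int f (void) { return 1; }
static int g (void) { return 2; }
static int h (void) { return 3; }

static int
before (void)
{
  return (f (), g (), h ());   /* nested COMPOUND_EXPR */
}

static int
after (void)
{
  f ();                        /* side effects emitted into *PRE_P */
  g ();
  return h ();                 /* the value of the last operand remains */
}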
4763
726a989a
RB
4764/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4765 gimplify. After gimplification, EXPR_P will point to a new temporary
4766 that holds the original value of the SAVE_EXPR node.
6de9cd9a 4767
726a989a 4768 PRE_P points to the list where side effects that must happen before
ad19c4be 4769 *EXPR_P should be stored. */
6de9cd9a
DN
4770
4771static enum gimplify_status
726a989a 4772gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4773{
4774 enum gimplify_status ret = GS_ALL_DONE;
4775 tree val;
4776
282899df 4777 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
4778 val = TREE_OPERAND (*expr_p, 0);
4779
7f5e6307
RH
4780 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4781 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 4782 {
7f5e6307
RH
4783 /* The operand may be a void-valued expression such as SAVE_EXPRs
4784 generated by the Java frontend for class initialization. It is
4785 being executed only for its side-effects. */
4786 if (TREE_TYPE (val) == void_type_node)
4787 {
4788 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4789 is_gimple_stmt, fb_none);
7f5e6307
RH
4790 val = NULL;
4791 }
4792 else
4793 val = get_initialized_tmp_var (val, pre_p, post_p);
4794
4795 TREE_OPERAND (*expr_p, 0) = val;
4796 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 4797 }
6de9cd9a 4798
7f5e6307
RH
4799 *expr_p = val;
4800
6de9cd9a
DN
4801 return ret;
4802}
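/* Illustration only, not part of gimplify.c: what resolving a SAVE_EXPR
   amounts to at source level.  An expression that must be evaluated exactly
   once but is referenced several times is evaluated into a temporary up
   front, and every later reference reads that temporary.  The names below
   are illustrative.  */
static int
evaluate_once (int (*expensive) (void))
{
  int saved = expensive ();    /* get_initialized_tmp_var: evaluate once */
  return saved * saved;        /* further uses read the temporary        */
}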
4803
ad19c4be 4804/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
4805
4806 unary_expr
4807 : ...
4808 | '&' varname
4809 ...
4810
4811 PRE_P points to the list where side effects that must happen before
4812 *EXPR_P should be stored.
4813
4814 POST_P points to the list where side effects that must happen after
4815 *EXPR_P should be stored. */
4816
4817static enum gimplify_status
726a989a 4818gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4819{
4820 tree expr = *expr_p;
4821 tree op0 = TREE_OPERAND (expr, 0);
4822 enum gimplify_status ret;
db3927fb 4823 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
4824
4825 switch (TREE_CODE (op0))
4826 {
4827 case INDIRECT_REF:
67f23620 4828 do_indirect_ref:
6de9cd9a
DN
4829 /* Check if we are dealing with an expression of the form '&*ptr'.
4830 While the front end folds away '&*ptr' into 'ptr', these
4831 expressions may be generated internally by the compiler (e.g.,
4832 builtins like __builtin_va_end). */
67f23620
RH
4833 /* Caution: the silent array decomposition semantics we allow for
4834 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
4835 /* Gimplification of the ADDR_EXPR operand may drop
4836 cv-qualification conversions, so make sure we add them if
4837 needed. */
67f23620
RH
4838 {
4839 tree op00 = TREE_OPERAND (op0, 0);
4840 tree t_expr = TREE_TYPE (expr);
4841 tree t_op00 = TREE_TYPE (op00);
4842
f4088621 4843 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 4844 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
4845 *expr_p = op00;
4846 ret = GS_OK;
4847 }
6de9cd9a
DN
4848 break;
4849
44de5aeb
RK
4850 case VIEW_CONVERT_EXPR:
4851 /* Take the address of our operand and then convert it to the type of
af72267c
RK
4852 this ADDR_EXPR.
4853
4854 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4855 all clear. The impact of this transformation is even less clear. */
91804752
EB
4856
4857 /* If the operand is a useless conversion, look through it. Doing so
4858 guarantees that the ADDR_EXPR and its operand will remain of the
4859 same type. */
4860 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 4861 op0 = TREE_OPERAND (op0, 0);
91804752 4862
db3927fb
AH
4863 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4864 build_fold_addr_expr_loc (loc,
4865 TREE_OPERAND (op0, 0)));
44de5aeb 4866 ret = GS_OK;
6de9cd9a
DN
4867 break;
4868
4869 default:
4870 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
4871 the address of a call that returns a struct; see
4872 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4873 the implied temporary explicit. */
936d04b6 4874
f76d6e6f 4875 /* Make the operand addressable. */
6de9cd9a 4876 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 4877 is_gimple_addressable, fb_either);
8b17cc05
RG
4878 if (ret == GS_ERROR)
4879 break;
67f23620 4880
f76d6e6f
EB
4881 /* Then mark it. Beware that it may not be possible to do so directly
4882 if a temporary has been created by the gimplification. */
4883 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 4884
8b17cc05 4885 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 4886
8b17cc05
RG
4887 /* For various reasons, the gimplification of the expression
4888 may have made a new INDIRECT_REF. */
4889 if (TREE_CODE (op0) == INDIRECT_REF)
4890 goto do_indirect_ref;
4891
6b8b9e42
RG
4892 mark_addressable (TREE_OPERAND (expr, 0));
4893
4894 /* The FEs may end up building ADDR_EXPRs early on a decl with
4895 an incomplete type. Re-build ADDR_EXPRs in canonical form
4896 here. */
4897 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4898 *expr_p = build_fold_addr_expr (op0);
4899
8b17cc05 4900 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
4901 recompute_tree_invariant_for_addr_expr (*expr_p);
4902
4903 /* If we re-built the ADDR_EXPR add a conversion to the original type
4904 if required. */
4905 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4906 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 4907
6de9cd9a
DN
4908 break;
4909 }
4910
6de9cd9a
DN
4911 return ret;
4912}
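/* Illustration only, not part of gimplify.c: the '&*ptr' case handled in
   the INDIRECT_REF arm above.  Taking the address of a dereference yields
   the pointer itself, with a conversion added when the qualifiers of the
   two types differ.  Illustrative code, not compiler internals.  */
static const int *
address_of_dereference (int *p)
{
  return &*p;                  /* gimplifies to (const int *) p, no load */
}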
4913
4914/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4915 value; output operands should be a gimple lvalue. */
4916
4917static enum gimplify_status
726a989a 4918gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 4919{
726a989a
RB
4920 tree expr;
4921 int noutputs;
4922 const char **oconstraints;
6de9cd9a
DN
4923 int i;
4924 tree link;
4925 const char *constraint;
4926 bool allows_mem, allows_reg, is_inout;
4927 enum gimplify_status ret, tret;
726a989a 4928 gimple stmt;
9771b263
DN
4929 vec<tree, va_gc> *inputs;
4930 vec<tree, va_gc> *outputs;
4931 vec<tree, va_gc> *clobbers;
4932 vec<tree, va_gc> *labels;
726a989a 4933 tree link_next;
b8698a0f 4934
726a989a
RB
4935 expr = *expr_p;
4936 noutputs = list_length (ASM_OUTPUTS (expr));
4937 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4938
9771b263
DN
4939 inputs = NULL;
4940 outputs = NULL;
4941 clobbers = NULL;
4942 labels = NULL;
6de9cd9a 4943
6de9cd9a 4944 ret = GS_ALL_DONE;
726a989a
RB
4945 link_next = NULL_TREE;
4946 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 4947 {
2c68ba8e 4948 bool ok;
726a989a
RB
4949 size_t constraint_len;
4950
4951 link_next = TREE_CHAIN (link);
4952
4953 oconstraints[i]
4954 = constraint
6de9cd9a 4955 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
4956 constraint_len = strlen (constraint);
4957 if (constraint_len == 0)
4958 continue;
6de9cd9a 4959
2c68ba8e
LB
4960 ok = parse_output_constraint (&constraint, i, 0, 0,
4961 &allows_mem, &allows_reg, &is_inout);
4962 if (!ok)
4963 {
4964 ret = GS_ERROR;
4965 is_inout = false;
4966 }
6de9cd9a
DN
4967
4968 if (!allows_reg && allows_mem)
936d04b6 4969 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
4970
4971 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4972 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4973 fb_lvalue | fb_mayfail);
4974 if (tret == GS_ERROR)
4975 {
4976 error ("invalid lvalue in asm output %d", i);
4977 ret = tret;
4978 }
4979
9771b263 4980 vec_safe_push (outputs, link);
726a989a
RB
4981 TREE_CHAIN (link) = NULL_TREE;
4982
6de9cd9a
DN
4983 if (is_inout)
4984 {
4985 /* An input/output operand. To give the optimizers more
4986 flexibility, split it into separate input and output
4987 operands. */
4988 tree input;
4989 char buf[10];
6de9cd9a
DN
4990
4991 /* Turn the in/out constraint into an output constraint. */
4992 char *p = xstrdup (constraint);
4993 p[0] = '=';
4994 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
4995
4996 /* And add a matching input constraint. */
4997 if (allows_reg)
4998 {
4999 sprintf (buf, "%d", i);
372d72d9
JJ
5000
5001 /* If there are multiple alternatives in the constraint,
5002 handle each of them individually. Those that allow register
5003 will be replaced with operand number, the others will stay
5004 unchanged. */
5005 if (strchr (p, ',') != NULL)
5006 {
5007 size_t len = 0, buflen = strlen (buf);
5008 char *beg, *end, *str, *dst;
5009
5010 for (beg = p + 1;;)
5011 {
5012 end = strchr (beg, ',');
5013 if (end == NULL)
5014 end = strchr (beg, '\0');
5015 if ((size_t) (end - beg) < buflen)
5016 len += buflen + 1;
5017 else
5018 len += end - beg + 1;
5019 if (*end)
5020 beg = end + 1;
5021 else
5022 break;
5023 }
5024
858904db 5025 str = (char *) alloca (len);
372d72d9
JJ
5026 for (beg = p + 1, dst = str;;)
5027 {
5028 const char *tem;
5029 bool mem_p, reg_p, inout_p;
5030
5031 end = strchr (beg, ',');
5032 if (end)
5033 *end = '\0';
5034 beg[-1] = '=';
5035 tem = beg - 1;
5036 parse_output_constraint (&tem, i, 0, 0,
5037 &mem_p, &reg_p, &inout_p);
5038 if (dst != str)
5039 *dst++ = ',';
5040 if (reg_p)
5041 {
5042 memcpy (dst, buf, buflen);
5043 dst += buflen;
5044 }
5045 else
5046 {
5047 if (end)
5048 len = end - beg;
5049 else
5050 len = strlen (beg);
5051 memcpy (dst, beg, len);
5052 dst += len;
5053 }
5054 if (end)
5055 beg = end + 1;
5056 else
5057 break;
5058 }
5059 *dst = '\0';
5060 input = build_string (dst - str, str);
5061 }
5062 else
5063 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5064 }
5065 else
5066 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5067
5068 free (p);
5069
6de9cd9a
DN
5070 input = build_tree_list (build_tree_list (NULL_TREE, input),
5071 unshare_expr (TREE_VALUE (link)));
5072 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5073 }
5074 }
5075
726a989a
RB
5076 link_next = NULL_TREE;
5077 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5078 {
726a989a
RB
5079 link_next = TREE_CHAIN (link);
5080 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5081 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5082 oconstraints, &allows_mem, &allows_reg);
5083
f497c16c
JJ
5084 /* If we can't make copies, we can only accept memory. */
5085 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5086 {
5087 if (allows_mem)
5088 allows_reg = 0;
5089 else
5090 {
5091 error ("impossible constraint in %<asm%>");
5092 error ("non-memory input %d must stay in memory", i);
5093 return GS_ERROR;
5094 }
5095 }
5096
6de9cd9a
DN
5097 /* If the operand is a memory input, it should be an lvalue. */
5098 if (!allows_reg && allows_mem)
5099 {
502c5084
JJ
5100 tree inputv = TREE_VALUE (link);
5101 STRIP_NOPS (inputv);
5102 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5103 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5104 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5105 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5106 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
5107 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5108 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5109 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5110 if (tret == GS_ERROR)
5111 {
6a3799eb
AH
5112 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5113 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
5114 error ("memory input %d is not directly addressable", i);
5115 ret = tret;
5116 }
5117 }
5118 else
5119 {
5120 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5121 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5122 if (tret == GS_ERROR)
5123 ret = tret;
5124 }
726a989a
RB
5125
5126 TREE_CHAIN (link) = NULL_TREE;
9771b263 5127 vec_safe_push (inputs, link);
6de9cd9a 5128 }
b8698a0f 5129
ca081cc8
EB
5130 link_next = NULL_TREE;
5131 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5132 {
5133 link_next = TREE_CHAIN (link);
5134 TREE_CHAIN (link) = NULL_TREE;
5135 vec_safe_push (clobbers, link);
5136 }
1c384bf1 5137
ca081cc8
EB
5138 link_next = NULL_TREE;
5139 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5140 {
5141 link_next = TREE_CHAIN (link);
5142 TREE_CHAIN (link) = NULL_TREE;
5143 vec_safe_push (labels, link);
5144 }
726a989a 5145
a406865a
RG
5146 /* Do not add ASMs with errors to the gimple IL stream. */
5147 if (ret != GS_ERROR)
5148 {
5149 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5150 inputs, outputs, clobbers, labels);
726a989a 5151
a406865a
RG
5152 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5153 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5154
5155 gimplify_seq_add_stmt (pre_p, stmt);
5156 }
6de9cd9a
DN
5157
5158 return ret;
5159}
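/* Illustration only, not part of gimplify.c: the in/out operand splitting
   done above.  A "+r" operand is rewritten into a separate "=r" output plus
   a matching-numbered input, so the first asm below is handled like the
   second.  The asm templates are intentionally empty; this is a
   hypothetical, target-independent example.  */
static int
inout_operand (int x)
{
  __asm__ ("" : "+r" (x));             /* one in/out operand            */
  __asm__ ("" : "=r" (x) : "0" (x));   /* split: output plus input "0"  */
  return x;
}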
5160
5161/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5162 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
5163 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5164 return to this function.
5165
5166 FIXME should we complexify the prequeue handling instead? Or use flags
5167 for all the cleanups and let the optimizer tighten them up? The current
5168 code seems pretty fragile; it will break on a cleanup within any
5169 non-conditional nesting. But any such nesting would be broken, anyway;
5170 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5171 and continues out of it. We can do that at the RTL level, though, so
5172 having an optimizer to tighten up try/finally regions would be a Good
5173 Thing. */
5174
5175static enum gimplify_status
726a989a 5176gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5177{
726a989a
RB
5178 gimple_stmt_iterator iter;
5179 gimple_seq body_sequence = NULL;
6de9cd9a 5180
325c3691 5181 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
5182
5183 /* We only care about the number of conditions between the innermost
df77f454
JM
5184 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5185 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5186 int old_conds = gimplify_ctxp->conditions;
726a989a 5187 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 5188 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 5189 gimplify_ctxp->conditions = 0;
726a989a 5190 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 5191 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 5192
726a989a 5193 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
5194
5195 gimplify_ctxp->conditions = old_conds;
df77f454 5196 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 5197 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 5198
726a989a 5199 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5200 {
726a989a 5201 gimple wce = gsi_stmt (iter);
6de9cd9a 5202
726a989a 5203 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5204 {
726a989a 5205 if (gsi_one_before_end_p (iter))
6de9cd9a 5206 {
726a989a
RB
5207 /* Note that gsi_insert_seq_before and gsi_remove do not
5208 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
5209 if (!gimple_wce_cleanup_eh_only (wce))
5210 gsi_insert_seq_before_without_update (&iter,
5211 gimple_wce_cleanup (wce),
5212 GSI_SAME_STMT);
726a989a 5213 gsi_remove (&iter, true);
6de9cd9a
DN
5214 break;
5215 }
5216 else
5217 {
daa6e488 5218 gimple_statement_try *gtry;
726a989a
RB
5219 gimple_seq seq;
5220 enum gimple_try_flags kind;
40aac948 5221
726a989a
RB
5222 if (gimple_wce_cleanup_eh_only (wce))
5223 kind = GIMPLE_TRY_CATCH;
40aac948 5224 else
726a989a
RB
5225 kind = GIMPLE_TRY_FINALLY;
5226 seq = gsi_split_seq_after (iter);
5227
82d6e6fc 5228 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5229 /* Do not use gsi_replace here, as it may scan operands.
5230 We want to do a simple structural modification only. */
355a7673 5231 gsi_set_stmt (&iter, gtry);
daa6e488 5232 iter = gsi_start (gtry->eval);
6de9cd9a
DN
5233 }
5234 }
5235 else
726a989a 5236 gsi_next (&iter);
6de9cd9a
DN
5237 }
5238
726a989a 5239 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5240 if (temp)
5241 {
5242 *expr_p = temp;
6de9cd9a
DN
5243 return GS_OK;
5244 }
5245 else
5246 {
726a989a 5247 *expr_p = NULL;
6de9cd9a
DN
5248 return GS_ALL_DONE;
5249 }
5250}
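/* Illustration only, not part of gimplify.c: the kind of C construct whose
   cleanup ends up as the finally part of a GIMPLE_TRY.  The cleanup
   attribute asks for release_flag to run when guard goes out of scope; the
   remaining statements of the binding form the "try" body.  Hypothetical
   example; the exact tree path taken through the front end may differ.  */
static void release_flag (int *p) { *p = 0; }

static int
guarded_work (int x)
{
  int guard __attribute__ ((cleanup (release_flag))) = 1;
  return x + guard;   /* runs inside try {} finally { release_flag (&guard); } */
}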
5251
5252/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5253 is the cleanup action required. EH_ONLY is true if the cleanup should
5254 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5255
5256static void
726a989a 5257gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5258{
726a989a
RB
5259 gimple wce;
5260 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5261
5262 /* Errors can result in improperly nested cleanups. Which results in
726a989a 5263 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5264 if (seen_error ())
6de9cd9a
DN
5265 return;
5266
5267 if (gimple_conditional_context ())
5268 {
5269 /* If we're in a conditional context, this is more complex. We only
5270 want to run the cleanup if we actually ran the initialization that
5271 necessitates it, but we want to run it after the end of the
5272 conditional context. So we wrap the try/finally around the
5273 condition and use a flag to determine whether or not to actually
5274 run the destructor. Thus
5275
5276 test ? f(A()) : 0
5277
5278 becomes (approximately)
5279
5280 flag = 0;
5281 try {
5282 if (test) { A::A(temp); flag = 1; val = f(temp); }
5283 else { val = 0; }
5284 } finally {
5285 if (flag) A::~A(temp);
5286 }
5287 val
5288 */
6de9cd9a 5289 tree flag = create_tmp_var (boolean_type_node, "cleanup");
726a989a
RB
5290 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5291 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5292
b4257cfc 5293 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5294 gimplify_stmt (&cleanup, &cleanup_stmts);
5295 wce = gimple_build_wce (cleanup_stmts);
5296
5297 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5298 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5299 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5300
5301 /* Because of this manipulation, and the EH edges that jump
5302 threading cannot redirect, the temporary (VAR) will appear
5303 to be used uninitialized. Don't warn. */
5304 TREE_NO_WARNING (var) = 1;
5305 }
5306 else
5307 {
726a989a
RB
5308 gimplify_stmt (&cleanup, &cleanup_stmts);
5309 wce = gimple_build_wce (cleanup_stmts);
5310 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5311 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5312 }
6de9cd9a
DN
5313}
5314
5315/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5316
5317static enum gimplify_status
726a989a 5318gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5319{
5320 tree targ = *expr_p;
5321 tree temp = TARGET_EXPR_SLOT (targ);
5322 tree init = TARGET_EXPR_INITIAL (targ);
5323 enum gimplify_status ret;
5324
5325 if (init)
5326 {
d0ad58f9
JM
5327 tree cleanup = NULL_TREE;
5328
3a5b9284 5329 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
5330 to the temps list. Handle also variable length TARGET_EXPRs. */
5331 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5332 {
5333 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5334 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5335 gimplify_vla_decl (temp, pre_p);
5336 }
5337 else
5338 gimple_add_tmp_var (temp);
6de9cd9a 5339
3a5b9284
RH
5340 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5341 expression is supposed to initialize the slot. */
5342 if (VOID_TYPE_P (TREE_TYPE (init)))
5343 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5344 else
325c3691 5345 {
726a989a
RB
5346 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5347 init = init_expr;
5348 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5349 init = NULL;
5350 ggc_free (init_expr);
325c3691 5351 }
3a5b9284 5352 if (ret == GS_ERROR)
abc67de1
SM
5353 {
5354 /* PR c++/28266 Make sure this is expanded only once. */
5355 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5356 return GS_ERROR;
5357 }
726a989a
RB
5358 if (init)
5359 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5360
5361 /* If needed, push the cleanup for the temp. */
5362 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
5363 {
5364 if (CLEANUP_EH_ONLY (targ))
5365 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5366 CLEANUP_EH_ONLY (targ), pre_p);
5367 else
5368 cleanup = TARGET_EXPR_CLEANUP (targ);
5369 }
5370
5371 /* Add a clobber for the temporary going out of scope, like
5372 gimplify_bind_expr. */
32be32af 5373 if (gimplify_ctxp->in_cleanup_point_expr
87e2a8fd
XDL
5374 && needs_to_live_in_memory (temp)
5375 && flag_stack_reuse == SR_ALL)
d0ad58f9 5376 {
9771b263
DN
5377 tree clobber = build_constructor (TREE_TYPE (temp),
5378 NULL);
d0ad58f9
JM
5379 TREE_THIS_VOLATILE (clobber) = true;
5380 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5381 if (cleanup)
5382 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5383 clobber);
5384 else
5385 cleanup = clobber;
5386 }
5387
5388 if (cleanup)
5389 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
5390
5391 /* Only expand this once. */
5392 TREE_OPERAND (targ, 3) = init;
5393 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5394 }
282899df 5395 else
6de9cd9a 5396 /* We should have expanded this before. */
282899df 5397 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5398
5399 *expr_p = temp;
5400 return GS_OK;
5401}
5402
5403/* Gimplification of expression trees. */
5404
726a989a
RB
5405/* Gimplify an expression which appears at statement context. The
5406 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5407 NULL, a new sequence is allocated.
6de9cd9a 5408
726a989a
RB
5409 Return true if we actually added a statement to the queue. */
5410
5411bool
5412gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5413{
726a989a 5414 gimple_seq_node last;
6de9cd9a 5415
726a989a
RB
5416 last = gimple_seq_last (*seq_p);
5417 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5418 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5419}
5420
953ff289
DN
5421/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5422 to CTX. If entries already exist, force them to be some flavor of private.
5423 If there is no enclosing parallel, do nothing. */
5424
5425void
5426omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5427{
5428 splay_tree_node n;
5429
5430 if (decl == NULL || !DECL_P (decl))
5431 return;
5432
5433 do
5434 {
5435 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5436 if (n != NULL)
5437 {
5438 if (n->value & GOVD_SHARED)
5439 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
5440 else if (n->value & GOVD_MAP)
5441 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
5442 else
5443 return;
5444 }
acf0174b
JJ
5445 else if (ctx->region_type == ORT_TARGET)
5446 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
74bf76ed 5447 else if (ctx->region_type != ORT_WORKSHARE
acf0174b
JJ
5448 && ctx->region_type != ORT_SIMD
5449 && ctx->region_type != ORT_TARGET_DATA)
953ff289
DN
5450 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5451
5452 ctx = ctx->outer_context;
5453 }
5454 while (ctx);
5455}
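/* Illustration only, not part of gimplify.c: the situation the routine
   above handles.  When a variable-length array is used inside a parallel
   region, the compiler-generated expressions describing its type (its size,
   in terms of n below) have to be made firstprivate in the enclosing
   parallel so each thread can recompute them.  Hypothetical example,
   compiled with -fopenmp.  */
static double
vla_in_parallel (int n)
{
  double a[n];
  double sum = 0.0;
  for (int i = 0; i < n; i++)
    a[i] = i;
#pragma omp parallel for reduction (+: sum) shared (a)
  for (int i = 0; i < n; i++)
    sum += a[i];
  return sum;
}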
5456
5457/* Similarly for each of the type sizes of TYPE. */
5458
5459static void
5460omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5461{
5462 if (type == NULL || type == error_mark_node)
5463 return;
5464 type = TYPE_MAIN_VARIANT (type);
5465
5466 if (pointer_set_insert (ctx->privatized_types, type))
5467 return;
5468
5469 switch (TREE_CODE (type))
5470 {
5471 case INTEGER_TYPE:
5472 case ENUMERAL_TYPE:
5473 case BOOLEAN_TYPE:
953ff289 5474 case REAL_TYPE:
325217ed 5475 case FIXED_POINT_TYPE:
953ff289
DN
5476 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5477 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5478 break;
5479
5480 case ARRAY_TYPE:
5481 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5482 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5483 break;
5484
5485 case RECORD_TYPE:
5486 case UNION_TYPE:
5487 case QUAL_UNION_TYPE:
5488 {
5489 tree field;
910ad8de 5490 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5491 if (TREE_CODE (field) == FIELD_DECL)
5492 {
5493 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5494 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5495 }
5496 }
5497 break;
5498
5499 case POINTER_TYPE:
5500 case REFERENCE_TYPE:
5501 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5502 break;
5503
5504 default:
5505 break;
5506 }
5507
5508 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5509 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5510 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5511}
5512
5513/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5514
5515static void
5516omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5517{
5518 splay_tree_node n;
5519 unsigned int nflags;
5520 tree t;
5521
b504a918 5522 if (error_operand_p (decl))
953ff289
DN
5523 return;
5524
5525 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5526 there are constructors involved somewhere. */
5527 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5528 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5529 flags |= GOVD_SEEN;
5530
5531 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b 5532 if (n != NULL && n->value != GOVD_ALIGNED)
953ff289
DN
5533 {
5534 /* We shouldn't be re-adding the decl with the same data
5535 sharing class. */
5536 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5537 /* The only combination of data sharing classes we should see is
5538 FIRSTPRIVATE and LASTPRIVATE. */
5539 nflags = n->value | flags;
5540 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
74bf76ed
JJ
5541 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5542 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
5543 n->value = nflags;
5544 return;
5545 }
5546
5547 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5548 of additional bits of data: the pointer replacement variable, and
953ff289 5549 the parameters of the type. */
4c923c28 5550 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5551 {
5552 /* Add the pointer replacement variable as PRIVATE if the variable
5553 replacement is private, else FIRSTPRIVATE since we'll need the
5554 address of the original variable either for SHARED, or for the
5555 copy into or out of the context. */
5556 if (!(flags & GOVD_LOCAL))
5557 {
acf0174b
JJ
5558 nflags = flags & GOVD_MAP
5559 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5560 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
953ff289
DN
5561 nflags |= flags & GOVD_SEEN;
5562 t = DECL_VALUE_EXPR (decl);
5563 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5564 t = TREE_OPERAND (t, 0);
5565 gcc_assert (DECL_P (t));
5566 omp_add_variable (ctx, t, nflags);
5567 }
5568
5569 /* Add all of the variable and type parameters (which should have
5570 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5571 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5572 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5573 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5574
5575 /* The variable-sized variable itself is never SHARED, only some form
5576 of PRIVATE. The sharing would take place via the pointer variable
5577 which we remapped above. */
5578 if (flags & GOVD_SHARED)
5579 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5580 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5581
b8698a0f 5582 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5583 alloca statement we generate for the variable, so make sure it
5584 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5585 case, since we won't be allocating local storage then.
5586 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5587 in this case omp_notice_variable will be called later
5588 on when it is gimplified. */
acf0174b 5589 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 5590 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
5591 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5592 }
acf0174b
JJ
5593 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5594 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 5595 {
953ff289
DN
5596 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5597
5598 /* Similar to the direct variable sized case above, we'll need the
5599 size of references being privatized. */
5600 if ((flags & GOVD_SHARED) == 0)
5601 {
5602 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5603 if (TREE_CODE (t) != INTEGER_CST)
953ff289
DN
5604 omp_notice_variable (ctx, t, true);
5605 }
5606 }
5607
74bf76ed
JJ
5608 if (n != NULL)
5609 n->value |= flags;
5610 else
5611 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
953ff289
DN
5612}
5613
f22f4340
JJ
5614/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5615 This just prints out diagnostics about threadprivate variable uses
5616 in untied tasks. If DECL2 is non-NULL, prevent this warning
5617 on that variable. */
5618
5619static bool
5620omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5621 tree decl2)
5622{
5623 splay_tree_node n;
acf0174b
JJ
5624 struct gimplify_omp_ctx *octx;
5625
5626 for (octx = ctx; octx; octx = octx->outer_context)
5627 if (octx->region_type == ORT_TARGET)
5628 {
5629 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5630 if (n == NULL)
5631 {
5632 error ("threadprivate variable %qE used in target region",
5633 DECL_NAME (decl));
5634 error_at (octx->location, "enclosing target region");
5635 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5636 }
5637 if (decl2)
5638 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5639 }
f22f4340
JJ
5640
5641 if (ctx->region_type != ORT_UNTIED_TASK)
5642 return false;
5643 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5644 if (n == NULL)
5645 {
ad19c4be
EB
5646 error ("threadprivate variable %qE used in untied task",
5647 DECL_NAME (decl));
f22f4340
JJ
5648 error_at (ctx->location, "enclosing task");
5649 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5650 }
5651 if (decl2)
5652 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5653 return false;
5654}
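/* Illustration only, not part of gimplify.c: source code that triggers the
   diagnostics emitted above.  tp is threadprivate, so referencing it inside
   an untied task (and likewise inside a target region) is rejected.
   Hypothetical example.  */
static int tp;
#pragma omp threadprivate (tp)

static void
threadprivate_in_untied_task (void)
{
#pragma omp task untied
  tp++;      /* error: threadprivate variable 'tp' used in untied task */
}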
5655
953ff289
DN
5656/* Record the fact that DECL was used within the OpenMP context CTX.
5657 IN_CODE is true when real code uses DECL, and false when we should
5658 merely emit default(none) errors. Return true if DECL is going to
5659 be remapped and thus DECL shouldn't be gimplified into its
5660 DECL_VALUE_EXPR (if any). */
5661
5662static bool
5663omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5664{
5665 splay_tree_node n;
5666 unsigned flags = in_code ? GOVD_SEEN : 0;
5667 bool ret = false, shared;
5668
b504a918 5669 if (error_operand_p (decl))
953ff289
DN
5670 return false;
5671
5672 /* Threadprivate variables are predetermined. */
5673 if (is_global_var (decl))
5674 {
5675 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5676 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5677
5678 if (DECL_HAS_VALUE_EXPR_P (decl))
5679 {
5680 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5681
5682 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5683 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289
DN
5684 }
5685 }
5686
5687 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b
JJ
5688 if (ctx->region_type == ORT_TARGET)
5689 {
f014c653 5690 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
acf0174b
JJ
5691 if (n == NULL)
5692 {
5693 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5694 {
5695 error ("%qD referenced in target region does not have "
5696 "a mappable type", decl);
5697 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5698 }
5699 else
5700 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5701 }
5702 else
f014c653
JJ
5703 {
5704 /* If nothing changed, there's nothing left to do. */
5705 if ((n->value & flags) == flags)
5706 return ret;
5707 n->value |= flags;
5708 }
acf0174b
JJ
5709 goto do_outer;
5710 }
5711
953ff289
DN
5712 if (n == NULL)
5713 {
5714 enum omp_clause_default_kind default_kind, kind;
a68ab351 5715 struct gimplify_omp_ctx *octx;
953ff289 5716
74bf76ed 5717 if (ctx->region_type == ORT_WORKSHARE
acf0174b
JJ
5718 || ctx->region_type == ORT_SIMD
5719 || ctx->region_type == ORT_TARGET_DATA)
953ff289
DN
5720 goto do_outer;
5721
5722 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5723 remapped firstprivate instead of shared. To some extent this is
5724 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5725 default_kind = ctx->default_kind;
5726 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5727 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5728 default_kind = kind;
5729
5730 switch (default_kind)
5731 {
5732 case OMP_CLAUSE_DEFAULT_NONE:
09af4b4c 5733 if ((ctx->region_type & ORT_PARALLEL) != 0)
a5a5434f
TS
5734 {
5735 error ("%qE not specified in enclosing parallel",
5736 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5737 error_at (ctx->location, "enclosing parallel");
5738 }
5739 else if ((ctx->region_type & ORT_TASK) != 0)
acf0174b
JJ
5740 {
5741 error ("%qE not specified in enclosing task",
5742 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5743 error_at (ctx->location, "enclosing task");
5744 }
5745 else if (ctx->region_type == ORT_TEAMS)
5746 {
5747 error ("%qE not specified in enclosing teams construct",
5748 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5749 error_at (ctx->location, "enclosing teams construct");
5750 }
f22f4340 5751 else
a5a5434f 5752 gcc_unreachable ();
953ff289
DN
5753 /* FALLTHRU */
5754 case OMP_CLAUSE_DEFAULT_SHARED:
5755 flags |= GOVD_SHARED;
5756 break;
5757 case OMP_CLAUSE_DEFAULT_PRIVATE:
5758 flags |= GOVD_PRIVATE;
5759 break;
a68ab351
JJ
5760 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5761 flags |= GOVD_FIRSTPRIVATE;
5762 break;
5763 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5764 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5765 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5766 if (ctx->outer_context)
5767 omp_notice_variable (ctx->outer_context, decl, in_code);
5768 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5769 {
5770 splay_tree_node n2;
5771
acf0174b
JJ
5772 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5773 continue;
a68ab351
JJ
5774 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5775 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5776 {
5777 flags |= GOVD_FIRSTPRIVATE;
5778 break;
5779 }
acf0174b 5780 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
a68ab351
JJ
5781 break;
5782 }
5783 if (flags & GOVD_FIRSTPRIVATE)
5784 break;
5785 if (octx == NULL
5786 && (TREE_CODE (decl) == PARM_DECL
5787 || (!is_global_var (decl)
5788 && DECL_CONTEXT (decl) == current_function_decl)))
5789 {
5790 flags |= GOVD_FIRSTPRIVATE;
5791 break;
5792 }
5793 flags |= GOVD_SHARED;
5794 break;
953ff289
DN
5795 default:
5796 gcc_unreachable ();
5797 }
5798
a68ab351
JJ
5799 if ((flags & GOVD_PRIVATE)
5800 && lang_hooks.decls.omp_private_outer_ref (decl))
5801 flags |= GOVD_PRIVATE_OUTER_REF;
5802
953ff289
DN
5803 omp_add_variable (ctx, decl, flags);
5804
5805 shared = (flags & GOVD_SHARED) != 0;
5806 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5807 goto do_outer;
5808 }
5809
3ad6b266
JJ
5810 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5811 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5812 && DECL_SIZE (decl)
5813 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5814 {
5815 splay_tree_node n2;
5816 tree t = DECL_VALUE_EXPR (decl);
5817 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5818 t = TREE_OPERAND (t, 0);
5819 gcc_assert (DECL_P (t));
5820 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5821 n2->value |= GOVD_SEEN;
5822 }
5823
953ff289
DN
5824 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5825 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5826
5827 /* If nothing changed, there's nothing left to do. */
5828 if ((n->value & flags) == flags)
5829 return ret;
5830 flags |= n->value;
5831 n->value = flags;
5832
5833 do_outer:
5834 /* If the variable is private in the current context, then we don't
5835 need to propagate anything to an outer context. */
a68ab351 5836 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
5837 return ret;
5838 if (ctx->outer_context
5839 && omp_notice_variable (ctx->outer_context, decl, in_code))
5840 return true;
5841 return ret;
5842}
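/* Illustration only, not part of gimplify.c: the OMP_CLAUSE_DEFAULT_NONE
   path above.  With default(none), a variable referenced in the region but
   named in no data-sharing clause is diagnosed rather than implicitly
   shared.  Hypothetical example.  */
static int
default_none_example (int x)
{
  int y = 0;
#pragma omp parallel default (none) shared (y)
  y = x;     /* error: 'x' not specified in enclosing parallel */
  return y;
}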
5843
5844/* Verify that DECL is private within CTX. If there's specific information
5845 to the contrary in the innermost scope, generate an error. */
5846
5847static bool
f7468577 5848omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
953ff289
DN
5849{
5850 splay_tree_node n;
5851
5852 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5853 if (n != NULL)
5854 {
5855 if (n->value & GOVD_SHARED)
5856 {
5857 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 5858 {
74bf76ed
JJ
5859 if (simd)
5860 error ("iteration variable %qE is predetermined linear",
5861 DECL_NAME (decl));
5862 else
5863 error ("iteration variable %qE should be private",
5864 DECL_NAME (decl));
f6a5ffbf
JJ
5865 n->value = GOVD_PRIVATE;
5866 return true;
5867 }
5868 else
5869 return false;
953ff289 5870 }
761041be
JJ
5871 else if ((n->value & GOVD_EXPLICIT) != 0
5872 && (ctx == gimplify_omp_ctxp
a68ab351 5873 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
5874 && gimplify_omp_ctxp->outer_context == ctx)))
5875 {
5876 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
5877 error ("iteration variable %qE should not be firstprivate",
5878 DECL_NAME (decl));
761041be 5879 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
5880 error ("iteration variable %qE should not be reduction",
5881 DECL_NAME (decl));
f7468577 5882 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
74bf76ed
JJ
5883 error ("iteration variable %qE should not be lastprivate",
5884 DECL_NAME (decl));
5885 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5886 error ("iteration variable %qE should not be private",
5887 DECL_NAME (decl));
f7468577 5888 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
74bf76ed
JJ
5889 error ("iteration variable %qE is predetermined linear",
5890 DECL_NAME (decl));
761041be 5891 }
ca2b1311
JJ
5892 return (ctx == gimplify_omp_ctxp
5893 || (ctx->region_type == ORT_COMBINED_PARALLEL
5894 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
5895 }
5896
74bf76ed
JJ
5897 if (ctx->region_type != ORT_WORKSHARE
5898 && ctx->region_type != ORT_SIMD)
953ff289 5899 return false;
f6a5ffbf 5900 else if (ctx->outer_context)
74bf76ed 5901 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 5902 return false;
953ff289
DN
5903}
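/* Illustration only, not part of gimplify.c: code rejected by the checks
   above.  The iteration variable of an omp for loop is predetermined
   private (linear for simd), so naming it in a firstprivate clause on the
   same construct is diagnosed.  Hypothetical example.  */
static void
iteration_variable_misuse (int n)
{
  int i;
#pragma omp parallel for firstprivate (i)
  for (i = 0; i < n; i++)   /* error: iteration variable 'i' should not be firstprivate */
    ;
}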
5904
07b7aade
JJ
5905/* Return true if DECL is private within a parallel region
5906 that binds to the current construct's context or in parallel
5907 region's REDUCTION clause. */
5908
5909static bool
cab37c89 5910omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
07b7aade
JJ
5911{
5912 splay_tree_node n;
5913
5914 do
5915 {
5916 ctx = ctx->outer_context;
5917 if (ctx == NULL)
5918 return !(is_global_var (decl)
cab37c89
JJ
5919 /* References might be private, but might be shared too;
5920 when checking for copyprivate, assume they might be
5921 private, otherwise assume they might be shared. */
5922 || (!copyprivate
5923 && lang_hooks.decls.omp_privatize_by_reference (decl)));
07b7aade 5924
acf0174b
JJ
5925 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5926 continue;
5927
07b7aade
JJ
5928 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5929 if (n != NULL)
5930 return (n->value & GOVD_SHARED) == 0;
5931 }
74bf76ed
JJ
5932 while (ctx->region_type == ORT_WORKSHARE
5933 || ctx->region_type == ORT_SIMD);
07b7aade
JJ
5934 return false;
5935}
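/* Illustration only, not part of gimplify.c: what omp_check_private is used
   for.  x is private in the enclosing parallel, so listing it in a
   firstprivate clause on the nested worksharing construct is diagnosed as
   being private in the outer context.  Hypothetical example.  */
static void
private_in_outer_context (int n)
{
  int x = 0;
#pragma omp parallel private (x)
  {
#pragma omp for firstprivate (x)   /* error: firstprivate variable 'x' is private in outer context */
    for (int i = 0; i < n; i++)
      x = i;
  }
}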
5936
953ff289
DN
5937/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5938 and previous omp contexts. */
5939
5940static void
726a989a 5941gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 5942 enum omp_region_type region_type)
953ff289
DN
5943{
5944 struct gimplify_omp_ctx *ctx, *outer_ctx;
5945 tree c;
5946
a68ab351 5947 ctx = new_omp_context (region_type);
953ff289
DN
5948 outer_ctx = ctx->outer_context;
5949
5950 while ((c = *list_p) != NULL)
5951 {
953ff289
DN
5952 bool remove = false;
5953 bool notice_outer = true;
07b7aade 5954 const char *check_non_private = NULL;
953ff289
DN
5955 unsigned int flags;
5956 tree decl;
5957
aaf46ef9 5958 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5959 {
5960 case OMP_CLAUSE_PRIVATE:
5961 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
5962 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5963 {
5964 flags |= GOVD_PRIVATE_OUTER_REF;
5965 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5966 }
5967 else
5968 notice_outer = false;
953ff289
DN
5969 goto do_add;
5970 case OMP_CLAUSE_SHARED:
5971 flags = GOVD_SHARED | GOVD_EXPLICIT;
5972 goto do_add;
5973 case OMP_CLAUSE_FIRSTPRIVATE:
5974 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 5975 check_non_private = "firstprivate";
953ff289
DN
5976 goto do_add;
5977 case OMP_CLAUSE_LASTPRIVATE:
5978 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5979 check_non_private = "lastprivate";
953ff289
DN
5980 goto do_add;
5981 case OMP_CLAUSE_REDUCTION:
5982 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5983 check_non_private = "reduction";
953ff289 5984 goto do_add;
acf0174b
JJ
5985 case OMP_CLAUSE_LINEAR:
5986 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5987 is_gimple_val, fb_rvalue) == GS_ERROR)
5988 {
5989 remove = true;
5990 break;
5991 }
5992 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5993 goto do_add;
5994
5995 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
5996 decl = OMP_CLAUSE_DECL (c);
5997 if (error_operand_p (decl))
5998 {
5999 remove = true;
6000 break;
6001 }
6002 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6003 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6004 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6005 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6006 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
6007 {
6008 remove = true;
6009 break;
6010 }
acf0174b
JJ
6011 if (!DECL_P (decl))
6012 {
6013 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6014 NULL, is_gimple_lvalue, fb_lvalue)
6015 == GS_ERROR)
6016 {
6017 remove = true;
6018 break;
6019 }
6020 break;
6021 }
6022 flags = GOVD_MAP | GOVD_EXPLICIT;
6023 goto do_add;
6024
6025 case OMP_CLAUSE_DEPEND:
6026 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6027 {
6028 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6029 NULL, is_gimple_val, fb_rvalue);
6030 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6031 }
6032 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6033 {
6034 remove = true;
6035 break;
6036 }
6037 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6038 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6039 is_gimple_val, fb_rvalue) == GS_ERROR)
6040 {
6041 remove = true;
6042 break;
6043 }
6044 break;
6045
6046 case OMP_CLAUSE_TO:
6047 case OMP_CLAUSE_FROM:
b46ebd6c
JJ
6048 decl = OMP_CLAUSE_DECL (c);
6049 if (error_operand_p (decl))
acf0174b
JJ
6050 {
6051 remove = true;
6052 break;
6053 }
b46ebd6c
JJ
6054 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6055 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6056 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6057 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6058 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
6059 {
6060 remove = true;
6061 break;
6062 }
6063 if (!DECL_P (decl))
6064 {
6065 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6066 NULL, is_gimple_lvalue, fb_lvalue)
6067 == GS_ERROR)
6068 {
6069 remove = true;
6070 break;
6071 }
6072 break;
6073 }
6074 goto do_notice;
953ff289
DN
6075
6076 do_add:
6077 decl = OMP_CLAUSE_DECL (c);
b504a918 6078 if (error_operand_p (decl))
953ff289
DN
6079 {
6080 remove = true;
6081 break;
6082 }
6083 omp_add_variable (ctx, decl, flags);
693d710f 6084 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
6085 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6086 {
6087 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 6088 GOVD_LOCAL | GOVD_SEEN);
953ff289 6089 gimplify_omp_ctxp = ctx;
45852dcc 6090 push_gimplify_context ();
726a989a 6091
355a7673
MM
6092 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6093 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
6094
6095 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6096 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6097 pop_gimplify_context
6098 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 6099 push_gimplify_context ();
726a989a
RB
6100 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6101 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 6102 pop_gimplify_context
726a989a
RB
6103 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6104 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6105 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6106
953ff289
DN
6107 gimplify_omp_ctxp = outer_ctx;
6108 }
a68ab351
JJ
6109 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6111 {
6112 gimplify_omp_ctxp = ctx;
45852dcc 6113 push_gimplify_context ();
a68ab351
JJ
6114 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6115 {
6116 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6117 NULL, NULL);
6118 TREE_SIDE_EFFECTS (bind) = 1;
6119 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6120 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6121 }
726a989a
RB
6122 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6123 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6124 pop_gimplify_context
6125 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6126 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6127
dd2fc525
JJ
6128 gimplify_omp_ctxp = outer_ctx;
6129 }
6130 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6131 && OMP_CLAUSE_LINEAR_STMT (c))
6132 {
6133 gimplify_omp_ctxp = ctx;
6134 push_gimplify_context ();
6135 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6136 {
6137 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6138 NULL, NULL);
6139 TREE_SIDE_EFFECTS (bind) = 1;
6140 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6141 OMP_CLAUSE_LINEAR_STMT (c) = bind;
6142 }
6143 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6144 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6145 pop_gimplify_context
6146 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6147 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6148
a68ab351
JJ
6149 gimplify_omp_ctxp = outer_ctx;
6150 }
953ff289
DN
6151 if (notice_outer)
6152 goto do_notice;
6153 break;
6154
6155 case OMP_CLAUSE_COPYIN:
6156 case OMP_CLAUSE_COPYPRIVATE:
6157 decl = OMP_CLAUSE_DECL (c);
b504a918 6158 if (error_operand_p (decl))
953ff289
DN
6159 {
6160 remove = true;
6161 break;
6162 }
cab37c89
JJ
6163 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6164 && !remove
6165 && !omp_check_private (ctx, decl, true))
6166 {
6167 remove = true;
6168 if (is_global_var (decl))
6169 {
6170 if (DECL_THREAD_LOCAL_P (decl))
6171 remove = false;
6172 else if (DECL_HAS_VALUE_EXPR_P (decl))
6173 {
6174 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6175
6176 if (value
6177 && DECL_P (value)
6178 && DECL_THREAD_LOCAL_P (value))
6179 remove = false;
6180 }
6181 }
6182 if (remove)
6183 error_at (OMP_CLAUSE_LOCATION (c),
6184 "copyprivate variable %qE is not threadprivate"
6185 " or private in outer context", DECL_NAME (decl));
6186 }
953ff289
DN
6187 do_notice:
6188 if (outer_ctx)
6189 omp_notice_variable (outer_ctx, decl, true);
07b7aade 6190 if (check_non_private
a68ab351 6191 && region_type == ORT_WORKSHARE
cab37c89 6192 && omp_check_private (ctx, decl, false))
07b7aade 6193 {
4f1e4960
JM
6194 error ("%s variable %qE is private in outer context",
6195 check_non_private, DECL_NAME (decl));
07b7aade
JJ
6196 remove = true;
6197 }
953ff289
DN
6198 break;
6199
20906c66 6200 case OMP_CLAUSE_FINAL:
953ff289 6201 case OMP_CLAUSE_IF:
d568d1a8
RS
6202 OMP_CLAUSE_OPERAND (c, 0)
6203 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6204 /* Fall through. */
6205
6206 case OMP_CLAUSE_SCHEDULE:
953ff289 6207 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6208 case OMP_CLAUSE_NUM_TEAMS:
6209 case OMP_CLAUSE_THREAD_LIMIT:
6210 case OMP_CLAUSE_DIST_SCHEDULE:
6211 case OMP_CLAUSE_DEVICE:
726a989a
RB
6212 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6213 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 6214 remove = true;
953ff289
DN
6215 break;
6216
6217 case OMP_CLAUSE_NOWAIT:
6218 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
6219 case OMP_CLAUSE_UNTIED:
6220 case OMP_CLAUSE_COLLAPSE:
20906c66 6221 case OMP_CLAUSE_MERGEABLE:
acf0174b 6222 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6223 case OMP_CLAUSE_SAFELEN:
953ff289
DN
6224 break;
6225
acf0174b
JJ
6226 case OMP_CLAUSE_ALIGNED:
6227 decl = OMP_CLAUSE_DECL (c);
6228 if (error_operand_p (decl))
6229 {
6230 remove = true;
6231 break;
6232 }
b46ebd6c
JJ
6233 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6234 is_gimple_val, fb_rvalue) == GS_ERROR)
6235 {
6236 remove = true;
6237 break;
6238 }
acf0174b
JJ
6239 if (!is_global_var (decl)
6240 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6241 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6242 break;
6243
953ff289
DN
6244 case OMP_CLAUSE_DEFAULT:
6245 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6246 break;
6247
6248 default:
6249 gcc_unreachable ();
6250 }
6251
6252 if (remove)
6253 *list_p = OMP_CLAUSE_CHAIN (c);
6254 else
6255 list_p = &OMP_CLAUSE_CHAIN (c);
6256 }
6257
6258 gimplify_omp_ctxp = ctx;
6259}
6260
f014c653
JJ
6261struct gimplify_adjust_omp_clauses_data
6262{
6263 tree *list_p;
6264 gimple_seq *pre_p;
6265};
6266
953ff289
DN
6267/* For all variables that were not actually used within the context,
6268 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6269
6270static int
6271gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6272{
f014c653
JJ
6273 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
6274 gimple_seq *pre_p
6275 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
953ff289
DN
6276 tree decl = (tree) n->key;
6277 unsigned flags = n->value;
aaf46ef9 6278 enum omp_clause_code code;
953ff289
DN
6279 tree clause;
6280 bool private_debug;
6281
6282 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6283 return 0;
6284 if ((flags & GOVD_SEEN) == 0)
6285 return 0;
6286 if (flags & GOVD_DEBUG_PRIVATE)
6287 {
6288 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6289 private_debug = true;
6290 }
acf0174b
JJ
6291 else if (flags & GOVD_MAP)
6292 private_debug = false;
953ff289
DN
6293 else
6294 private_debug
6295 = lang_hooks.decls.omp_private_debug_clause (decl,
6296 !!(flags & GOVD_SHARED));
6297 if (private_debug)
6298 code = OMP_CLAUSE_PRIVATE;
acf0174b
JJ
6299 else if (flags & GOVD_MAP)
6300 code = OMP_CLAUSE_MAP;
953ff289
DN
6301 else if (flags & GOVD_SHARED)
6302 {
6303 if (is_global_var (decl))
64964499
JJ
6304 {
6305 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6306 while (ctx != NULL)
6307 {
6308 splay_tree_node on
6309 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6310 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed
JJ
6311 | GOVD_PRIVATE | GOVD_REDUCTION
6312 | GOVD_LINEAR)) != 0)
64964499
JJ
6313 break;
6314 ctx = ctx->outer_context;
6315 }
6316 if (ctx == NULL)
6317 return 0;
6318 }
953ff289
DN
6319 code = OMP_CLAUSE_SHARED;
6320 }
6321 else if (flags & GOVD_PRIVATE)
6322 code = OMP_CLAUSE_PRIVATE;
6323 else if (flags & GOVD_FIRSTPRIVATE)
6324 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
6325 else if (flags & GOVD_LASTPRIVATE)
6326 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
6327 else if (flags & GOVD_ALIGNED)
6328 return 0;
953ff289
DN
6329 else
6330 gcc_unreachable ();
6331
c2255bc4 6332 clause = build_omp_clause (input_location, code);
aaf46ef9 6333 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
6334 OMP_CLAUSE_CHAIN (clause) = *list_p;
6335 if (private_debug)
6336 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
6337 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6338 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
acf0174b
JJ
6339 else if (code == OMP_CLAUSE_MAP)
6340 {
6341 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6342 ? OMP_CLAUSE_MAP_TO
6343 : OMP_CLAUSE_MAP_TOFROM;
6344 if (DECL_SIZE (decl)
6345 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6346 {
6347 tree decl2 = DECL_VALUE_EXPR (decl);
6348 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6349 decl2 = TREE_OPERAND (decl2, 0);
6350 gcc_assert (DECL_P (decl2));
6351 tree mem = build_simple_mem_ref (decl2);
6352 OMP_CLAUSE_DECL (clause) = mem;
6353 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6354 if (gimplify_omp_ctxp->outer_context)
6355 {
6356 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6357 omp_notice_variable (ctx, decl2, true);
6358 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6359 }
6360 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6361 OMP_CLAUSE_MAP);
6362 OMP_CLAUSE_DECL (nc) = decl;
6363 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6364 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6365 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6366 OMP_CLAUSE_CHAIN (clause) = nc;
6367 }
b46ebd6c
JJ
6368 else
6369 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
6370 }
6371 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
6372 {
6373 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
6374 OMP_CLAUSE_DECL (nc) = decl;
6375 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
6376 OMP_CLAUSE_CHAIN (nc) = *list_p;
6377 OMP_CLAUSE_CHAIN (clause) = nc;
6378 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6379 gimplify_omp_ctxp = ctx->outer_context;
6380 lang_hooks.decls.omp_finish_clause (nc, pre_p);
6381 gimplify_omp_ctxp = ctx;
6382 }
6383 *list_p = clause;
6384 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6385 gimplify_omp_ctxp = ctx->outer_context;
6386 lang_hooks.decls.omp_finish_clause (clause, pre_p);
6387 gimplify_omp_ctxp = ctx;
6388 return 0;
6389}
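/* Added commentary (illustrative, not from the original sources): for a
   region that references an enclosing variable without listing it in any
   clause, e.g.

     int x = 0;
     #pragma omp parallel
     foo (x);                     /* foo is a placeholder function */

   the scan phase is expected to have recorded x with GOVD_SHARED | GOVD_SEEN
   and without GOVD_EXPLICIT, so the callback above turns that implicit data
   sharing into an OMP_CLAUSE_SHARED prepended to *list_p.  */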
6390
6391static void
6392gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
6393{
6394 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6395 tree c, decl;
6396
6397 while ((c = *list_p) != NULL)
6398 {
6399 splay_tree_node n;
6400 bool remove = false;
6401
6402 switch (OMP_CLAUSE_CODE (c))
6403 {
6404 case OMP_CLAUSE_PRIVATE:
6405 case OMP_CLAUSE_SHARED:
6406 case OMP_CLAUSE_FIRSTPRIVATE:
6407 case OMP_CLAUSE_LINEAR:
6408 decl = OMP_CLAUSE_DECL (c);
6409 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6410 remove = !(n->value & GOVD_SEEN);
6411 if (! remove)
6412 {
6413 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6414 if ((n->value & GOVD_DEBUG_PRIVATE)
6415 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6416 {
6417 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6418 || ((n->value & GOVD_DATA_SHARE_CLASS)
6419 == GOVD_PRIVATE));
6420 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6421 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6422 }
6423 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6424 && ctx->outer_context
6425 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6426 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6427 {
6428 if (ctx->outer_context->combined_loop
6429 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6430 {
6431 n = splay_tree_lookup (ctx->outer_context->variables,
6432 (splay_tree_key) decl);
6433 if (n == NULL
6434 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6435 {
6436 int flags = GOVD_FIRSTPRIVATE;
6437 /* #pragma omp distribute does not allow
6438 lastprivate clause. */
6439 if (!ctx->outer_context->distribute)
6440 flags |= GOVD_LASTPRIVATE;
6441 if (n == NULL)
6442 omp_add_variable (ctx->outer_context, decl,
6443 flags | GOVD_SEEN);
6444 else
6445 n->value |= flags | GOVD_SEEN;
6446 }
6447 }
6448 else if (!is_global_var (decl))
6449 omp_notice_variable (ctx->outer_context, decl, true);
6450 }
6451 }
6452 break;
6453
6454 case OMP_CLAUSE_LASTPRIVATE:
6455 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6456 accurately reflect the presence of a FIRSTPRIVATE clause. */
6457 decl = OMP_CLAUSE_DECL (c);
6458 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6459 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6460 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6461 break;
6462
6463 case OMP_CLAUSE_ALIGNED:
6464 decl = OMP_CLAUSE_DECL (c);
6465 if (!is_global_var (decl))
6466 {
6467 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6468 remove = n == NULL || !(n->value & GOVD_SEEN);
6469 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6470 {
6471 struct gimplify_omp_ctx *octx;
6472 if (n != NULL
6473 && (n->value & (GOVD_DATA_SHARE_CLASS
6474 & ~GOVD_FIRSTPRIVATE)))
6475 remove = true;
6476 else
6477 for (octx = ctx->outer_context; octx;
6478 octx = octx->outer_context)
6479 {
6480 n = splay_tree_lookup (octx->variables,
6481 (splay_tree_key) decl);
6482 if (n == NULL)
6483 continue;
6484 if (n->value & GOVD_LOCAL)
6485 break;
6486 /* We have to avoid assigning a shared variable
6487 to itself when trying to add
6488 __builtin_assume_aligned. */
6489 if (n->value & GOVD_SHARED)
6490 {
6491 remove = true;
6492 break;
6493 }
6494 }
6495 }
6496 }
6497 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6498 {
6499 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6500 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6501 remove = true;
6502 }
6503 break;
6504
6505 case OMP_CLAUSE_MAP:
6506 decl = OMP_CLAUSE_DECL (c);
6507 if (!DECL_P (decl))
6508 break;
6509 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6510 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6511 remove = true;
6512 else if (DECL_SIZE (decl)
6513 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6514 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6515 {
6516 tree decl2 = DECL_VALUE_EXPR (decl);
6517 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6518 decl2 = TREE_OPERAND (decl2, 0);
6519 gcc_assert (DECL_P (decl2));
6520 tree mem = build_simple_mem_ref (decl2);
6521 OMP_CLAUSE_DECL (c) = mem;
6522 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6523 if (ctx->outer_context)
6524 {
6525 omp_notice_variable (ctx->outer_context, decl2, true);
6526 omp_notice_variable (ctx->outer_context,
6527 OMP_CLAUSE_SIZE (c), true);
6528 }
6529 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6530 OMP_CLAUSE_MAP);
6531 OMP_CLAUSE_DECL (nc) = decl;
6532 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6533 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6534 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6535 OMP_CLAUSE_CHAIN (c) = nc;
6536 c = nc;
6537 }
6538 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6539 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6540 break;
6541
6542 case OMP_CLAUSE_TO:
6543 case OMP_CLAUSE_FROM:
6544 decl = OMP_CLAUSE_DECL (c);
6545 if (!DECL_P (decl))
6546 break;
6547 if (DECL_SIZE (decl)
6548 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6549 {
6550 tree decl2 = DECL_VALUE_EXPR (decl);
6551 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6552 decl2 = TREE_OPERAND (decl2, 0);
6553 gcc_assert (DECL_P (decl2));
6554 tree mem = build_simple_mem_ref (decl2);
6555 OMP_CLAUSE_DECL (c) = mem;
6556 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6557 if (ctx->outer_context)
6558 {
6559 omp_notice_variable (ctx->outer_context, decl2, true);
6560 omp_notice_variable (ctx->outer_context,
6561 OMP_CLAUSE_SIZE (c), true);
6562 }
6563 }
6564 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6565 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6566 break;
6567
6568 case OMP_CLAUSE_REDUCTION:
6569 case OMP_CLAUSE_COPYIN:
6570 case OMP_CLAUSE_COPYPRIVATE:
6571 case OMP_CLAUSE_IF:
6572 case OMP_CLAUSE_NUM_THREADS:
6573 case OMP_CLAUSE_NUM_TEAMS:
6574 case OMP_CLAUSE_THREAD_LIMIT:
6575 case OMP_CLAUSE_DIST_SCHEDULE:
6576 case OMP_CLAUSE_DEVICE:
6577 case OMP_CLAUSE_SCHEDULE:
6578 case OMP_CLAUSE_NOWAIT:
6579 case OMP_CLAUSE_ORDERED:
6580 case OMP_CLAUSE_DEFAULT:
6581 case OMP_CLAUSE_UNTIED:
6582 case OMP_CLAUSE_COLLAPSE:
6583 case OMP_CLAUSE_FINAL:
6584 case OMP_CLAUSE_MERGEABLE:
6585 case OMP_CLAUSE_PROC_BIND:
6586 case OMP_CLAUSE_SAFELEN:
6587 case OMP_CLAUSE_DEPEND:
6588 break;
6589
6590 default:
6591 gcc_unreachable ();
6592 }
6593
6594 if (remove)
6595 *list_p = OMP_CLAUSE_CHAIN (c);
6596 else
6597 list_p = &OMP_CLAUSE_CHAIN (c);
6598 }
6599
6600 /* Add in any implicit data sharing. */
6601 struct gimplify_adjust_omp_clauses_data data;
6602 data.list_p = list_p;
6603 data.pre_p = pre_p;
6604 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
6605
6606 gimplify_omp_ctxp = ctx->outer_context;
6607 delete_omp_context (ctx);
6608}
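/* Added commentary (illustrative): besides converting implicit sharing, the
   GOVD_SEEN checks above prune explicit privatization clauses whose variable
   is never referenced in the region.  For example, with

     int x, y;
     #pragma omp parallel private(x, y)
     foo (x);                     /* y is never used in the region */

   the private(y) clause would be removed from the list, while the final
   splay_tree_foreach adds clauses for any implicitly shared or privatized
   variables.  */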
6609
6610/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6611 gimplification of the body, as well as scanning the body for used
6612 variables. We need to do this scan now, because variable-sized
6613 decls will be decomposed during gimplification. */
6614
6615static void
6616gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6617{
6618 tree expr = *expr_p;
6619 gimple g;
6620 gimple_seq body = NULL;
6621
6622 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6623 OMP_PARALLEL_COMBINED (expr)
6624 ? ORT_COMBINED_PARALLEL
6625 : ORT_PARALLEL);
6626
6627 push_gimplify_context ();
6628
6629 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6630 if (gimple_code (g) == GIMPLE_BIND)
6631 pop_gimplify_context (g);
6632 else
6633 pop_gimplify_context (NULL);
6634
6635 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6636
6637 g = gimple_build_omp_parallel (body,
6638 OMP_PARALLEL_CLAUSES (expr),
6639 NULL_TREE, NULL_TREE);
6640 if (OMP_PARALLEL_COMBINED (expr))
6641 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6642 gimplify_seq_add_stmt (pre_p, g);
6643 *expr_p = NULL_TREE;
6644}
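/* Added commentary (illustrative): a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       a[i] = i;

   reaches this point with OMP_PARALLEL_COMBINED set, so its clauses are
   scanned as ORT_COMBINED_PARALLEL and the emitted GIMPLE_OMP_PARALLEL is
   tagged with the GF_OMP_PARALLEL_COMBINED subcode.  */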
6645
6646/* Gimplify the contents of an OMP_TASK statement. This involves
6647 gimplification of the body, as well as scanning the body for used
6648 variables. We need to do this scan now, because variable-sized
6649 decls will be decomposed during gimplification. */
6650
6651static void
6652gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6653{
6654 tree expr = *expr_p;
6655 gimple g;
6656 gimple_seq body = NULL;
6657
6658 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6659 find_omp_clause (OMP_TASK_CLAUSES (expr),
6660 OMP_CLAUSE_UNTIED)
6661 ? ORT_UNTIED_TASK : ORT_TASK);
6662
6663 push_gimplify_context ();
6664
6665 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6666 if (gimple_code (g) == GIMPLE_BIND)
6667 pop_gimplify_context (g);
6668 else
6669 pop_gimplify_context (NULL);
6670
6671 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6672
6673 g = gimple_build_omp_task (body,
6674 OMP_TASK_CLAUSES (expr),
6675 NULL_TREE, NULL_TREE,
6676 NULL_TREE, NULL_TREE, NULL_TREE);
6677 gimplify_seq_add_stmt (pre_p, g);
6678 *expr_p = NULL_TREE;
6679}
6680
6681/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6682 with non-NULL OMP_FOR_INIT. */
6683
6684static tree
6685find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6686{
6687 *walk_subtrees = 0;
6688 switch (TREE_CODE (*tp))
6689 {
6690 case OMP_FOR:
6691 *walk_subtrees = 1;
6692 /* FALLTHRU */
6693 case OMP_SIMD:
6694 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6695 return *tp;
6696 break;
6697 case BIND_EXPR:
6698 case STATEMENT_LIST:
6699 case OMP_PARALLEL:
6700 *walk_subtrees = 1;
6701 break;
6702 default:
6703 break;
6704 }
6705 return NULL_TREE;
6706}
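/* Added commentary (illustrative): for a combined loop construct such as

     #pragma omp distribute parallel for simd
     for (i = 0; i < n; i++)
       a[i] += b[i];

   the outer loop nodes are expected to carry a NULL OMP_FOR_INIT;
   gimplify_omp_for uses this walker to find the innermost loop that actually
   owns the iteration variables (see the OMP_FOR_INIT == NULL_TREE handling
   below).  */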
6707
6708/* Gimplify the gross structure of an OMP_FOR statement. */
6709
6710static enum gimplify_status
6711gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6712{
6713 tree for_stmt, orig_for_stmt, decl, var, t;
6714 enum gimplify_status ret = GS_ALL_DONE;
6715 enum gimplify_status tret;
6716 gimple gfor;
6717 gimple_seq for_body, for_pre_body;
6718 int i;
6719 bool simd;
6720 bitmap has_decl_expr = NULL;
6721
6722 orig_for_stmt = for_stmt = *expr_p;
6723
6724 simd = (TREE_CODE (for_stmt) == OMP_SIMD
6725 || TREE_CODE (for_stmt) == CILK_SIMD);
6726 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6727 simd ? ORT_SIMD : ORT_WORKSHARE);
6728 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
6729 gimplify_omp_ctxp->distribute = true;
6730
6731 /* Handle OMP_FOR_INIT. */
6732 for_pre_body = NULL;
6733 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6734 {
6735 has_decl_expr = BITMAP_ALLOC (NULL);
6736 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6737 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6738 == VAR_DECL)
6739 {
6740 t = OMP_FOR_PRE_BODY (for_stmt);
6741 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6742 }
6743 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6744 {
6745 tree_stmt_iterator si;
6746 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6747 tsi_next (&si))
6748 {
6749 t = tsi_stmt (si);
6750 if (TREE_CODE (t) == DECL_EXPR
6751 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6752 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6753 }
6754 }
6755 }
6756 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6757 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6758
6759 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6760 {
6761 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6762 NULL, NULL);
6763 gcc_assert (for_stmt != NULL_TREE);
6764 gimplify_omp_ctxp->combined_loop = true;
6765 }
6766
6767 for_body = NULL;
6768 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6769 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6770 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6771 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6772 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6773 {
6774 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6775 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6776 decl = TREE_OPERAND (t, 0);
6777 gcc_assert (DECL_P (decl));
6778 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6779 || POINTER_TYPE_P (TREE_TYPE (decl)));
6780
6781 /* Make sure the iteration variable is private. */
6782 tree c = NULL_TREE;
6783 tree c2 = NULL_TREE;
6784 if (orig_for_stmt != for_stmt)
6785 /* Do this only on innermost construct for combined ones. */;
6786 else if (simd)
6787 {
6788 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6789 (splay_tree_key)decl);
6790 omp_is_private (gimplify_omp_ctxp, decl,
6791 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6792 != 1));
6793 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6794 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6795 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6796 {
6797 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6798 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6799 if (has_decl_expr
6800 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6801 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6802 OMP_CLAUSE_DECL (c) = decl;
6803 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6804 OMP_FOR_CLAUSES (for_stmt) = c;
6805 omp_add_variable (gimplify_omp_ctxp, decl,
6806 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6807 }
6808 else
6809 {
6810 bool lastprivate
6811 = (!has_decl_expr
6812 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6813 if (lastprivate
6814 && gimplify_omp_ctxp->outer_context
6815 && gimplify_omp_ctxp->outer_context->region_type
6816 == ORT_WORKSHARE
6817 && gimplify_omp_ctxp->outer_context->combined_loop
6818 && !gimplify_omp_ctxp->outer_context->distribute)
6819 {
6820 struct gimplify_omp_ctx *outer
6821 = gimplify_omp_ctxp->outer_context;
6822 n = splay_tree_lookup (outer->variables,
6823 (splay_tree_key) decl);
6824 if (n != NULL
6825 && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL)
6826 lastprivate = false;
6827 else if (omp_check_private (outer, decl, false))
6828 error ("lastprivate variable %qE is private in outer "
6829 "context", DECL_NAME (decl));
6830 else
6831 {
6832 omp_add_variable (outer, decl,
6833 GOVD_LASTPRIVATE | GOVD_SEEN);
6834 if (outer->outer_context)
6835 omp_notice_variable (outer->outer_context, decl, true);
6836 }
6837 }
6838 c = build_omp_clause (input_location,
6839 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6840 : OMP_CLAUSE_PRIVATE);
6841 OMP_CLAUSE_DECL (c) = decl;
6842 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6843 OMP_FOR_CLAUSES (for_stmt) = c;
6844 omp_add_variable (gimplify_omp_ctxp, decl,
6845 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6846 | GOVD_EXPLICIT | GOVD_SEEN);
6847 c = NULL_TREE;
6848 }
6849 }
6850 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
6851 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6852 else
6853 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6854
6855 /* If DECL is not a gimple register, create a temporary variable to act
6856 as an iteration counter. This is valid, since DECL cannot be
6857 modified in the body of the loop. Similarly for any iteration vars
6858 in simd with collapse > 1 where the iterator vars must be
6859 lastprivate. */
6860 if (orig_for_stmt != for_stmt)
6861 var = decl;
6862 else if (!is_gimple_reg (decl)
6863 || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
6864 {
6865 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6866 TREE_OPERAND (t, 0) = var;
6867
6868 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6869
6870 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6871 {
6872 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6873 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
6874 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
6875 OMP_CLAUSE_DECL (c2) = var;
6876 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
6877 OMP_FOR_CLAUSES (for_stmt) = c2;
6878 omp_add_variable (gimplify_omp_ctxp, var,
6879 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6880 if (c == NULL_TREE)
6881 {
6882 c = c2;
6883 c2 = NULL_TREE;
6884 }
6885 }
6886 else
6887 omp_add_variable (gimplify_omp_ctxp, var,
6888 GOVD_PRIVATE | GOVD_SEEN);
6889 }
6890 else
6891 var = decl;
6892
6893 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6894 is_gimple_val, fb_rvalue);
6895 ret = MIN (ret, tret);
6896 if (ret == GS_ERROR)
6897 return ret;
6898
6899 /* Handle OMP_FOR_COND. */
6900 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6901 gcc_assert (COMPARISON_CLASS_P (t));
6902 gcc_assert (TREE_OPERAND (t, 0) == decl);
6903
6904 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6905 is_gimple_val, fb_rvalue);
6906 ret = MIN (ret, tret);
6907
6908 /* Handle OMP_FOR_INCR. */
6909 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6910 switch (TREE_CODE (t))
6911 {
6912 case PREINCREMENT_EXPR:
6913 case POSTINCREMENT_EXPR:
6914 {
6915 tree decl = TREE_OPERAND (t, 0);
6916 // c_omp_for_incr_canonicalize_ptr() should have been
6917 // called to massage things appropriately.
6918 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6919
6920 if (orig_for_stmt != for_stmt)
6921 break;
6922 t = build_int_cst (TREE_TYPE (decl), 1);
6923 if (c)
6924 OMP_CLAUSE_LINEAR_STEP (c) = t;
6925 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6926 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6927 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6928 break;
6929 }
6930
6931 case PREDECREMENT_EXPR:
6932 case POSTDECREMENT_EXPR:
6933 if (orig_for_stmt != for_stmt)
6934 break;
6935 t = build_int_cst (TREE_TYPE (decl), -1);
6936 if (c)
6937 OMP_CLAUSE_LINEAR_STEP (c) = t;
6938 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6939 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6940 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6941 break;
6942
6943 case MODIFY_EXPR:
6944 gcc_assert (TREE_OPERAND (t, 0) == decl);
6945 TREE_OPERAND (t, 0) = var;
6946
6947 t = TREE_OPERAND (t, 1);
6948 switch (TREE_CODE (t))
6949 {
6950 case PLUS_EXPR:
6951 if (TREE_OPERAND (t, 1) == decl)
6952 {
6953 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6954 TREE_OPERAND (t, 0) = var;
6955 break;
6956 }
6957
6958 /* Fallthru. */
6959 case MINUS_EXPR:
6960 case POINTER_PLUS_EXPR:
6961 gcc_assert (TREE_OPERAND (t, 0) == decl);
6962 TREE_OPERAND (t, 0) = var;
6963 break;
6964 default:
6965 gcc_unreachable ();
6966 }
6967
6968 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6969 is_gimple_val, fb_rvalue);
6970 ret = MIN (ret, tret);
6971 if (c)
6972 {
6973 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6974 if (TREE_CODE (t) == MINUS_EXPR)
6975 {
6976 t = TREE_OPERAND (t, 1);
6977 OMP_CLAUSE_LINEAR_STEP (c)
6978 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6979 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6980 &for_pre_body, NULL,
6981 is_gimple_val, fb_rvalue);
6982 ret = MIN (ret, tret);
6983 }
6984 }
6985 break;
6986
6987 default:
6988 gcc_unreachable ();
6989 }
6990
6991 if (c2)
6992 {
6993 gcc_assert (c);
6994 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
6995 }
6996
6997 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6998 && orig_for_stmt == for_stmt)
6999 {
7000 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7001 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7002 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7003 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7004 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7005 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7006 && OMP_CLAUSE_DECL (c) == decl)
7007 {
7008 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7009 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7010 gcc_assert (TREE_OPERAND (t, 0) == var);
7011 t = TREE_OPERAND (t, 1);
7012 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7013 || TREE_CODE (t) == MINUS_EXPR
7014 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7015 gcc_assert (TREE_OPERAND (t, 0) == var);
7016 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7017 TREE_OPERAND (t, 1));
7018 gimple_seq *seq;
7019 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7020 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7021 else
7022 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7023 gimplify_assign (decl, t, seq);
7024 }
7025 }
7026 }
7027
7028 BITMAP_FREE (has_decl_expr);
7029
7030 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
7031
7032 if (orig_for_stmt != for_stmt)
7033 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7034 {
7035 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7036 decl = TREE_OPERAND (t, 0);
7037 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7038 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7039 TREE_OPERAND (t, 0) = var;
7040 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7041 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7042 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7043 }
7044
7045 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7046
7047 int kind;
7048 switch (TREE_CODE (orig_for_stmt))
7049 {
7050 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7051 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7052 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7053 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7054 default:
7055 gcc_unreachable ();
7056 }
7057 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7058 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7059 for_pre_body);
7060 if (orig_for_stmt != for_stmt)
7061 gimple_omp_for_set_combined_p (gfor, true);
7062 if (gimplify_omp_ctxp
7063 && (gimplify_omp_ctxp->combined_loop
7064 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7065 && gimplify_omp_ctxp->outer_context
7066 && gimplify_omp_ctxp->outer_context->combined_loop)))
7067 {
7068 gimple_omp_for_set_combined_into_p (gfor, true);
7069 if (gimplify_omp_ctxp->combined_loop)
7070 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7071 else
7072 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7073 }
7074
7075 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7076 {
7077 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7078 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7079 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7080 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7081 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7082 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7083 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7084 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7085 }
7086
7087 gimplify_seq_add_stmt (pre_p, gfor);
7088 if (ret != GS_ALL_DONE)
7089 return GS_ERROR;
7090 *expr_p = NULL_TREE;
7091 return GS_ALL_DONE;
7092}
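/* Added commentary (illustrative): for a simple simd loop such as

     #pragma omp simd
     for (i = 0; i < n; i++)
       a[i] = b[i];

   the code above gives the iteration variable an implicit OMP_CLAUSE_LINEAR
   with OMP_CLAUSE_LINEAR_NO_COPYIN set and with a step derived from the
   increment (1 for i++), unless the variable already appears in an explicit
   data-sharing clause on the construct.  */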
7093
7094/* Gimplify the gross structure of other OpenMP constructs.
7095 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7096 and OMP_TEAMS. */
7097
7098static void
7099gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
7100{
7101 tree expr = *expr_p;
7102 gimple stmt;
7103 gimple_seq body = NULL;
7104 enum omp_region_type ort = ORT_WORKSHARE;
7105
7106 switch (TREE_CODE (expr))
7107 {
7108 case OMP_SECTIONS:
7109 case OMP_SINGLE:
7110 break;
7111 case OMP_TARGET:
7112 ort = ORT_TARGET;
7113 break;
7114 case OMP_TARGET_DATA:
7115 ort = ORT_TARGET_DATA;
7116 break;
7117 case OMP_TEAMS:
7118 ort = ORT_TEAMS;
7119 break;
7120 default:
7121 gcc_unreachable ();
7122 }
7123 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7124 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7125 {
7126 push_gimplify_context ();
7127 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7128 if (gimple_code (g) == GIMPLE_BIND)
7129 pop_gimplify_context (g);
7130 else
7131 pop_gimplify_context (NULL);
7132 if (ort == ORT_TARGET_DATA)
7133 {
7134 gimple_seq cleanup = NULL;
7135 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7136 g = gimple_build_call (fn, 0);
7137 gimple_seq_add_stmt (&cleanup, g);
7138 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7139 body = NULL;
7140 gimple_seq_add_stmt (&body, g);
7141 }
7142 }
7143 else
7144 gimplify_and_add (OMP_BODY (expr), &body);
7145 gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
7146
7147 switch (TREE_CODE (expr))
7148 {
7149 case OMP_SECTIONS:
7150 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7151 break;
7152 case OMP_SINGLE:
7153 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7154 break;
7155 case OMP_TARGET:
7156 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7157 OMP_CLAUSES (expr));
7158 break;
7159 case OMP_TARGET_DATA:
7160 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7161 OMP_CLAUSES (expr));
7162 break;
7163 case OMP_TEAMS:
7164 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7165 break;
7166 default:
7167 gcc_unreachable ();
7168 }
7169
7170 gimplify_seq_add_stmt (pre_p, stmt);
7171 *expr_p = NULL_TREE;
7172}
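/* Added commentary (illustrative): for

     #pragma omp target data map(tofrom: a)
     {
       ...
     }

   the gimplified body above is wrapped in a GIMPLE_TRY_FINALLY whose cleanup
   calls BUILT_IN_GOMP_TARGET_END_DATA, so the device data environment set up
   for the region is torn down on every path out of the body.  */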
7173
7174/* Gimplify the gross structure of OpenMP target update construct. */
7175
7176static void
7177gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7178{
7179 tree expr = *expr_p;
7180 gimple stmt;
7181
7182 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7183 ORT_WORKSHARE);
7184 gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr));
7185 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7186 OMP_TARGET_UPDATE_CLAUSES (expr));
7187
7188 gimplify_seq_add_stmt (pre_p, stmt);
7189 *expr_p = NULL_TREE;
7190}
7191
7192/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7193 stabilized the lhs of the atomic operation as *ADDR. Return true if
7194 EXPR is this stabilized form. */
7195
7196static bool
7197 goa_lhs_expr_p (tree expr, tree addr)
7198{
7199 /* Also include casts to other type variants. The C front end is fond
7200 of adding these for e.g. volatile variables. This is like
7201 STRIP_TYPE_NOPS but includes the main variant lookup. */
7202 STRIP_USELESS_TYPE_CONVERSION (expr);
7203
7204 if (TREE_CODE (expr) == INDIRECT_REF)
7205 {
7206 expr = TREE_OPERAND (expr, 0);
7207 while (expr != addr
7208 && (CONVERT_EXPR_P (expr)
7209 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7210 && TREE_CODE (expr) == TREE_CODE (addr)
7211 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7212 {
7213 expr = TREE_OPERAND (expr, 0);
7214 addr = TREE_OPERAND (addr, 0);
7215 }
7216 if (expr == addr)
7217 return true;
7218 return (TREE_CODE (addr) == ADDR_EXPR
7219 && TREE_CODE (expr) == ADDR_EXPR
7220 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7221 }
7222 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7223 return true;
7224 return false;
7225}
7226
7227/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7228 expression does not involve the lhs, evaluate it into a temporary.
7229 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7230 or -1 if an error was encountered. */
7231
7232static int
7233goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7234 tree lhs_var)
7235{
7236 tree expr = *expr_p;
7237 int saw_lhs;
7238
7239 if (goa_lhs_expr_p (expr, lhs_addr))
7240 {
7241 *expr_p = lhs_var;
7242 return 1;
7243 }
7244 if (is_gimple_val (expr))
7245 return 0;
7246
7247 saw_lhs = 0;
7248 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7249 {
7250 case tcc_binary:
7251 case tcc_comparison:
7252 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7253 lhs_var);
7254 case tcc_unary:
7255 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7256 lhs_var);
7257 break;
7258 case tcc_expression:
7259 switch (TREE_CODE (expr))
7260 {
7261 case TRUTH_ANDIF_EXPR:
7262 case TRUTH_ORIF_EXPR:
7263 case TRUTH_AND_EXPR:
7264 case TRUTH_OR_EXPR:
7265 case TRUTH_XOR_EXPR:
7266 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7267 lhs_addr, lhs_var);
7268 case TRUTH_NOT_EXPR:
7269 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7270 lhs_addr, lhs_var);
7271 break;
7272 case COMPOUND_EXPR:
7273 /* Break out any preevaluations from cp_build_modify_expr. */
7274 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7275 expr = TREE_OPERAND (expr, 1))
7276 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7277 *expr_p = expr;
7278 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7279 default:
7280 break;
7281 }
7282 break;
7283 default:
7284 break;
7285 }
7286
7287 if (saw_lhs == 0)
7288 {
7289 enum gimplify_status gs;
7290 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7291 if (gs != GS_ALL_DONE)
7292 saw_lhs = -1;
7293 }
7294
7295 return saw_lhs;
7296}
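/* Added commentary (illustrative): given

     #pragma omp atomic
     x = x + foo ();              /* foo is a placeholder function */

   with the lhs stabilized by the front end as *ADDR, the walk above replaces
   the occurrences of the lhs inside the rhs by LHS_VAR (the temporary that
   will receive the atomic load), while subexpressions such as the foo ()
   call, which do not involve the lhs, are gimplified into temporaries in
   *PRE_P ahead of the atomic sequence.  */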
7297
7298/* Gimplify an OMP_ATOMIC statement. */
7299
7300static enum gimplify_status
7301gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7302{
7303 tree addr = TREE_OPERAND (*expr_p, 0);
7304 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7305 ? NULL : TREE_OPERAND (*expr_p, 1);
7306 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7307 tree tmp_load;
7308 gimple loadstmt, storestmt;
7309
7310 tmp_load = create_tmp_reg (type, NULL);
7311 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7312 return GS_ERROR;
7313
7314 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7315 != GS_ALL_DONE)
7316 return GS_ERROR;
7317
7318 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7319 gimplify_seq_add_stmt (pre_p, loadstmt);
7320 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7321 != GS_ALL_DONE)
7322 return GS_ERROR;
7323
7324 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7325 rhs = tmp_load;
7326 storestmt = gimple_build_omp_atomic_store (rhs);
7327 gimplify_seq_add_stmt (pre_p, storestmt);
7328 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7329 {
7330 gimple_omp_atomic_set_seq_cst (loadstmt);
7331 gimple_omp_atomic_set_seq_cst (storestmt);
7332 }
7333 switch (TREE_CODE (*expr_p))
7334 {
7335 case OMP_ATOMIC_READ:
7336 case OMP_ATOMIC_CAPTURE_OLD:
7337 *expr_p = tmp_load;
7338 gimple_omp_atomic_set_need_value (loadstmt);
7339 break;
7340 case OMP_ATOMIC_CAPTURE_NEW:
7341 *expr_p = rhs;
7342 gimple_omp_atomic_set_need_value (storestmt);
7343 break;
7344 default:
7345 *expr_p = NULL;
7346 break;
7347 }
7348
7349 return GS_ALL_DONE;
7350}
7351
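/* Added commentary (illustrative): for the capture forms, e.g.

     #pragma omp atomic capture
     v = x++;                     /* captures the old value */
     #pragma omp atomic capture
     v = ++x;                     /* captures the new value */

   the front end is expected to hand us OMP_ATOMIC_CAPTURE_OLD resp.
   OMP_ATOMIC_CAPTURE_NEW, and the switch above returns either the loaded
   temporary or the stored rhs as the value of the whole expression, marking
   the corresponding atomic statement as needing a value.  */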
7352/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7353 body, and adding some EH bits. */
7354
7355static enum gimplify_status
7356gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7357{
7358 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7359 gimple g;
7360 gimple_seq body = NULL;
7361 int subcode = 0;
7362
7363 /* Wrap the transaction body in a BIND_EXPR so we have a context
7364 where to put decls for OpenMP. */
7365 if (TREE_CODE (tbody) != BIND_EXPR)
7366 {
7367 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7368 TREE_SIDE_EFFECTS (bind) = 1;
7369 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7370 TRANSACTION_EXPR_BODY (expr) = bind;
7371 }
7372
7373 push_gimplify_context ();
7374 temp = voidify_wrapper_expr (*expr_p, NULL);
7375
7376 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7377 pop_gimplify_context (g);
7378
7379 g = gimple_build_transaction (body, NULL);
7380 if (TRANSACTION_EXPR_OUTER (expr))
7381 subcode = GTMA_IS_OUTER;
7382 else if (TRANSACTION_EXPR_RELAXED (expr))
7383 subcode = GTMA_IS_RELAXED;
7384 gimple_transaction_set_subcode (g, subcode);
7385
7386 gimplify_seq_add_stmt (pre_p, g);
7387
7388 if (temp)
7389 {
7390 *expr_p = temp;
7391 return GS_OK;
7392 }
7393
7394 *expr_p = NULL_TREE;
7395 return GS_ALL_DONE;
7396}
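/* Added commentary (illustrative): a transactional memory construct such as

     __transaction_relaxed { f (); }

   is lowered here to a GIMPLE_TRANSACTION statement whose subcode carries
   GTMA_IS_RELAXED (or GTMA_IS_OUTER for an outer transaction), with the body
   first wrapped in a BIND_EXPR so that any OpenMP-related decls have a
   context to live in.  */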
7397
7398/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7399 expression produces a value to be used as an operand inside a GIMPLE
7400 statement, the value will be stored back in *EXPR_P. This value will
7401 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7402 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7403 emitted in PRE_P and POST_P.
7404
7405 Additionally, this process may overwrite parts of the input
7406 expression during gimplification. Ideally, it should be
7407 possible to do non-destructive gimplification.
7408
7409 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7410 the expression needs to evaluate to a value to be used as
7411 an operand in a GIMPLE statement, this value will be stored in
7412 *EXPR_P on exit. This happens when the caller specifies one
7413 of fb_lvalue or fb_rvalue fallback flags.
7414
7415 PRE_P will contain the sequence of GIMPLE statements corresponding
7416 to the evaluation of EXPR and all the side-effects that must
7417 be executed before the main expression. On exit, the last
7418 statement of PRE_P is the core statement being gimplified. For
7419 instance, when gimplifying 'if (++a)' the last statement in
7420 PRE_P will be 'if (t.1)' where t.1 is the result of
7421 pre-incrementing 'a'.
7422
7423 POST_P will contain the sequence of GIMPLE statements corresponding
7424 to the evaluation of all the side-effects that must be executed
7425 after the main expression. If this is NULL, the post
7426 side-effects are stored at the end of PRE_P.
7427
7428 The reason why the output is split in two is to handle post
7429 side-effects explicitly. In some cases, an expression may have
7430 inner and outer post side-effects which need to be emitted in
7431 an order different from the one given by the recursive
7432 traversal. For instance, for the expression (*p--)++ the post
7433 side-effects of '--' must actually occur *after* the post
7434 side-effects of '++'. However, gimplification will first visit
7435 the inner expression, so if a separate POST sequence was not
7436 used, the resulting sequence would be:
7437
7438 1 t.1 = *p
7439 2 p = p - 1
7440 3 t.2 = t.1 + 1
7441 4 *p = t.2
7442
7443 However, the post-decrement operation in line #2 must not be
7444 evaluated until after the store to *p at line #4, so the
7445 correct sequence should be:
7446
7447 1 t.1 = *p
7448 2 t.2 = t.1 + 1
7449 3 *p = t.2
7450 4 p = p - 1
7451
7452 So, by specifying a separate post queue, it is possible
7453 to emit the post side-effects in the correct order.
7454 If POST_P is NULL, an internal queue will be used. Before
7455 returning to the caller, the sequence POST_P is appended to
7456 the main output sequence PRE_P.
7457
7458 GIMPLE_TEST_F points to a function that takes a tree T and
7459 returns nonzero if T is in the GIMPLE form requested by the
7460 caller. The GIMPLE predicates are in gimple.c.
7461
7462 FALLBACK tells the function what sort of a temporary we want if
7463 gimplification cannot produce an expression that complies with
7464 GIMPLE_TEST_F.
7465
7466 fb_none means that no temporary should be generated
7467 fb_rvalue means that an rvalue is OK to generate
7468 fb_lvalue means that an lvalue is OK to generate
7469 fb_either means that either is OK, but an lvalue is preferable.
7470 fb_mayfail means that gimplification may fail (in which case
7471 GS_ERROR will be returned)
7472
7473 The return value is either GS_ERROR or GS_ALL_DONE, since this
7474 function iterates until EXPR is completely gimplified or an error
7475 occurs. */
7476
7477enum gimplify_status
7478gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7479 bool (*gimple_test_f) (tree), fallback_t fallback)
7480{
7481 tree tmp;
7482 gimple_seq internal_pre = NULL;
7483 gimple_seq internal_post = NULL;
7484 tree save_expr;
7485 bool is_statement;
7486 location_t saved_location;
7487 enum gimplify_status ret;
7488 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7489
7490 save_expr = *expr_p;
7491 if (save_expr == NULL_TREE)
7492 return GS_ALL_DONE;
7493
7494 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7495 is_statement = gimple_test_f == is_gimple_stmt;
7496 if (is_statement)
7497 gcc_assert (pre_p);
7498
7499 /* Consistency checks. */
7500 if (gimple_test_f == is_gimple_reg)
7501 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7502 else if (gimple_test_f == is_gimple_val
7503 || gimple_test_f == is_gimple_call_addr
7504 || gimple_test_f == is_gimple_condexpr
7505 || gimple_test_f == is_gimple_mem_rhs
7506 || gimple_test_f == is_gimple_mem_rhs_or_call
7507 || gimple_test_f == is_gimple_reg_rhs
7508 || gimple_test_f == is_gimple_reg_rhs_or_call
7509 || gimple_test_f == is_gimple_asm_val
7510 || gimple_test_f == is_gimple_mem_ref_addr)
7511 gcc_assert (fallback & fb_rvalue);
7512 else if (gimple_test_f == is_gimple_min_lval
7513 || gimple_test_f == is_gimple_lvalue)
7514 gcc_assert (fallback & fb_lvalue);
7515 else if (gimple_test_f == is_gimple_addressable)
7516 gcc_assert (fallback & fb_either);
7517 else if (gimple_test_f == is_gimple_stmt)
7518 gcc_assert (fallback == fb_none);
7519 else
7520 {
7521 /* We should have recognized the GIMPLE_TEST_F predicate to
7522 know what kind of fallback to use in case a temporary is
7523 needed to hold the value or address of *EXPR_P. */
7524 gcc_unreachable ();
7525 }
7526
7527 /* We used to check the predicate here and return immediately if it
7528 succeeds. This is wrong; the design is for gimplification to be
7529 idempotent, and for the predicates to only test for valid forms, not
7530 whether they are fully simplified. */
7531 if (pre_p == NULL)
7532 pre_p = &internal_pre;
7533
7534 if (post_p == NULL)
7535 post_p = &internal_post;
7536
7537 /* Remember the last statements added to PRE_P and POST_P. Every
7538 new statement added by the gimplification helpers needs to be
7539 annotated with location information. To centralize the
7540 responsibility, we remember the last statement that had been
7541 added to both queues before gimplifying *EXPR_P. If
7542 gimplification produces new statements in PRE_P and POST_P, those
7543 statements will be annotated with the same location information
7544 as *EXPR_P. */
7545 pre_last_gsi = gsi_last (*pre_p);
7546 post_last_gsi = gsi_last (*post_p);
7547
7548 saved_location = input_location;
7549 if (save_expr != error_mark_node
7550 && EXPR_HAS_LOCATION (*expr_p))
7551 input_location = EXPR_LOCATION (*expr_p);
7552
7553 /* Loop over the specific gimplifiers until the toplevel node
7554 remains the same. */
7555 do
7556 {
7557 /* Strip away as many useless type conversions as possible
7558 at the toplevel. */
7559 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7560
7561 /* Remember the expr. */
7562 save_expr = *expr_p;
7563
7564 /* Die, die, die, my darling. */
7565 if (save_expr == error_mark_node
7566 || (TREE_TYPE (save_expr)
7567 && TREE_TYPE (save_expr) == error_mark_node))
7568 {
7569 ret = GS_ERROR;
7570 break;
7571 }
7572
7573 /* Do any language-specific gimplification. */
7574 ret = ((enum gimplify_status)
7575 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7576 if (ret == GS_OK)
7577 {
7578 if (*expr_p == NULL_TREE)
7579 break;
7580 if (*expr_p != save_expr)
7581 continue;
7582 }
7583 else if (ret != GS_UNHANDLED)
7584 break;
7585
7586 /* Make sure that all the cases set 'ret' appropriately. */
7587 ret = GS_UNHANDLED;
7588 switch (TREE_CODE (*expr_p))
7589 {
7590 /* First deal with the special cases. */
7591
7592 case POSTINCREMENT_EXPR:
7593 case POSTDECREMENT_EXPR:
7594 case PREINCREMENT_EXPR:
7595 case PREDECREMENT_EXPR:
7596 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7597 fallback != fb_none,
7598 TREE_TYPE (*expr_p));
7599 break;
7600
7601 case VIEW_CONVERT_EXPR:
7602 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7603 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7604 {
7605 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7606 post_p, is_gimple_val, fb_rvalue);
7607 recalculate_side_effects (*expr_p);
7608 break;
7609 }
7610 /* Fallthru. */
7611
6de9cd9a 7612 case ARRAY_REF:
44de5aeb
RK
7613 case ARRAY_RANGE_REF:
7614 case REALPART_EXPR:
7615 case IMAGPART_EXPR:
6de9cd9a
DN
7616 case COMPONENT_REF:
7617 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7618 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7619 break;
7620
7621 case COND_EXPR:
dae7ec87 7622 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7623
0223e4f5
JM
7624 /* C99 code may assign to an array in a structure value of a
7625 conditional expression, and this has undefined behavior
7626 only on execution, so create a temporary if an lvalue is
7627 required. */
7628 if (fallback == fb_lvalue)
7629 {
7630 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7631 mark_addressable (*expr_p);
7632 ret = GS_OK;
7633 }
7634 break;
7635
7636 case CALL_EXPR:
7637 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7638
7639 /* C99 code may assign to an array in a structure returned
7640 from a function, and this has undefined behavior only on
7641 execution, so create a temporary if an lvalue is
7642 required. */
7643 if (fallback == fb_lvalue)
7644 {
7645 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7646 mark_addressable (*expr_p);
7647 ret = GS_OK;
7648 }
7649 break;
7650
7651 case TREE_LIST:
7652 gcc_unreachable ();
7653
7654 case COMPOUND_EXPR:
7655 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7656 break;
7657
7658 case COMPOUND_LITERAL_EXPR:
7659 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7660 gimple_test_f, fallback);
7661 break;
7662
7663 case MODIFY_EXPR:
7664 case INIT_EXPR:
7665 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7666 fallback != fb_none);
7667 break;
7668
7669 case TRUTH_ANDIF_EXPR:
7670 case TRUTH_ORIF_EXPR:
7671 {
7672 /* Preserve the original type of the expression and the
7673 source location of the outer expression. */
7674 tree org_type = TREE_TYPE (*expr_p);
7675 *expr_p = gimple_boolify (*expr_p);
7676 *expr_p = build3_loc (input_location, COND_EXPR,
7677 org_type, *expr_p,
7678 fold_convert_loc
7679 (input_location,
7680 org_type, boolean_true_node),
7681 fold_convert_loc
7682 (input_location,
7683 org_type, boolean_false_node));
7684 ret = GS_OK;
7685 break;
7686 }
7687
7688 case TRUTH_NOT_EXPR:
7689 {
7690 tree type = TREE_TYPE (*expr_p);
7691 /* The parsers are careful to generate TRUTH_NOT_EXPR
7692 only with operands that are always zero or one.
7693 We do not fold here but handle the only interesting case
7694 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7695 *expr_p = gimple_boolify (*expr_p);
7696 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7697 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7698 TREE_TYPE (*expr_p),
7699 TREE_OPERAND (*expr_p, 0));
7700 else
7701 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7702 TREE_TYPE (*expr_p),
7703 TREE_OPERAND (*expr_p, 0),
7704 build_int_cst (TREE_TYPE (*expr_p), 1));
7705 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7706 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7707 ret = GS_OK;
7708 break;
7709 }
7710
7711 case ADDR_EXPR:
7712 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7713 break;
7714
7715 case ANNOTATE_EXPR:
7716 {
7717 tree cond = TREE_OPERAND (*expr_p, 0);
7718 tree kind = TREE_OPERAND (*expr_p, 1);
7719 tree type = TREE_TYPE (cond);
7720 if (!INTEGRAL_TYPE_P (type))
7721 {
7722 *expr_p = cond;
7723 ret = GS_OK;
7724 break;
7725 }
7726 tree tmp = create_tmp_var (type, NULL);
7727 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7728 gimple call
7729 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
7730 gimple_call_set_lhs (call, tmp);
7731 gimplify_seq_add_stmt (pre_p, call);
7732 *expr_p = tmp;
7733 ret = GS_ALL_DONE;
7734 break;
7735 }
7736
7737 case VA_ARG_EXPR:
7738 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7739 break;
7740
7741 CASE_CONVERT:
7742 if (IS_EMPTY_STMT (*expr_p))
7743 {
7744 ret = GS_ALL_DONE;
7745 break;
7746 }
7747
7748 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7749 || fallback == fb_none)
7750 {
7751 /* Just strip a conversion to void (or in void context) and
7752 try again. */
7753 *expr_p = TREE_OPERAND (*expr_p, 0);
7754 ret = GS_OK;
7755 break;
7756 }
7757
7758 ret = gimplify_conversion (expr_p);
7759 if (ret == GS_ERROR)
7760 break;
7761 if (*expr_p != save_expr)
7762 break;
7763 /* FALLTHRU */
7764
7765 case FIX_TRUNC_EXPR:
7766 /* unary_expr: ... | '(' cast ')' val | ... */
7767 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7768 is_gimple_val, fb_rvalue);
7769 recalculate_side_effects (*expr_p);
7770 break;
7771
7772 case INDIRECT_REF:
7773 {
7774 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7775 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7776 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7777
7778 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7779 if (*expr_p != save_expr)
7780 {
7781 ret = GS_OK;
7782 break;
7783 }
7784
7785 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7786 is_gimple_reg, fb_rvalue);
7787 if (ret == GS_ERROR)
7788 break;
7789
7790 recalculate_side_effects (*expr_p);
7791 *expr_p = fold_build2_loc (input_location, MEM_REF,
7792 TREE_TYPE (*expr_p),
7793 TREE_OPERAND (*expr_p, 0),
7794 build_int_cst (saved_ptr_type, 0));
7795 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7796 TREE_THIS_NOTRAP (*expr_p) = notrap;
7797 ret = GS_OK;
7798 break;
7799 }
7800
7801 /* We arrive here through the various re-gimplification paths. */
7802 case MEM_REF:
7803 /* First try re-folding the whole thing. */
7804 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7805 TREE_OPERAND (*expr_p, 0),
7806 TREE_OPERAND (*expr_p, 1));
7807 if (tmp)
7808 {
7809 *expr_p = tmp;
7810 recalculate_side_effects (*expr_p);
7811 ret = GS_OK;
7812 break;
7813 }
7814 /* Avoid re-gimplifying the address operand if it is already
7815 in suitable form. Re-gimplifying would mark the address
7816 operand addressable. Always gimplify when not in SSA form
7817 as we still may have to gimplify decls with value-exprs. */
7818 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7819 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7820 {
7821 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7822 is_gimple_mem_ref_addr, fb_rvalue);
7823 if (ret == GS_ERROR)
7824 break;
7825 }
7826 recalculate_side_effects (*expr_p);
7827 ret = GS_ALL_DONE;
7828 break;
7829
7830 /* Constants need not be gimplified. */
7831 case INTEGER_CST:
7832 case REAL_CST:
7833 case FIXED_CST:
7834 case STRING_CST:
7835 case COMPLEX_CST:
7836 case VECTOR_CST:
7837 /* Drop the overflow flag on constants, we do not want
7838 that in the GIMPLE IL. */
7839 if (TREE_OVERFLOW_P (*expr_p))
7840 *expr_p = drop_tree_overflow (*expr_p);
7841 ret = GS_ALL_DONE;
7842 break;
7843
7844 case CONST_DECL:
7845 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7846 CONST_DECL node. Otherwise the decl is replaceable by its
7847 value. */
7848 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7849 if (fallback & fb_lvalue)
7850 ret = GS_ALL_DONE;
7851 else
7852 {
7853 *expr_p = DECL_INITIAL (*expr_p);
7854 ret = GS_OK;
7855 }
7856 break;
7857
7858 case DECL_EXPR:
7859 ret = gimplify_decl_expr (expr_p, pre_p);
7860 break;
7861
7862 case BIND_EXPR:
7863 ret = gimplify_bind_expr (expr_p, pre_p);
7864 break;
7865
7866 case LOOP_EXPR:
7867 ret = gimplify_loop_expr (expr_p, pre_p);
7868 break;
7869
7870 case SWITCH_EXPR:
7871 ret = gimplify_switch_expr (expr_p, pre_p);
7872 break;
7873
6de9cd9a
DN
7874 case EXIT_EXPR:
7875 ret = gimplify_exit_expr (expr_p);
7876 break;
7877
7878 case GOTO_EXPR:
7879 /* If the target is not LABEL, then it is a computed jump
7880 and the target needs to be gimplified. */
7881 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7882 {
7883 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7884 NULL, is_gimple_val, fb_rvalue);
7885 if (ret == GS_ERROR)
7886 break;
7887 }
7888 gimplify_seq_add_stmt (pre_p,
7889 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7890 ret = GS_ALL_DONE;
7891 break;
7892
7893 case PREDICT_EXPR:
7894 gimplify_seq_add_stmt (pre_p,
7895 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7896 PREDICT_EXPR_OUTCOME (*expr_p)));
7897 ret = GS_ALL_DONE;
7898 break;
7899
7900 case LABEL_EXPR:
7901 ret = GS_ALL_DONE;
7902 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7903 == current_function_decl);
7904 gimplify_seq_add_stmt (pre_p,
7905 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7906 break;
7907
7908 case CASE_LABEL_EXPR:
7909 ret = gimplify_case_label_expr (expr_p, pre_p);
7910 break;
7911
7912 case RETURN_EXPR:
7913 ret = gimplify_return_expr (*expr_p, pre_p);
7914 break;
7915
7916 case CONSTRUCTOR:
7917 /* Don't reduce this in place; let gimplify_init_constructor work its
7918 magic. But if we're just elaborating this for side effects, just
7919 gimplify any element that has side-effects. */
7920 if (fallback == fb_none)
7921 {
7922 unsigned HOST_WIDE_INT ix;
7923 tree val;
7924 tree temp = NULL_TREE;
7925 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7926 if (TREE_SIDE_EFFECTS (val))
7927 append_to_statement_list (val, &temp);
7928
7929 *expr_p = temp;
7930 ret = temp ? GS_OK : GS_ALL_DONE;
7931 }
7932 /* C99 code may assign to an array in a constructed
7933 structure or union, and this has undefined behavior only
7934 on execution, so create a temporary if an lvalue is
7935 required. */
7936 else if (fallback == fb_lvalue)
7937 {
7938 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7939 mark_addressable (*expr_p);
7940 ret = GS_OK;
7941 }
7942 else
7943 ret = GS_ALL_DONE;
7944 break;
7945
7946 /* The following are special cases that are not handled by the
7947 original GIMPLE grammar. */
7948
7949 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7950 eliminated. */
7951 case SAVE_EXPR:
7952 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7953 break;
7954
7955 case BIT_FIELD_REF:
ea814c66
EB
7956 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7957 post_p, is_gimple_lvalue, fb_either);
7958 recalculate_side_effects (*expr_p);
6de9cd9a
DN
7959 break;
7960
150e3929
RG
7961 case TARGET_MEM_REF:
7962 {
7963 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7964
23a534a1 7965 if (TMR_BASE (*expr_p))
150e3929 7966 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 7967 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
7968 if (TMR_INDEX (*expr_p))
7969 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7970 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
7971 if (TMR_INDEX2 (*expr_p))
7972 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7973 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
7974 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7975 ret = MIN (r0, r1);
7976 }
7977 break;
7978
6de9cd9a
DN
7979 case NON_LVALUE_EXPR:
7980 /* This should have been stripped above. */
7981 gcc_unreachable ();
7982
7983 case ASM_EXPR:
7984 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7985 break;
7986
7987 case TRY_FINALLY_EXPR:
7988 case TRY_CATCH_EXPR:
7989 {
7990 gimple_seq eval, cleanup;
7991 gimple try_;
7992
7993 /* Calls to destructors are generated automatically in FINALLY/CATCH
7994 block. They should have location as UNKNOWN_LOCATION. However,
7995 gimplify_call_expr will reset these call stmts to input_location
7996 if it finds stmt's location is unknown. To prevent resetting for
7997 destructors, we set the input_location to unknown.
7998 Note that this only affects the destructor calls in FINALLY/CATCH
7999 block, and will automatically reset to its original value by the
8000 end of gimplify_expr. */
8001 input_location = UNKNOWN_LOCATION;
8002 eval = cleanup = NULL;
8003 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8004 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
8005 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8006 if (gimple_seq_empty_p (cleanup))
8007 {
8008 gimple_seq_add_seq (pre_p, eval);
8009 ret = GS_ALL_DONE;
8010 break;
8011 }
726a989a
RB
8012 try_ = gimple_build_try (eval, cleanup,
8013 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8014 ? GIMPLE_TRY_FINALLY
8015 : GIMPLE_TRY_CATCH);
e368f44f
DC
8016 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8017 gimple_set_location (try_, saved_location);
8018 else
8019 gimple_set_location (try_, EXPR_LOCATION (save_expr));
726a989a
RB
8020 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8021 gimple_try_set_catch_is_cleanup (try_,
8022 TRY_CATCH_IS_CLEANUP (*expr_p));
8023 gimplify_seq_add_stmt (pre_p, try_);
8024 ret = GS_ALL_DONE;
8025 break;
8026 }
6de9cd9a
DN
8027
8028 case CLEANUP_POINT_EXPR:
8029 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8030 break;
8031
8032 case TARGET_EXPR:
8033 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8034 break;
8035
8036 case CATCH_EXPR:
726a989a
RB
8037 {
8038 gimple c;
8039 gimple_seq handler = NULL;
8040 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8041 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8042 gimplify_seq_add_stmt (pre_p, c);
8043 ret = GS_ALL_DONE;
8044 break;
8045 }
6de9cd9a
DN
8046
8047 case EH_FILTER_EXPR:
726a989a
RB
8048 {
8049 gimple ehf;
8050 gimple_seq failure = NULL;
8051
8052 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8053 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 8054 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
8055 gimplify_seq_add_stmt (pre_p, ehf);
8056 ret = GS_ALL_DONE;
8057 break;
8058 }
6de9cd9a 8059
0f59171d
RH
8060 case OBJ_TYPE_REF:
8061 {
8062 enum gimplify_status r0, r1;
726a989a
RB
8063 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8064 post_p, is_gimple_val, fb_rvalue);
8065 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8066 post_p, is_gimple_val, fb_rvalue);
0f3a057a 8067 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
8068 ret = MIN (r0, r1);
8069 }
6de9cd9a
DN
8070 break;
8071
6de9cd9a
DN
8072 case LABEL_DECL:
8073 /* We get here when taking the address of a label. We mark
 8074 the label as "forced", meaning it can never be removed and
8075 it is a potential target for any computed goto. */
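 /* For example, the GNU computed-goto extension "void *p = &&lab;" takes
 the address of LAB and ends up here; LAB must then be preserved. */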
8076 FORCED_LABEL (*expr_p) = 1;
8077 ret = GS_ALL_DONE;
8078 break;
8079
8080 case STATEMENT_LIST:
c6c7698d 8081 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
8082 break;
8083
d25cee4d
RH
8084 case WITH_SIZE_EXPR:
8085 {
70e2829d
KH
8086 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8087 post_p == &internal_post ? NULL : post_p,
8088 gimple_test_f, fallback);
8089 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8090 is_gimple_val, fb_rvalue);
941f78d1 8091 ret = GS_ALL_DONE;
d25cee4d
RH
8092 }
8093 break;
8094
6de9cd9a 8095 case VAR_DECL:
4744afba 8096 case PARM_DECL:
a9f7c570 8097 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
8098 break;
8099
077b0dfb
JJ
8100 case RESULT_DECL:
8101 /* When within an OpenMP context, notice uses of variables. */
8102 if (gimplify_omp_ctxp)
8103 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8104 ret = GS_ALL_DONE;
8105 break;
8106
71956db3
RH
8107 case SSA_NAME:
8108 /* Allow callbacks into the gimplifier during optimization. */
8109 ret = GS_ALL_DONE;
8110 break;
8111
953ff289 8112 case OMP_PARALLEL:
726a989a
RB
8113 gimplify_omp_parallel (expr_p, pre_p);
8114 ret = GS_ALL_DONE;
953ff289
DN
8115 break;
8116
a68ab351 8117 case OMP_TASK:
726a989a
RB
8118 gimplify_omp_task (expr_p, pre_p);
8119 ret = GS_ALL_DONE;
a68ab351
JJ
8120 break;
8121
953ff289 8122 case OMP_FOR:
74bf76ed 8123 case OMP_SIMD:
c02065fc 8124 case CILK_SIMD:
acf0174b 8125 case OMP_DISTRIBUTE:
953ff289
DN
8126 ret = gimplify_omp_for (expr_p, pre_p);
8127 break;
8128
8129 case OMP_SECTIONS:
8130 case OMP_SINGLE:
acf0174b
JJ
8131 case OMP_TARGET:
8132 case OMP_TARGET_DATA:
8133 case OMP_TEAMS:
726a989a
RB
8134 gimplify_omp_workshare (expr_p, pre_p);
8135 ret = GS_ALL_DONE;
953ff289
DN
8136 break;
8137
acf0174b
JJ
8138 case OMP_TARGET_UPDATE:
8139 gimplify_omp_target_update (expr_p, pre_p);
8140 ret = GS_ALL_DONE;
8141 break;
8142
953ff289
DN
8143 case OMP_SECTION:
8144 case OMP_MASTER:
acf0174b 8145 case OMP_TASKGROUP:
953ff289
DN
8146 case OMP_ORDERED:
8147 case OMP_CRITICAL:
726a989a
RB
8148 {
8149 gimple_seq body = NULL;
8150 gimple g;
8151
8152 gimplify_and_add (OMP_BODY (*expr_p), &body);
8153 switch (TREE_CODE (*expr_p))
8154 {
8155 case OMP_SECTION:
8156 g = gimple_build_omp_section (body);
8157 break;
8158 case OMP_MASTER:
8159 g = gimple_build_omp_master (body);
8160 break;
acf0174b
JJ
8161 case OMP_TASKGROUP:
8162 {
8163 gimple_seq cleanup = NULL;
8164 tree fn
8165 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8166 g = gimple_build_call (fn, 0);
8167 gimple_seq_add_stmt (&cleanup, g);
8168 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8169 body = NULL;
8170 gimple_seq_add_stmt (&body, g);
8171 g = gimple_build_omp_taskgroup (body);
8172 }
8173 break;
726a989a
RB
8174 case OMP_ORDERED:
8175 g = gimple_build_omp_ordered (body);
8176 break;
8177 case OMP_CRITICAL:
8178 g = gimple_build_omp_critical (body,
8179 OMP_CRITICAL_NAME (*expr_p));
8180 break;
8181 default:
8182 gcc_unreachable ();
8183 }
8184 gimplify_seq_add_stmt (pre_p, g);
8185 ret = GS_ALL_DONE;
8186 break;
8187 }
953ff289
DN
8188
8189 case OMP_ATOMIC:
20906c66
JJ
8190 case OMP_ATOMIC_READ:
8191 case OMP_ATOMIC_CAPTURE_OLD:
8192 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
8193 ret = gimplify_omp_atomic (expr_p, pre_p);
8194 break;
8195
0a35513e
AH
8196 case TRANSACTION_EXPR:
8197 ret = gimplify_transaction (expr_p, pre_p);
8198 break;
8199
16949072
RG
8200 case TRUTH_AND_EXPR:
8201 case TRUTH_OR_EXPR:
8202 case TRUTH_XOR_EXPR:
1d15f620 8203 {
bd5d002e 8204 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 8205 tree new_type, xop0, xop1;
1d15f620 8206 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
8207 new_type = TREE_TYPE (*expr_p);
8208 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 8209 {
4b4455e5 8210 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
8211 ret = GS_OK;
8212 break;
8213 }
da5fb469 8214
bd5d002e
RG
8215 /* Boolified binary truth expressions are semantically equivalent
8216 to bitwise binary expressions. Canonicalize them to the
8217 bitwise variant. */
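 /* For example, a boolified "a && b" becomes the BIT_AND_EXPR "a & b";
 both operands are known to be zero or one, so the bitwise form is
 equivalent. */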
8218 switch (TREE_CODE (*expr_p))
8219 {
8220 case TRUTH_AND_EXPR:
8221 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8222 break;
8223 case TRUTH_OR_EXPR:
8224 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8225 break;
8226 case TRUTH_XOR_EXPR:
8227 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8228 break;
8229 default:
8230 break;
8231 }
fc1f4caf
KT
8232 /* Now make sure that operands have compatible type to
8233 expression's new_type. */
8234 xop0 = TREE_OPERAND (*expr_p, 0);
8235 xop1 = TREE_OPERAND (*expr_p, 1);
8236 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8237 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8238 new_type,
8239 xop0);
8240 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8241 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8242 new_type,
8243 xop1);
bd5d002e
RG
8244 /* Continue classified as tcc_binary. */
8245 goto expr_2;
da5fb469 8246 }
16949072
RG
8247
8248 case FMA_EXPR:
e6ed43b0 8249 case VEC_COND_EXPR:
2205ed25 8250 case VEC_PERM_EXPR:
16949072
RG
8251 /* Classified as tcc_expression. */
8252 goto expr_3;
8253
5be014d5 8254 case POINTER_PLUS_EXPR:
315f5f1b
RG
8255 {
8256 enum gimplify_status r0, r1;
8257 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8258 post_p, is_gimple_val, fb_rvalue);
8259 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8260 post_p, is_gimple_val, fb_rvalue);
8261 recalculate_side_effects (*expr_p);
8262 ret = MIN (r0, r1);
8263 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8264 after gimplifying operands - this is similar to how
8265 it would be folding all gimplified stmts on creation
8266 to have them canonicalized, which is what we eventually
8267 should do anyway. */
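 /* For example, "&a + 4" with "a" a global array becomes the address of
 a MEM_REF of "a" at offset 4, which is_gimple_min_invariant accepts,
 so no extra temporary is needed. */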
8268 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8269 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8270 {
8271 *expr_p = build_fold_addr_expr_with_type_loc
8272 (input_location,
8273 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8274 TREE_OPERAND (*expr_p, 0),
8275 fold_convert (ptr_type_node,
8276 TREE_OPERAND (*expr_p, 1))),
8277 TREE_TYPE (*expr_p));
8278 ret = MIN (ret, GS_OK);
8279 }
8280 break;
8281 }
726a989a 8282
939b37da
BI
8283 case CILK_SYNC_STMT:
8284 {
8285 if (!fn_contains_cilk_spawn_p (cfun))
8286 {
8287 error_at (EXPR_LOCATION (*expr_p),
8288 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8289 ret = GS_ERROR;
8290 }
8291 else
8292 {
8293 gimplify_cilk_sync (expr_p, pre_p);
8294 ret = GS_ALL_DONE;
8295 }
8296 break;
8297 }
8298
6de9cd9a 8299 default:
282899df 8300 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 8301 {
6615c446 8302 case tcc_comparison:
61c25908
OH
 8303 /* Handle comparison of non-scalar-mode aggregate objects
8304 with a call to memcmp. It would be nice to only have to do
8305 this for variable-sized objects, but then we'd have to allow
8306 the same nest of reference nodes we allow for MODIFY_EXPR and
8307 that's too complex.
8308
8309 Compare scalar mode aggregates as scalar mode values. Using
8310 memcmp for them would be very inefficient at best, and is
8311 plain wrong if bitfields are involved. */
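 /* For example, an equality comparison of two variable-sized (BLKmode)
 structures is lowered to "memcmp (&s1, &s2, sizeof (s1)) == 0",
 whereas a structure that fits in a single scalar mode is compared
 as a value of that mode. */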
726a989a
RB
8312 {
8313 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 8314
544d960a
AS
8315 /* Vector comparisons need no boolification. */
8316 if (TREE_CODE (type) == VECTOR_TYPE)
8317 goto expr_2;
8318 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
8319 {
8320 tree org_type = TREE_TYPE (*expr_p);
8321 *expr_p = gimple_boolify (*expr_p);
8322 if (!useless_type_conversion_p (org_type,
8323 TREE_TYPE (*expr_p)))
8324 {
8325 *expr_p = fold_convert_loc (input_location,
8326 org_type, *expr_p);
8327 ret = GS_OK;
8328 }
8329 else
8330 goto expr_2;
8331 }
726a989a
RB
8332 else if (TYPE_MODE (type) != BLKmode)
8333 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8334 else
8335 ret = gimplify_variable_sized_compare (expr_p);
61c25908 8336
726a989a 8337 break;
61c25908 8338 }
d3147f64 8339
282899df
NS
8340 /* If *EXPR_P does not need to be special-cased, handle it
8341 according to its class. */
6615c446 8342 case tcc_unary:
282899df
NS
8343 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8344 post_p, is_gimple_val, fb_rvalue);
8345 break;
6de9cd9a 8346
6615c446 8347 case tcc_binary:
282899df
NS
8348 expr_2:
8349 {
8350 enum gimplify_status r0, r1;
d3147f64 8351
282899df 8352 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 8353 post_p, is_gimple_val, fb_rvalue);
282899df
NS
8354 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8355 post_p, is_gimple_val, fb_rvalue);
d3147f64 8356
282899df
NS
8357 ret = MIN (r0, r1);
8358 break;
8359 }
d3147f64 8360
16949072
RG
8361 expr_3:
8362 {
8363 enum gimplify_status r0, r1, r2;
8364
8365 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8366 post_p, is_gimple_val, fb_rvalue);
8367 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8368 post_p, is_gimple_val, fb_rvalue);
8369 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8370 post_p, is_gimple_val, fb_rvalue);
8371
8372 ret = MIN (MIN (r0, r1), r2);
8373 break;
8374 }
8375
6615c446
JO
8376 case tcc_declaration:
8377 case tcc_constant:
6de9cd9a 8378 ret = GS_ALL_DONE;
282899df 8379 goto dont_recalculate;
d3147f64 8380
282899df 8381 default:
16949072 8382 gcc_unreachable ();
6de9cd9a 8383 }
6de9cd9a
DN
8384
8385 recalculate_side_effects (*expr_p);
726a989a 8386
282899df 8387 dont_recalculate:
6de9cd9a
DN
8388 break;
8389 }
d3147f64 8390
941f78d1 8391 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
8392 }
8393 while (ret == GS_OK);
8394
8395 /* If we encountered an error_mark somewhere nested inside, either
8396 stub out the statement or propagate the error back out. */
8397 if (ret == GS_ERROR)
8398 {
8399 if (is_statement)
65355d53 8400 *expr_p = NULL;
6de9cd9a
DN
8401 goto out;
8402 }
8403
6de9cd9a
DN
8404 /* This was only valid as a return value from the langhook, which
8405 we handled. Make sure it doesn't escape from any other context. */
282899df 8406 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 8407
65355d53 8408 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
8409 {
8410 /* We aren't looking for a value, and we don't have a valid
8411 statement. If it doesn't have side-effects, throw it away. */
8412 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 8413 *expr_p = NULL;
6de9cd9a 8414 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
8415 {
8416 /* This is probably a _REF that contains something nested that
8417 has side effects. Recurse through the operands to find it. */
8418 enum tree_code code = TREE_CODE (*expr_p);
8419
282899df 8420 switch (code)
44de5aeb 8421 {
282899df 8422 case COMPONENT_REF:
02a5eac4
EB
8423 case REALPART_EXPR:
8424 case IMAGPART_EXPR:
8425 case VIEW_CONVERT_EXPR:
282899df
NS
8426 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8427 gimple_test_f, fallback);
8428 break;
8429
a9e64c63
EB
8430 case ARRAY_REF:
8431 case ARRAY_RANGE_REF:
44de5aeb
RK
8432 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8433 gimple_test_f, fallback);
8434 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
8435 gimple_test_f, fallback);
8436 break;
8437
8438 default:
8439 /* Anything else with side-effects must be converted to
a9e64c63 8440 a valid statement before we get here. */
282899df 8441 gcc_unreachable ();
44de5aeb 8442 }
44de5aeb 8443
65355d53 8444 *expr_p = NULL;
44de5aeb 8445 }
a9e64c63
EB
8446 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8447 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 8448 {
a9e64c63
EB
8449 /* Historically, the compiler has treated a bare reference
8450 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 8451 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 8452
c22b1771 8453 /* Normally, we do not want to create a temporary for a
a38578e1
MM
8454 TREE_ADDRESSABLE type because such a type should not be
8455 copied by bitwise-assignment. However, we make an
8456 exception here, as all we are doing here is ensuring that
8457 we read the bytes that make up the type. We use
8458 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 8459 given a TREE_ADDRESSABLE type. */
a38578e1
MM
8460 tree tmp = create_tmp_var_raw (type, "vol");
8461 gimple_add_tmp_var (tmp);
726a989a
RB
8462 gimplify_assign (tmp, *expr_p, pre_p);
8463 *expr_p = NULL;
6de9cd9a
DN
8464 }
8465 else
8466 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
8467 an incomplete type, so just throw it away. Likewise for
8468 a BLKmode type, since any implicit inner load should
8469 already have been turned into an explicit one by the
8470 gimplification process. */
65355d53 8471 *expr_p = NULL;
6de9cd9a
DN
8472 }
8473
8474 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 8475 everything together and return. */
325c3691 8476 if (fallback == fb_none || is_statement)
6de9cd9a 8477 {
726a989a
RB
8478 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8479 it out for GC to reclaim it. */
8480 *expr_p = NULL_TREE;
8481
8482 if (!gimple_seq_empty_p (internal_pre)
8483 || !gimple_seq_empty_p (internal_post))
be00f578 8484 {
726a989a
RB
8485 gimplify_seq_add_seq (&internal_pre, internal_post);
8486 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 8487 }
726a989a
RB
8488
8489 /* The result of gimplifying *EXPR_P is going to be the last few
8490 statements in *PRE_P and *POST_P. Add location information
8491 to all the statements that were added by the gimplification
8492 helpers. */
8493 if (!gimple_seq_empty_p (*pre_p))
8494 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8495
8496 if (!gimple_seq_empty_p (*post_p))
8497 annotate_all_with_location_after (*post_p, post_last_gsi,
8498 input_location);
8499
6de9cd9a
DN
8500 goto out;
8501 }
8502
726a989a
RB
8503#ifdef ENABLE_GIMPLE_CHECKING
8504 if (*expr_p)
8505 {
8506 enum tree_code code = TREE_CODE (*expr_p);
8507 /* These expressions should already be in gimple IR form. */
8508 gcc_assert (code != MODIFY_EXPR
8509 && code != ASM_EXPR
8510 && code != BIND_EXPR
8511 && code != CATCH_EXPR
6fc4fb06 8512 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
8513 && code != EH_FILTER_EXPR
8514 && code != GOTO_EXPR
8515 && code != LABEL_EXPR
8516 && code != LOOP_EXPR
726a989a
RB
8517 && code != SWITCH_EXPR
8518 && code != TRY_FINALLY_EXPR
8519 && code != OMP_CRITICAL
8520 && code != OMP_FOR
8521 && code != OMP_MASTER
acf0174b 8522 && code != OMP_TASKGROUP
726a989a
RB
8523 && code != OMP_ORDERED
8524 && code != OMP_PARALLEL
8525 && code != OMP_SECTIONS
8526 && code != OMP_SECTION
8527 && code != OMP_SINGLE);
8528 }
8529#endif
6de9cd9a 8530
726a989a
RB
8531 /* Otherwise we're gimplifying a subexpression, so the resulting
8532 value is interesting. If it's a valid operand that matches
8533 GIMPLE_TEST_F, we're done. Unless we are handling some
8534 post-effects internally; if that's the case, we need to copy into
8535 a temporary before adding the post-effects to POST_P. */
8536 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
8537 goto out;
8538
8539 /* Otherwise, we need to create a new temporary for the gimplified
8540 expression. */
8541
8542 /* We can't return an lvalue if we have an internal postqueue. The
8543 object the lvalue refers to would (probably) be modified by the
8544 postqueue; we need to copy the value out first, which means an
8545 rvalue. */
726a989a
RB
8546 if ((fallback & fb_lvalue)
8547 && gimple_seq_empty_p (internal_post)
e847cc68 8548 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
8549 {
8550 /* An lvalue will do. Take the address of the expression, store it
8551 in a temporary, and replace the expression with an INDIRECT_REF of
8552 that temporary. */
db3927fb 8553 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 8554 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 8555 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 8556 }
ba4d8f9d 8557 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 8558 {
726a989a
RB
8559 /* An rvalue will do. Assign the gimplified expression into a
8560 new temporary TMP and replace the original expression with
8561 TMP. First, make sure that the expression has a type so that
8562 it can be assigned into a temporary. */
282899df 8563 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 8564 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 8565 }
282899df 8566 else
6de9cd9a 8567 {
726a989a 8568#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
8569 if (!(fallback & fb_mayfail))
8570 {
8571 fprintf (stderr, "gimplification failed:\n");
8572 print_generic_expr (stderr, *expr_p, 0);
8573 debug_tree (*expr_p);
8574 internal_error ("gimplification failed");
8575 }
8576#endif
8577 gcc_assert (fallback & fb_mayfail);
726a989a 8578
282899df 8579 /* If this is an asm statement, and the user asked for the
535a42b1 8580 impossible, don't die. Fail and let gimplify_asm_expr
282899df 8581 issue an error. */
6de9cd9a
DN
8582 ret = GS_ERROR;
8583 goto out;
8584 }
6de9cd9a 8585
6de9cd9a 8586 /* Make sure the temporary matches our predicate. */
282899df 8587 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 8588
726a989a 8589 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 8590 {
726a989a
RB
8591 annotate_all_with_location (internal_post, input_location);
8592 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
8593 }
8594
8595 out:
8596 input_location = saved_location;
8597 return ret;
8598}
8599
44de5aeb 8600/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 8601 size that we find. Add to LIST_P any statements generated. */
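/* For a C99 VLA such as "int a[n][m]", this gimplifies the bound
 expressions and the derived TYPE_SIZE/TYPE_SIZE_UNIT into gimple
 values, so that all later uses of the type's size share the same
 computation. */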
44de5aeb 8602
65355d53 8603void
726a989a 8604gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 8605{
ad50bc8d
RH
8606 tree field, t;
8607
19dbbf36 8608 if (type == NULL || type == error_mark_node)
8e0a600b 8609 return;
ad50bc8d 8610
6c6cfbfd 8611 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 8612 type = TYPE_MAIN_VARIANT (type);
44de5aeb 8613
8e0a600b 8614 /* Avoid infinite recursion. */
19dbbf36 8615 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
8616 return;
8617
8618 TYPE_SIZES_GIMPLIFIED (type) = 1;
8619
44de5aeb
RK
8620 switch (TREE_CODE (type))
8621 {
44de5aeb
RK
8622 case INTEGER_TYPE:
8623 case ENUMERAL_TYPE:
8624 case BOOLEAN_TYPE:
44de5aeb 8625 case REAL_TYPE:
325217ed 8626 case FIXED_POINT_TYPE:
65355d53
RH
8627 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8628 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
8629
8630 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8631 {
8632 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8633 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 8634 }
44de5aeb
RK
8635 break;
8636
8637 case ARRAY_TYPE:
ad50bc8d 8638 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
8639 gimplify_type_sizes (TREE_TYPE (type), list_p);
8640 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
8641 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8642 with assigned stack slots, for -O1+ -g they should be tracked
8643 by VTA. */
08d78391
EB
8644 if (!(TYPE_NAME (type)
8645 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8646 && DECL_IGNORED_P (TYPE_NAME (type)))
8647 && TYPE_DOMAIN (type)
802e9f8e
JJ
8648 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8649 {
8650 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8651 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8652 DECL_IGNORED_P (t) = 0;
8653 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8654 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8655 DECL_IGNORED_P (t) = 0;
8656 }
44de5aeb
RK
8657 break;
8658
8659 case RECORD_TYPE:
8660 case UNION_TYPE:
8661 case QUAL_UNION_TYPE:
910ad8de 8662 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 8663 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
8664 {
8665 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
8666 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8667 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
8668 gimplify_type_sizes (TREE_TYPE (field), list_p);
8669 }
8670 break;
8671
8672 case POINTER_TYPE:
8673 case REFERENCE_TYPE:
706c4bb7
OH
8674 /* We used to recurse on the pointed-to type here, which turned out to
8675 be incorrect because its definition might refer to variables not
8676 yet initialized at this point if a forward declaration is involved.
8677
8678 It was actually useful for anonymous pointed-to types to ensure
8679 that the sizes evaluation dominates every possible later use of the
8680 values. Restricting to such types here would be safe since there
f63645be
KH
8681 is no possible forward declaration around, but would introduce an
8682 undesirable middle-end semantic to anonymity. We then defer to
8683 front-ends the responsibility of ensuring that the sizes are
8684 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 8685 type declarations to the tree. */
44de5aeb
RK
8686 break;
8687
8688 default:
8689 break;
8690 }
8691
65355d53
RH
8692 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8693 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 8694
ad50bc8d 8695 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 8696 {
ad50bc8d
RH
8697 TYPE_SIZE (t) = TYPE_SIZE (type);
8698 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8699 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 8700 }
b4830636
RH
8701}
8702
8703/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8704 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 8705 We add any required statements to *STMT_P. */
44de5aeb
RK
8706
8707void
726a989a 8708gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 8709{
3ac8781c 8710 tree expr = *expr_p;
a9c5ddf9 8711
44de5aeb 8712 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 8713 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 8714 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
8715 will want to replace it with a new variable, but that will cause problems
8716 if this type is from outside the function. It's OK to have that here. */
848be094 8717 if (is_gimple_sizepos (expr))
44de5aeb
RK
8718 return;
8719
a9c5ddf9
RH
8720 *expr_p = unshare_expr (expr);
8721
ad50bc8d 8722 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
44de5aeb 8723}
6de9cd9a 8724
3ad065ef
EB
8725/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8726 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8727 is true, also gimplify the parameters. */
726a989a
RB
8728
8729gimple
3ad065ef 8730gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
8731{
8732 location_t saved_location = input_location;
726a989a
RB
8733 gimple_seq parm_stmts, seq;
8734 gimple outer_bind;
9f9ebcdf 8735 struct cgraph_node *cgn;
6de9cd9a
DN
8736
8737 timevar_push (TV_TREE_GIMPLIFY);
953ff289 8738
f66d6761
SB
8739 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8740 gimplification. */
8741 default_rtl_profile ();
8742
953ff289 8743 gcc_assert (gimplify_ctxp == NULL);
45852dcc 8744 push_gimplify_context ();
6de9cd9a 8745
acf0174b
JJ
8746 if (flag_openmp)
8747 {
8748 gcc_assert (gimplify_omp_ctxp == NULL);
8749 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8750 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8751 }
8752
44de5aeb
RK
8753 /* Unshare most shared trees in the body and in that of any nested functions.
8754 It would seem we don't have to do this for nested functions because
8755 they are supposed to be output and then the outer function gimplified
8756 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
8757 unshare_body (fndecl);
8758 unvisit_body (fndecl);
6de9cd9a 8759
9f9ebcdf
MJ
8760 cgn = cgraph_get_node (fndecl);
8761 if (cgn && cgn->origin)
77f2a970
JJ
8762 nonlocal_vlas = pointer_set_create ();
8763
fa10beec 8764 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
8765 input_location = DECL_SOURCE_LOCATION (fndecl);
8766
4744afba
RH
8767 /* Resolve callee-copies. This has to be done before processing
8768 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 8769 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 8770
6de9cd9a 8771 /* Gimplify the function's body. */
726a989a 8772 seq = NULL;
3ad065ef 8773 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
726a989a
RB
8774 outer_bind = gimple_seq_first_stmt (seq);
8775 if (!outer_bind)
6de9cd9a 8776 {
726a989a
RB
8777 outer_bind = gimple_build_nop ();
8778 gimplify_seq_add_stmt (&seq, outer_bind);
6de9cd9a 8779 }
44de5aeb 8780
726a989a
RB
8781 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8782 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8783 if (gimple_code (outer_bind) == GIMPLE_BIND
8784 && gimple_seq_first (seq) == gimple_seq_last (seq))
8785 ;
8786 else
8787 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8788
3ad065ef 8789 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
8790
8791 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 8792 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 8793 if (!gimple_seq_empty_p (parm_stmts))
4744afba 8794 {
f0c10f0f
RG
8795 tree parm;
8796
726a989a
RB
8797 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8798 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
8799
8800 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 8801 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
8802 if (DECL_HAS_VALUE_EXPR_P (parm))
8803 {
8804 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8805 DECL_IGNORED_P (parm) = 0;
8806 }
4744afba
RH
8807 }
8808
77f2a970
JJ
8809 if (nonlocal_vlas)
8810 {
96ddb7ec
JJ
8811 if (nonlocal_vla_vars)
8812 {
8813 /* tree-nested.c may later on call declare_vars (..., true);
8814 which relies on BLOCK_VARS chain to be the tail of the
8815 gimple_bind_vars chain. Ensure we don't violate that
8816 assumption. */
8817 if (gimple_bind_block (outer_bind)
8818 == DECL_INITIAL (current_function_decl))
8819 declare_vars (nonlocal_vla_vars, outer_bind, true);
8820 else
8821 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8822 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8823 nonlocal_vla_vars);
8824 nonlocal_vla_vars = NULL_TREE;
8825 }
77f2a970
JJ
8826 pointer_set_destroy (nonlocal_vlas);
8827 nonlocal_vlas = NULL;
8828 }
8829
6d7f7e0a 8830 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
acf0174b
JJ
8831 {
8832 delete_omp_context (gimplify_omp_ctxp);
8833 gimplify_omp_ctxp = NULL;
8834 }
8835
726a989a 8836 pop_gimplify_context (outer_bind);
953ff289 8837 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 8838
07c5a154 8839#ifdef ENABLE_CHECKING
1da2ed5f 8840 if (!seen_error ())
34019e28 8841 verify_gimple_in_seq (gimple_bind_body (outer_bind));
07c5a154 8842#endif
6de9cd9a
DN
8843
8844 timevar_pop (TV_TREE_GIMPLIFY);
8845 input_location = saved_location;
726a989a
RB
8846
8847 return outer_bind;
6de9cd9a
DN
8848}
8849
6a1f6c9c 8850typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
8851
8852/* Return whether we should exclude FNDECL from instrumentation. */
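 /* For example, -finstrument-functions-exclude-function-list=foo,bar
 excludes every function whose printable name contains "foo" or "bar"
 as a substring; -finstrument-functions-exclude-file-list works the
 same way on source file names. */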
8853
8854static bool
8855flag_instrument_functions_exclude_p (tree fndecl)
8856{
9771b263 8857 vec<char_p> *v;
6a1f6c9c 8858
9771b263
DN
8859 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8860 if (v && v->length () > 0)
6a1f6c9c
JM
8861 {
8862 const char *name;
8863 int i;
8864 char *s;
8865
8866 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 8867 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8868 if (strstr (name, s) != NULL)
8869 return true;
8870 }
8871
9771b263
DN
8872 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8873 if (v && v->length () > 0)
6a1f6c9c
JM
8874 {
8875 const char *name;
8876 int i;
8877 char *s;
8878
8879 name = DECL_SOURCE_FILE (fndecl);
9771b263 8880 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8881 if (strstr (name, s) != NULL)
8882 return true;
8883 }
8884
8885 return false;
8886}
8887
6de9cd9a 8888/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 8889 node for the function we want to gimplify.
b8698a0f 8890
ad19c4be 8891 Return the sequence of GIMPLE statements corresponding to the body
726a989a 8892 of FNDECL. */
6de9cd9a
DN
8893
8894void
8895gimplify_function_tree (tree fndecl)
8896{
af16bc76 8897 tree parm, ret;
726a989a
RB
8898 gimple_seq seq;
8899 gimple bind;
6de9cd9a 8900
a406865a
RG
8901 gcc_assert (!gimple_body (fndecl));
8902
db2960f4
SL
8903 if (DECL_STRUCT_FUNCTION (fndecl))
8904 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8905 else
8906 push_struct_function (fndecl);
6de9cd9a 8907
910ad8de 8908 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
8909 {
8910 /* Preliminarily mark non-addressed complex variables as eligible
8911 for promotion to gimple registers. We'll transform their uses
8912 as we find them. */
0890b981
AP
8913 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8914 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
8915 && !TREE_THIS_VOLATILE (parm)
8916 && !needs_to_live_in_memory (parm))
0890b981 8917 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
8918 }
8919
8920 ret = DECL_RESULT (fndecl);
0890b981 8921 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 8922 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 8923 && !needs_to_live_in_memory (ret))
0890b981 8924 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 8925
3ad065ef 8926 bind = gimplify_body (fndecl, true);
726a989a
RB
8927
 8928 /* The tree body of the function is no longer needed; replace it
8929 with the new GIMPLE body. */
355a7673 8930 seq = NULL;
726a989a
RB
8931 gimple_seq_add_stmt (&seq, bind);
8932 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8933
8934 /* If we're instrumenting function entry/exit, then prepend the call to
8935 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8936 catch the exit hook. */
8937 /* ??? Add some way to ignore exceptions for this TFE. */
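 /* Roughly, the instrumented body becomes:
 t = __builtin_return_address (0);
 __cyg_profile_func_enter (this_fn, t);
 try { <original body> }
 finally { t = __builtin_return_address (0);
 __cyg_profile_func_exit (this_fn, t); }
 where this_fn is the address of the current function. */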
8938 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
8939 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8940 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 8941 {
726a989a
RB
8942 tree x;
8943 gimple new_bind;
8944 gimple tf;
8945 gimple_seq cleanup = NULL, body = NULL;
b01890ff
JH
8946 tree tmp_var;
8947 gimple call;
8948
e79983f4 8949 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8950 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8951 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8952 gimple_call_set_lhs (call, tmp_var);
8953 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 8954 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
8955 call = gimple_build_call (x, 2,
8956 build_fold_addr_expr (current_function_decl),
8957 tmp_var);
8958 gimplify_seq_add_stmt (&cleanup, call);
726a989a 8959 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 8960
e79983f4 8961 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8962 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8963 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8964 gimple_call_set_lhs (call, tmp_var);
8965 gimplify_seq_add_stmt (&body, call);
e79983f4 8966 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
8967 call = gimple_build_call (x, 2,
8968 build_fold_addr_expr (current_function_decl),
8969 tmp_var);
8970 gimplify_seq_add_stmt (&body, call);
726a989a 8971 gimplify_seq_add_stmt (&body, tf);
32001f69 8972 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
8973 /* Clear the block for BIND, since it is no longer directly inside
8974 the function, but within a try block. */
32001f69 8975 gimple_bind_set_block (bind, NULL);
6de9cd9a 8976
726a989a
RB
8977 /* Replace the current function body with the body
8978 wrapped in the try/finally TF. */
355a7673 8979 seq = NULL;
726a989a
RB
8980 gimple_seq_add_stmt (&seq, new_bind);
8981 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8982 }
8983
726a989a 8984 DECL_SAVED_TREE (fndecl) = NULL_TREE;
a406865a 8985 cfun->curr_properties = PROP_gimple_any;
726a989a 8986
db2960f4 8987 pop_cfun ();
6de9cd9a 8988}
726a989a 8989
4a7cb16f
AM
8990/* Return a dummy expression of type TYPE in order to keep going after an
8991 error. */
b184c8f1 8992
4a7cb16f
AM
8993static tree
8994dummy_object (tree type)
b184c8f1 8995{
4a7cb16f
AM
8996 tree t = build_int_cst (build_pointer_type (type), 0);
8997 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
8998}
8999
4a7cb16f
AM
9000/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9001 builtin function, but a very special sort of operator. */
b184c8f1 9002
4a7cb16f
AM
9003enum gimplify_status
9004gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
9005{
9006 tree promoted_type, have_va_type;
9007 tree valist = TREE_OPERAND (*expr_p, 0);
9008 tree type = TREE_TYPE (*expr_p);
9009 tree t;
9010 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 9011
4a7cb16f
AM
9012 /* Verify that valist is of the proper type. */
9013 have_va_type = TREE_TYPE (valist);
9014 if (have_va_type == error_mark_node)
9015 return GS_ERROR;
9016 have_va_type = targetm.canonical_va_list_type (have_va_type);
b184c8f1 9017
4a7cb16f
AM
9018 if (have_va_type == NULL_TREE)
9019 {
9020 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9021 return GS_ERROR;
9022 }
b184c8f1 9023
4a7cb16f
AM
9024 /* Generate a diagnostic for requesting data of a type that cannot
9025 be passed through `...' due to type promotion at the call site. */
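 /* For example, "va_arg (ap, char)" is diagnosed here: a char argument
 is promoted to int when passed through "...", so the access is
 undefined and is replaced below by a trap. */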
9026 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9027 != type)
9028 {
9029 static bool gave_help;
9030 bool warned;
b184c8f1 9031
4a7cb16f
AM
9032 /* Unfortunately, this is merely undefined, rather than a constraint
9033 violation, so we cannot make this an error. If this call is never
9034 executed, the program is still strictly conforming. */
9035 warned = warning_at (loc, 0,
9036 "%qT is promoted to %qT when passed through %<...%>",
9037 type, promoted_type);
9038 if (!gave_help && warned)
9039 {
9040 gave_help = true;
9041 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9042 promoted_type, type);
9043 }
b184c8f1 9044
4a7cb16f
AM
9045 /* We can, however, treat "undefined" any way we please.
9046 Call abort to encourage the user to fix the program. */
9047 if (warned)
9048 inform (loc, "if this code is reached, the program will abort");
9049 /* Before the abort, allow the evaluation of the va_list
9050 expression to exit or longjmp. */
9051 gimplify_and_add (valist, pre_p);
9052 t = build_call_expr_loc (loc,
9053 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
9054 gimplify_and_add (t, pre_p);
9055
4a7cb16f
AM
9056 /* This is dead code, but go ahead and finish so that the
9057 mode of the result comes out right. */
9058 *expr_p = dummy_object (type);
9059 return GS_ALL_DONE;
b184c8f1
AM
9060 }
9061 else
b184c8f1 9062 {
4a7cb16f
AM
9063 /* Make it easier for the backends by protecting the valist argument
9064 from multiple evaluations. */
9065 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9066 {
9067 /* For this case, the backends will be expecting a pointer to
9068 TREE_TYPE (abi), but it's possible we've
9069 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
9070 So fix it. */
9071 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9072 {
9073 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9074 valist = fold_convert_loc (loc, p1,
9075 build_fold_addr_expr_loc (loc, valist));
9076 }
b184c8f1 9077
4a7cb16f
AM
9078 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9079 }
9080 else
9081 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
b184c8f1 9082
4a7cb16f
AM
9083 if (!targetm.gimplify_va_arg_expr)
9084 /* FIXME: Once most targets are converted we should merely
9085 assert this is non-null. */
9086 return GS_ALL_DONE;
b184c8f1 9087
4a7cb16f
AM
9088 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9089 return GS_OK;
b184c8f1 9090 }
b184c8f1 9091}
bcf71673 9092
45b0be94
AM
9093/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9094
9095 DST/SRC are the destination and source respectively. You can pass
9096 ungimplified trees in DST or SRC, in which case they will be
9097 converted to a gimple operand if necessary.
9098
9099 This function returns the newly created GIMPLE_ASSIGN tuple. */
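 For example, gimplify_assign (tmp, *expr_p, pre_p) appends the
 assignment "tmp = *expr_p" (gimplifying both operands as needed)
 to *PRE_P and returns that GIMPLE_ASSIGN. */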
9100
9101gimple
9102gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9103{
9104 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9105 gimplify_and_add (t, seq_p);
9106 ggc_free (t);
9107 return gimple_seq_last_stmt (*seq_p);
9108}
9109
18f429e2
AM
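/* Hash a temporary-table entry: hash the saved expression it wraps. */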
9110inline hashval_t
9111gimplify_hasher::hash (const value_type *p)
9112{
9113 tree t = p->val;
9114 return iterative_hash_expr (t, 0);
9115}
9116
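/* Return whether two temporary-table entries wrap equivalent expressions,
 i.e. trees with the same code, type and operands. */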
9117inline bool
9118gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9119{
9120 tree t1 = p1->val;
9121 tree t2 = p2->val;
9122 enum tree_code code = TREE_CODE (t1);
9123
9124 if (TREE_CODE (t2) != code
9125 || TREE_TYPE (t1) != TREE_TYPE (t2))
9126 return false;
9127
9128 if (!operand_equal_p (t1, t2, 0))
9129 return false;
9130
9131#ifdef ENABLE_CHECKING
9132 /* Only allow them to compare equal if they also hash equal; otherwise
9133 results are nondeterminate, and we fail bootstrap comparison. */
9134 gcc_assert (hash (p1) == hash (p2));
9135#endif
9136
9137 return true;
9138}