gcc.gnu.org Git - gcc.git/blob - gcc/gimplify.cc
Commit: middle-end/107994 - ICE after error with comparison gimplification
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2022 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-iterator.h"
46 #include "gimple-fold.h"
47 #include "tree-eh.h"
48 #include "gimplify.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "tree-hash-traits.h"
57 #include "omp-general.h"
58 #include "omp-low.h"
59 #include "gimple-low.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "dbgcnt.h"
69 #include "omp-offload.h"
70 #include "context.h"
71 #include "tree-nested.h"
72
73 /* Hash set of poisoned variables in a bind expr. */
74 static hash_set<tree> *asan_poisoned_variables = NULL;
75
76 enum gimplify_omp_var_data
77 {
78 GOVD_SEEN = 0x000001,
79 GOVD_EXPLICIT = 0x000002,
80 GOVD_SHARED = 0x000004,
81 GOVD_PRIVATE = 0x000008,
82 GOVD_FIRSTPRIVATE = 0x000010,
83 GOVD_LASTPRIVATE = 0x000020,
84 GOVD_REDUCTION = 0x000040,
85 GOVD_LOCAL = 0x000080,
86 GOVD_MAP = 0x000100,
87 GOVD_DEBUG_PRIVATE = 0x000200,
88 GOVD_PRIVATE_OUTER_REF = 0x000400,
89 GOVD_LINEAR = 0x000800,
90 GOVD_ALIGNED = 0x001000,
91
92 /* Flag for GOVD_MAP: don't copy back. */
93 GOVD_MAP_TO_ONLY = 0x002000,
94
95 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
96 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
97
98 GOVD_MAP_0LEN_ARRAY = 0x008000,
99
100 /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping. */
101 GOVD_MAP_ALWAYS_TO = 0x010000,
102
103 /* Flag for shared vars that are or might be stored to in the region. */
104 GOVD_WRITTEN = 0x020000,
105
106 /* Flag for GOVD_MAP, if it is a forced mapping. */
107 GOVD_MAP_FORCE = 0x040000,
108
109 /* Flag for GOVD_MAP: must be present already. */
110 GOVD_MAP_FORCE_PRESENT = 0x080000,
111
112 /* Flag for GOVD_MAP: only allocate. */
113 GOVD_MAP_ALLOC_ONLY = 0x100000,
114
115 /* Flag for GOVD_MAP: only copy back. */
116 GOVD_MAP_FROM_ONLY = 0x200000,
117
118 GOVD_NONTEMPORAL = 0x400000,
119
120 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
121 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
122
123 GOVD_CONDTEMP = 0x1000000,
124
125 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
126 GOVD_REDUCTION_INSCAN = 0x2000000,
127
128 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
129 GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,
130
131 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
132 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
133 | GOVD_LOCAL)
134 };
135
136
137 enum omp_region_type
138 {
139 ORT_WORKSHARE = 0x00,
140 ORT_TASKGROUP = 0x01,
141 ORT_SIMD = 0x04,
142
143 ORT_PARALLEL = 0x08,
144 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
145
146 ORT_TASK = 0x10,
147 ORT_UNTIED_TASK = ORT_TASK | 1,
148 ORT_TASKLOOP = ORT_TASK | 2,
149 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
150
151 ORT_TEAMS = 0x20,
152 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
153 ORT_HOST_TEAMS = ORT_TEAMS | 2,
154 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
155
156 /* Data region. */
157 ORT_TARGET_DATA = 0x40,
158
159 /* Data region with offloading. */
160 ORT_TARGET = 0x80,
161 ORT_COMBINED_TARGET = ORT_TARGET | 1,
162 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
163
164 /* OpenACC variants. */
165 ORT_ACC = 0x100, /* A generic OpenACC region. */
166 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
167 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct. */
168 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
169 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
170 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
171
172 /* Dummy OpenMP region, used to disable expansion of
173 DECL_VALUE_EXPRs in taskloop pre body. */
174 ORT_NONE = 0x200
175 };
176
177 /* Gimplify hashtable helper. */
178
179 struct gimplify_hasher : free_ptr_hash <elt_t>
180 {
181 static inline hashval_t hash (const elt_t *);
182 static inline bool equal (const elt_t *, const elt_t *);
183 };
184
185 struct gimplify_ctx
186 {
187 struct gimplify_ctx *prev_context;
188
189 vec<gbind *> bind_expr_stack;
190 tree temps;
191 gimple_seq conditional_cleanups;
192 tree exit_label;
193 tree return_temp;
194
195 vec<tree> case_labels;
196 hash_set<tree> *live_switch_vars;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table<gimplify_hasher> *temp_htab;
199
200 int conditions;
201 unsigned into_ssa : 1;
202 unsigned allow_rhs_cond_expr : 1;
203 unsigned in_cleanup_point_expr : 1;
204 unsigned keep_stack : 1;
205 unsigned save_stack : 1;
206 unsigned in_switch_expr : 1;
207 };
208
209 enum gimplify_defaultmap_kind
210 {
211 GDMK_SCALAR,
212 GDMK_SCALAR_TARGET, /* With Fortran's TARGET attribute, implicit mapping only. */
213 GDMK_AGGREGATE,
214 GDMK_ALLOCATABLE,
215 GDMK_POINTER
216 };
217
218 struct gimplify_omp_ctx
219 {
220 struct gimplify_omp_ctx *outer_context;
221 splay_tree variables;
222 hash_set<tree> *privatized_types;
223 tree clauses;
224 /* Iteration variables in an OMP_FOR. */
225 vec<tree> loop_iter_var;
226 location_t location;
227 enum omp_clause_default_kind default_kind;
228 enum omp_region_type region_type;
229 enum tree_code code;
230 bool combined_loop;
231 bool distribute;
232 bool target_firstprivatize_array_bases;
233 bool add_safelen1;
234 bool order_concurrent;
235 bool has_depend;
236 bool in_for_exprs;
237 int defaultmap[5];
238 };
239
240 static struct gimplify_ctx *gimplify_ctxp;
241 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
242 static bool in_omp_construct;
243
244 /* Forward declaration. */
245 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
246 static hash_map<tree, tree> *oacc_declare_returns;
247 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
248 bool (*) (tree), fallback_t, bool);
249 static void prepare_gimple_addressable (tree *, gimple_seq *);
250
251 /* Shorter alias name for the above function for use in gimplify.cc
252 only. */
253
254 static inline void
255 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
256 {
257 gimple_seq_add_stmt_without_update (seq_p, gs);
258 }
259
260 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
261 NULL, a new sequence is allocated. This function is
262 similar to gimple_seq_add_seq, but does not scan the operands.
263 During gimplification, we need to manipulate statement sequences
264 before the def/use vectors have been constructed. */
265
266 static void
267 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
268 {
269 gimple_stmt_iterator si;
270
271 if (src == NULL)
272 return;
273
274 si = gsi_last (*dst_p);
275 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
276 }
277
278
279 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
280 and popping gimplify contexts. */
281
282 static struct gimplify_ctx *ctx_pool = NULL;
283
284 /* Return a gimplify context struct from the pool. */
285
286 static inline struct gimplify_ctx *
287 ctx_alloc (void)
288 {
289 struct gimplify_ctx * c = ctx_pool;
290
291 if (c)
292 ctx_pool = c->prev_context;
293 else
294 c = XNEW (struct gimplify_ctx);
295
296 memset (c, '\0', sizeof (*c));
297 return c;
298 }
299
300 /* Put gimplify context C back into the pool. */
301
302 static inline void
303 ctx_free (struct gimplify_ctx *c)
304 {
305 c->prev_context = ctx_pool;
306 ctx_pool = c;
307 }
308
309 /* Free allocated ctx stack memory. */
310
311 void
312 free_gimplify_stack (void)
313 {
314 struct gimplify_ctx *c;
315
316 while ((c = ctx_pool))
317 {
318 ctx_pool = c->prev_context;
319 free (c);
320 }
321 }
322
323
324 /* Set up a context for the gimplifier. */
325
326 void
327 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
328 {
329 struct gimplify_ctx *c = ctx_alloc ();
330
331 c->prev_context = gimplify_ctxp;
332 gimplify_ctxp = c;
333 gimplify_ctxp->into_ssa = in_ssa;
334 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
335 }
336
337 /* Tear down a context for the gimplifier. If BODY is non-null, then
338 put the temporaries into the outer BIND_EXPR. Otherwise, put them
339 in the local_decls.
340
341 BODY is not a sequence, but the first tuple in a sequence. */
342
343 void
344 pop_gimplify_context (gimple *body)
345 {
346 struct gimplify_ctx *c = gimplify_ctxp;
347
348 gcc_assert (c
349 && (!c->bind_expr_stack.exists ()
350 || c->bind_expr_stack.is_empty ()));
351 c->bind_expr_stack.release ();
352 gimplify_ctxp = c->prev_context;
353
354 if (body)
355 declare_vars (c->temps, body, false);
356 else
357 record_vars (c->temps);
358
359 delete c->temp_htab;
360 c->temp_htab = NULL;
361 ctx_free (c);
362 }
363
364 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
365
366 static void
367 gimple_push_bind_expr (gbind *bind_stmt)
368 {
369 gimplify_ctxp->bind_expr_stack.reserve (8);
370 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
371 }
372
373 /* Pop the topmost element off the stack of bindings. */
374
375 static void
376 gimple_pop_bind_expr (void)
377 {
378 gimplify_ctxp->bind_expr_stack.pop ();
379 }
380
381 /* Return the topmost element of the stack of bindings. */
382
383 gbind *
384 gimple_current_bind_expr (void)
385 {
386 return gimplify_ctxp->bind_expr_stack.last ();
387 }
388
389 /* Return the stack of bindings created during gimplification. */
390
391 vec<gbind *>
392 gimple_bind_expr_stack (void)
393 {
394 return gimplify_ctxp->bind_expr_stack;
395 }
396
397 /* Return true iff there is a COND_EXPR between us and the innermost
398 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
399
400 static bool
401 gimple_conditional_context (void)
402 {
403 return gimplify_ctxp->conditions > 0;
404 }
405
406 /* Note that we've entered a COND_EXPR. */
407
408 static void
409 gimple_push_condition (void)
410 {
411 #ifdef ENABLE_GIMPLE_CHECKING
412 if (gimplify_ctxp->conditions == 0)
413 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
414 #endif
415 ++(gimplify_ctxp->conditions);
416 }
417
418 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
419 now, add any conditional cleanups we've seen to the prequeue. */
420
421 static void
422 gimple_pop_condition (gimple_seq *pre_p)
423 {
424 int conds = --(gimplify_ctxp->conditions);
425
426 gcc_assert (conds >= 0);
427 if (conds == 0)
428 {
429 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
430 gimplify_ctxp->conditional_cleanups = NULL;
431 }
432 }
433
434 /* A stable comparison routine for use with splay trees and DECLs. */
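/* Comparing DECL_UIDs rather than pointer values is what makes the
   ordering stable: DECL_UIDs are reproducible from run to run, while
   pointer comparisons are not.  */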
435
436 static int
437 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
438 {
439 tree a = (tree) xa;
440 tree b = (tree) xb;
441
442 return DECL_UID (a) - DECL_UID (b);
443 }
444
445 /* Create a new omp construct that deals with variable remapping. */
446
447 static struct gimplify_omp_ctx *
448 new_omp_context (enum omp_region_type region_type)
449 {
450 struct gimplify_omp_ctx *c;
451
452 c = XCNEW (struct gimplify_omp_ctx);
453 c->outer_context = gimplify_omp_ctxp;
454 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
455 c->privatized_types = new hash_set<tree>;
456 c->location = input_location;
457 c->region_type = region_type;
458 if ((region_type & ORT_TASK) == 0)
459 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
460 else
461 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
462 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
463 c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
464 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
465 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
466 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
467
468 return c;
469 }
470
471 /* Destroy an omp construct that deals with variable remapping. */
472
473 static void
474 delete_omp_context (struct gimplify_omp_ctx *c)
475 {
476 splay_tree_delete (c->variables);
477 delete c->privatized_types;
478 c->loop_iter_var.release ();
479 XDELETE (c);
480 }
481
482 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
483 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
484
485 /* Both gimplify the statement T and append it to *SEQ_P. This function
486 behaves exactly as gimplify_stmt, but you don't have to pass T as a
487 reference. */
488
489 void
490 gimplify_and_add (tree t, gimple_seq *seq_p)
491 {
492 gimplify_stmt (&t, seq_p);
493 }
494
495 /* Gimplify statement T into sequence *SEQ_P, and return the first
496 tuple in the sequence of generated tuples for this statement.
497 Return NULL if gimplifying T produced no tuples. */
498
499 static gimple *
500 gimplify_and_return_first (tree t, gimple_seq *seq_p)
501 {
502 gimple_stmt_iterator last = gsi_last (*seq_p);
503
504 gimplify_and_add (t, seq_p);
505
506 if (!gsi_end_p (last))
507 {
508 gsi_next (&last);
509 return gsi_stmt (last);
510 }
511 else
512 return gimple_seq_first_stmt (*seq_p);
513 }
514
515 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
516 LHS, or for a call argument. */
517
518 static bool
519 is_gimple_mem_rhs (tree t)
520 {
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return is_gimple_val (t) || is_gimple_lvalue (t);
527 }
528
529 /* Return true if T is a CALL_EXPR or an expression that can be
530 assigned to a temporary. Note that this predicate should only be
531 used during gimplification. See the rationale for this in
532 gimplify_modify_expr. */
533
534 static bool
535 is_gimple_reg_rhs_or_call (tree t)
536 {
537 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
538 || TREE_CODE (t) == CALL_EXPR);
539 }
540
541 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
542 this predicate should only be used during gimplification. See the
543 rationale for this in gimplify_modify_expr. */
544
545 static bool
546 is_gimple_mem_rhs_or_call (tree t)
547 {
548 /* If we're dealing with a renamable type, either source or dest must be
549 a renamed variable. */
550 if (is_gimple_reg_type (TREE_TYPE (t)))
551 return is_gimple_val (t);
552 else
553 return (is_gimple_val (t)
554 || is_gimple_lvalue (t)
555 || TREE_CLOBBER_P (t)
556 || TREE_CODE (t) == CALL_EXPR);
557 }
558
559 /* Create a temporary with a name derived from VAL. Subroutine of
560 lookup_tmp_var; nobody else should call this function. */
561
562 static inline tree
563 create_tmp_from_val (tree val)
564 {
565 /* Drop all qualifiers and address-space information from the value type. */
566 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
567 tree var = create_tmp_var (type, get_name (val));
568 return var;
569 }
570
571 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
572 an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
573
574 static tree
575 lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
576 {
577 tree ret;
578
579 /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
580 gcc_assert (!is_formal || !not_gimple_reg);
581
582 /* If not optimizing, never really reuse a temporary. local-alloc
583 won't allocate any variable that is used in more than one basic
584 block, which means it will go into memory, causing much extra
585 work in reload and final and poorer code generation, outweighing
586 the extra memory allocation here. */
587 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
588 {
589 ret = create_tmp_from_val (val);
590 DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
591 }
592 else
593 {
594 elt_t elt, *elt_p;
595 elt_t **slot;
596
597 elt.val = val;
598 if (!gimplify_ctxp->temp_htab)
599 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
600 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
601 if (*slot == NULL)
602 {
603 elt_p = XNEW (elt_t);
604 elt_p->val = val;
605 elt_p->temp = ret = create_tmp_from_val (val);
606 *slot = elt_p;
607 }
608 else
609 {
610 elt_p = *slot;
611 ret = elt_p->temp;
612 }
613 }
614
615 return ret;
616 }
617
618 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
619
620 static tree
621 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
622 bool is_formal, bool allow_ssa, bool not_gimple_reg)
623 {
624 tree t, mod;
625
626 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
627 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
628 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
629 fb_rvalue);
630
631 if (allow_ssa
632 && gimplify_ctxp->into_ssa
633 && is_gimple_reg_type (TREE_TYPE (val)))
634 {
635 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
636 if (! gimple_in_ssa_p (cfun))
637 {
638 const char *name = get_name (val);
639 if (name)
640 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
641 }
642 }
643 else
644 t = lookup_tmp_var (val, is_formal, not_gimple_reg);
645
646 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
647
648 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
649
650 /* gimplify_modify_expr might want to reduce this further. */
651 gimplify_and_add (mod, pre_p);
652 ggc_free (mod);
653
654 return t;
655 }
656
657 /* Return a formal temporary variable initialized with VAL. PRE_P is as
658 in gimplify_expr. Only use this function if:
659
660 1) The value of the unfactored expression represented by VAL will not
661 change between the initialization and use of the temporary, and
662 2) The temporary will not be otherwise modified.
663
664 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
665 and #2 means it is inappropriate for && temps.
666
667 For other cases, use get_initialized_tmp_var instead. */
668
669 tree
670 get_formal_tmp_var (tree val, gimple_seq *pre_p)
671 {
672 return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
673 }
674
675 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
676 are as in gimplify_expr. */
677
678 tree
679 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
680 gimple_seq *post_p /* = NULL */,
681 bool allow_ssa /* = true */)
682 {
683 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
684 }
685
686 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
687 generate debug info for them; otherwise don't. */
688
689 void
690 declare_vars (tree vars, gimple *gs, bool debug_info)
691 {
692 tree last = vars;
693 if (last)
694 {
695 tree temps, block;
696
697 gbind *scope = as_a <gbind *> (gs);
698
699 temps = nreverse (last);
700
701 block = gimple_bind_block (scope);
702 gcc_assert (!block || TREE_CODE (block) == BLOCK);
703 if (!block || !debug_info)
704 {
705 DECL_CHAIN (last) = gimple_bind_vars (scope);
706 gimple_bind_set_vars (scope, temps);
707 }
708 else
709 {
710 /* We need to attach the nodes both to the BIND_EXPR and to its
711 associated BLOCK for debugging purposes. The key point here
712 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
713 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
714 if (BLOCK_VARS (block))
715 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
716 else
717 {
718 gimple_bind_set_vars (scope,
719 chainon (gimple_bind_vars (scope), temps));
720 BLOCK_VARS (block) = temps;
721 }
722 }
723 }
724 }
725
726 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
727 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
728 no such upper bound can be obtained. */
729
730 static void
731 force_constant_size (tree var)
732 {
733 /* The only attempt we make is by querying the maximum size of objects
734 of the variable's type. */
735
736 HOST_WIDE_INT max_size;
737
738 gcc_assert (VAR_P (var));
739
740 max_size = max_int_size_in_bytes (TREE_TYPE (var));
741
742 gcc_assert (max_size >= 0);
743
744 DECL_SIZE_UNIT (var)
745 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
746 DECL_SIZE (var)
747 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
748 }
749
750 /* Push the temporary variable TMP into the bindings of function FN. */
751
752 void
753 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
754 {
755 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
756
757 /* Later processing assumes that the object size is constant, which might
758 not be true at this point. Force the use of a constant upper bound in
759 this case. */
760 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
761 force_constant_size (tmp);
762
763 DECL_CONTEXT (tmp) = fn->decl;
764 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
765
766 record_vars_into (tmp, fn->decl);
767 }
768
769 /* Push the temporary variable TMP into the current binding. */
770
771 void
772 gimple_add_tmp_var (tree tmp)
773 {
774 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
775
776 /* Later processing assumes that the object size is constant, which might
777 not be true at this point. Force the use of a constant upper bound in
778 this case. */
779 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
780 force_constant_size (tmp);
781
782 DECL_CONTEXT (tmp) = current_function_decl;
783 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
784
785 if (gimplify_ctxp)
786 {
787 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
788 gimplify_ctxp->temps = tmp;
789
790 /* Mark temporaries local within the nearest enclosing parallel. */
791 if (gimplify_omp_ctxp)
792 {
793 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
794 int flag = GOVD_LOCAL | GOVD_SEEN;
795 while (ctx
796 && (ctx->region_type == ORT_WORKSHARE
797 || ctx->region_type == ORT_TASKGROUP
798 || ctx->region_type == ORT_SIMD
799 || ctx->region_type == ORT_ACC))
800 {
801 if (ctx->region_type == ORT_SIMD
802 && TREE_ADDRESSABLE (tmp)
803 && !TREE_STATIC (tmp))
804 {
805 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
806 ctx->add_safelen1 = true;
807 else if (ctx->in_for_exprs)
808 flag = GOVD_PRIVATE;
809 else
810 flag = GOVD_PRIVATE | GOVD_SEEN;
811 break;
812 }
813 ctx = ctx->outer_context;
814 }
815 if (ctx)
816 omp_add_variable (ctx, tmp, flag);
817 }
818 }
819 else if (cfun)
820 record_vars (tmp);
821 else
822 {
823 gimple_seq body_seq;
824
825 /* This case is for nested functions. We need to expose the locals
826 they create. */
827 body_seq = gimple_body (current_function_decl);
828 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
829 }
830 }
831
832
833 \f
834 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
835 nodes that are referenced more than once in GENERIC functions. This is
836 necessary because gimplification (translation into GIMPLE) is performed
837 by modifying tree nodes in-place, so gimplification of a shared node in a
838 first context could generate an invalid GIMPLE form in a second context.
839
840 This is achieved with a simple mark/copy/unmark algorithm that walks the
841 GENERIC representation top-down, marks nodes with TREE_VISITED the first
842 time it encounters them, duplicates them if they already have TREE_VISITED
843 set, and finally removes the TREE_VISITED marks it has set.
844
845 The algorithm works only at the function level, i.e. it generates a GENERIC
846 representation of a function with no nodes shared within the function when
847 passed a GENERIC function (except for nodes that are allowed to be shared).
848
849 At the global level, it is also necessary to unshare tree nodes that are
850 referenced in more than one function, for the same aforementioned reason.
851 This requires some cooperation from the front-end. There are 2 strategies:
852
853 1. Manual unsharing. The front-end needs to call unshare_expr on every
854 expression that might end up being shared across functions.
855
856 2. Deep unsharing. This is an extension of regular unsharing. Instead
857 of calling unshare_expr on expressions that might be shared across
858 functions, the front-end pre-marks them with TREE_VISITED. This will
859 ensure that they are unshared on the first reference within functions
860 when the regular unsharing algorithm runs. The counterpart is that
861 this algorithm must look deeper than for manual unsharing, which is
862 specified by LANG_HOOKS_DEEP_UNSHARING.
863
864 If there are only a few specific cases of node sharing across functions, it is
865 probably easier for a front-end to unshare the expressions manually. On the
866 contrary, if the expressions generated at the global level are as widespread
867 as expressions generated within functions, deep unsharing is very likely the
868 way to go. */
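/* For example (schematically): given a GENERIC assignment such as
     a = (x + y) * f (x + y)
   in which both occurrences of x + y are one and the same PLUS_EXPR
   node, the walk marks that node TREE_VISITED at its first occurrence
   and replaces the second occurrence with a fresh copy, so in-place
   gimplification of one occurrence cannot corrupt the other.  */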
869
870 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
871 These nodes model computations that must be done once. If we were to
872 unshare something like SAVE_EXPR(i++), the gimplification process would
873 create wrong code. However, if DATA is non-null, it must hold a pointer
874 set that is used to unshare the subtrees of these nodes. */
875
876 static tree
877 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
878 {
879 tree t = *tp;
880 enum tree_code code = TREE_CODE (t);
881
882 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
883 copy their subtrees if we can make sure to do it only once. */
884 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
885 {
886 if (data && !((hash_set<tree> *)data)->add (t))
887 ;
888 else
889 *walk_subtrees = 0;
890 }
891
892 /* Stop at types, decls, constants like copy_tree_r. */
893 else if (TREE_CODE_CLASS (code) == tcc_type
894 || TREE_CODE_CLASS (code) == tcc_declaration
895 || TREE_CODE_CLASS (code) == tcc_constant)
896 *walk_subtrees = 0;
897
898 /* Cope with the statement expression extension. */
899 else if (code == STATEMENT_LIST)
900 ;
901
902 /* Leave the bulk of the work to copy_tree_r itself. */
903 else
904 copy_tree_r (tp, walk_subtrees, NULL);
905
906 return NULL_TREE;
907 }
908
909 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
910 If *TP has been visited already, then *TP is deeply copied by calling
911 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
912
913 static tree
914 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
915 {
916 tree t = *tp;
917 enum tree_code code = TREE_CODE (t);
918
919 /* Skip types, decls, and constants. But we do want to look at their
920 types and the bounds of types. Mark them as visited so we properly
921 unmark their subtrees on the unmark pass. If we've already seen them,
922 don't look down further. */
923 if (TREE_CODE_CLASS (code) == tcc_type
924 || TREE_CODE_CLASS (code) == tcc_declaration
925 || TREE_CODE_CLASS (code) == tcc_constant)
926 {
927 if (TREE_VISITED (t))
928 *walk_subtrees = 0;
929 else
930 TREE_VISITED (t) = 1;
931 }
932
933 /* If this node has been visited already, unshare it and don't look
934 any deeper. */
935 else if (TREE_VISITED (t))
936 {
937 walk_tree (tp, mostly_copy_tree_r, data, NULL);
938 *walk_subtrees = 0;
939 }
940
941 /* Otherwise, mark the node as visited and keep looking. */
942 else
943 TREE_VISITED (t) = 1;
944
945 return NULL_TREE;
946 }
947
948 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
949 copy_if_shared_r callback unmodified. */
950
951 void
952 copy_if_shared (tree *tp, void *data)
953 {
954 walk_tree (tp, copy_if_shared_r, data, NULL);
955 }
956
957 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
958 any nested functions. */
959
960 static void
961 unshare_body (tree fndecl)
962 {
963 struct cgraph_node *cgn = cgraph_node::get (fndecl);
964 /* If the language requires deep unsharing, we need a pointer set to make
965 sure we don't repeatedly unshare subtrees of unshareable nodes. */
966 hash_set<tree> *visited
967 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
968
969 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
970 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
971 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
972
973 delete visited;
974
975 if (cgn)
976 for (cgn = first_nested_function (cgn); cgn;
977 cgn = next_nested_function (cgn))
978 unshare_body (cgn->decl);
979 }
980
981 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
982 Subtrees are walked until the first unvisited node is encountered. */
983
984 static tree
985 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
986 {
987 tree t = *tp;
988
989 /* If this node has been visited, unmark it and keep looking. */
990 if (TREE_VISITED (t))
991 TREE_VISITED (t) = 0;
992
993 /* Otherwise, don't look any deeper. */
994 else
995 *walk_subtrees = 0;
996
997 return NULL_TREE;
998 }
999
1000 /* Unmark the visited trees rooted at *TP. */
1001
1002 static inline void
1003 unmark_visited (tree *tp)
1004 {
1005 walk_tree (tp, unmark_visited_r, NULL, NULL);
1006 }
1007
1008 /* Likewise, but mark all trees as not visited. */
1009
1010 static void
1011 unvisit_body (tree fndecl)
1012 {
1013 struct cgraph_node *cgn = cgraph_node::get (fndecl);
1014
1015 unmark_visited (&DECL_SAVED_TREE (fndecl));
1016 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1017 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1018
1019 if (cgn)
1020 for (cgn = first_nested_function (cgn);
1021 cgn; cgn = next_nested_function (cgn))
1022 unvisit_body (cgn->decl);
1023 }
1024
1025 /* Unconditionally make an unshared copy of EXPR. This is used for
1026 stored expressions which span multiple functions, such as BINFO_VTABLE,
1027 as the normal unsharing process can't tell that they're shared. */
1028
1029 tree
1030 unshare_expr (tree expr)
1031 {
1032 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1033 return expr;
1034 }
1035
1036 /* Worker for unshare_expr_without_location. */
1037
1038 static tree
1039 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1040 {
1041 if (EXPR_P (*tp))
1042 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1043 else
1044 *walk_subtrees = 0;
1045 return NULL_TREE;
1046 }
1047
1048 /* Similar to unshare_expr but also prune all expression locations
1049 from EXPR. */
1050
1051 tree
1052 unshare_expr_without_location (tree expr)
1053 {
1054 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1055 if (EXPR_P (expr))
1056 walk_tree (&expr, prune_expr_location, NULL, NULL);
1057 return expr;
1058 }
1059
1060 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1061 one, OR_ELSE otherwise. The location of a STATEMENT_LIST
1062 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1063 EXPR is the location of the EXPR. */
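/* For instance, a STATEMENT_LIST of the form
     { DEBUG_BEGIN_STMT; x = 1; }
   is given the location of "x = 1"; the leading debug begin markers
   are looked through.  */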
1064
1065 static location_t
1066 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1067 {
1068 if (!expr)
1069 return or_else;
1070
1071 if (EXPR_HAS_LOCATION (expr))
1072 return EXPR_LOCATION (expr);
1073
1074 if (TREE_CODE (expr) != STATEMENT_LIST)
1075 return or_else;
1076
1077 tree_stmt_iterator i = tsi_start (expr);
1078
1079 bool found = false;
1080 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1081 {
1082 found = true;
1083 tsi_next (&i);
1084 }
1085
1086 if (!found || !tsi_one_before_end_p (i))
1087 return or_else;
1088
1089 return rexpr_location (tsi_stmt (i), or_else);
1090 }
1091
1092 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1093 rexpr_location for the potential recursion. */
1094
1095 static inline bool
1096 rexpr_has_location (tree expr)
1097 {
1098 return rexpr_location (expr) != UNKNOWN_LOCATION;
1099 }
1100
1101 \f
1102 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1103 contain statements and have a value. Assign its value to a temporary
1104 and give it void_type_node. Return the temporary, or NULL_TREE if
1105 WRAPPER was already void. */
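/* As a schematic example: a BIND_EXPR used for its value,
     BIND_EXPR <int> { int i; i = f (); i + 1; }
   is rewritten to
     BIND_EXPR <void> { int i; i = f (); retval = i + 1; }
   and the new temporary "retval" is returned ("retval" being the name
   created below when no TEMP is supplied).  */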
1106
1107 tree
1108 voidify_wrapper_expr (tree wrapper, tree temp)
1109 {
1110 tree type = TREE_TYPE (wrapper);
1111 if (type && !VOID_TYPE_P (type))
1112 {
1113 tree *p;
1114
1115 /* Set p to point to the body of the wrapper. Loop until we find
1116 something that isn't a wrapper. */
1117 for (p = &wrapper; p && *p; )
1118 {
1119 switch (TREE_CODE (*p))
1120 {
1121 case BIND_EXPR:
1122 TREE_SIDE_EFFECTS (*p) = 1;
1123 TREE_TYPE (*p) = void_type_node;
1124 /* For a BIND_EXPR, the body is operand 1. */
1125 p = &BIND_EXPR_BODY (*p);
1126 break;
1127
1128 case CLEANUP_POINT_EXPR:
1129 case TRY_FINALLY_EXPR:
1130 case TRY_CATCH_EXPR:
1131 TREE_SIDE_EFFECTS (*p) = 1;
1132 TREE_TYPE (*p) = void_type_node;
1133 p = &TREE_OPERAND (*p, 0);
1134 break;
1135
1136 case STATEMENT_LIST:
1137 {
1138 tree_stmt_iterator i = tsi_last (*p);
1139 TREE_SIDE_EFFECTS (*p) = 1;
1140 TREE_TYPE (*p) = void_type_node;
1141 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1142 }
1143 break;
1144
1145 case COMPOUND_EXPR:
1146 /* Advance to the last statement. Set all container types to
1147 void. */
1148 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1149 {
1150 TREE_SIDE_EFFECTS (*p) = 1;
1151 TREE_TYPE (*p) = void_type_node;
1152 }
1153 break;
1154
1155 case TRANSACTION_EXPR:
1156 TREE_SIDE_EFFECTS (*p) = 1;
1157 TREE_TYPE (*p) = void_type_node;
1158 p = &TRANSACTION_EXPR_BODY (*p);
1159 break;
1160
1161 default:
1162 /* Assume that any tree upon which voidify_wrapper_expr is
1163 directly called is a wrapper, and that its body is op0. */
1164 if (p == &wrapper)
1165 {
1166 TREE_SIDE_EFFECTS (*p) = 1;
1167 TREE_TYPE (*p) = void_type_node;
1168 p = &TREE_OPERAND (*p, 0);
1169 break;
1170 }
1171 goto out;
1172 }
1173 }
1174
1175 out:
1176 if (p == NULL || IS_EMPTY_STMT (*p))
1177 temp = NULL_TREE;
1178 else if (temp)
1179 {
1180 /* The wrapper is on the RHS of an assignment that we're pushing
1181 down. */
1182 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1183 || TREE_CODE (temp) == MODIFY_EXPR);
1184 TREE_OPERAND (temp, 1) = *p;
1185 *p = temp;
1186 }
1187 else
1188 {
1189 temp = create_tmp_var (type, "retval");
1190 *p = build2 (INIT_EXPR, type, temp, *p);
1191 }
1192
1193 return temp;
1194 }
1195
1196 return NULL_TREE;
1197 }
1198
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1200 a temporary through which they communicate. */
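/* In GIMPLE dumps the two calls built here appear roughly as
     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);
   where "saved_stack.1" stands for the temporary created below (the
   exact name is illustrative).  */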
1201
1202 static void
1203 build_stack_save_restore (gcall **save, gcall **restore)
1204 {
1205 tree tmp_var;
1206
1207 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1208 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1209 gimple_call_set_lhs (*save, tmp_var);
1210
1211 *restore
1212 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1213 1, tmp_var);
1214 }
1215
1216 /* Generate an IFN_ASAN_MARK call that poisons the shadow memory of variable DECL. */
1217
1218 static tree
1219 build_asan_poison_call_expr (tree decl)
1220 {
1221 /* Do not poison variables that have size equal to zero. */
1222 tree unit_size = DECL_SIZE_UNIT (decl);
1223 if (zerop (unit_size))
1224 return NULL_TREE;
1225
1226 tree base = build_fold_addr_expr (decl);
1227
1228 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1229 void_type_node, 3,
1230 build_int_cst (integer_type_node,
1231 ASAN_MARK_POISON),
1232 base, unit_size);
1233 }
1234
1235 /* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
1236 on the POISON flag, the shadow memory of variable DECL. The call is
1237 inserted at the location identified by iterator IT; the BEFORE flag
1238 selects whether it is placed before or after that position. */
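/* In GIMPLE dumps the generated internal call appears roughly as
     .ASAN_MARK (POISON, &decl, size);
   or, when POISON is false,
     .ASAN_MARK (UNPOISON, &decl, size);
   (schematic form only).  */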
1239
1240 static void
1241 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1242 bool before)
1243 {
1244 tree unit_size = DECL_SIZE_UNIT (decl);
1245 tree base = build_fold_addr_expr (decl);
1246
1247 /* Do not poison variables that have size equal to zero. */
1248 if (zerop (unit_size))
1249 return;
1250
1251 /* It's necessary to have all stack variables aligned to ASAN granularity
1252 bytes. */
1253 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1254 unsigned shadow_granularity
1255 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
1256 if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
1257 SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
1258
1259 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1260
1261 gimple *g
1262 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1263 build_int_cst (integer_type_node, flags),
1264 base, unit_size);
1265
1266 if (before)
1267 gsi_insert_before (it, g, GSI_NEW_STMT);
1268 else
1269 gsi_insert_after (it, g, GSI_NEW_STMT);
1270 }
1271
1272 /* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
1273 flag, either poisons or unpoisons DECL. The created statement is
1274 appended to the SEQ_P gimple sequence. */
1275
1276 static void
1277 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1278 {
1279 gimple_stmt_iterator it = gsi_last (*seq_p);
1280 bool before = false;
1281
1282 if (gsi_end_p (it))
1283 before = true;
1284
1285 asan_poison_variable (decl, poison, &it, before);
1286 }
1287
1288 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1289
1290 static int
1291 sort_by_decl_uid (const void *a, const void *b)
1292 {
1293 const tree *t1 = (const tree *)a;
1294 const tree *t2 = (const tree *)b;
1295
1296 int uid1 = DECL_UID (*t1);
1297 int uid2 = DECL_UID (*t2);
1298
1299 if (uid1 < uid2)
1300 return -1;
1301 else if (uid1 > uid2)
1302 return 1;
1303 else
1304 return 0;
1305 }
1306
1307 /* Generate an IFN_ASAN_MARK internal call for each of VARIABLES,
1308 depending on the POISON flag. The created statements are appended
1309 to the SEQ_P gimple sequence. */
1310
1311 static void
1312 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1313 {
1314 unsigned c = variables->elements ();
1315 if (c == 0)
1316 return;
1317
1318 auto_vec<tree> sorted_variables (c);
1319
1320 for (hash_set<tree>::iterator it = variables->begin ();
1321 it != variables->end (); ++it)
1322 sorted_variables.safe_push (*it);
1323
1324 sorted_variables.qsort (sort_by_decl_uid);
1325
1326 unsigned i;
1327 tree var;
1328 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1329 {
1330 asan_poison_variable (var, poison, seq_p);
1331
1332 /* Add the use_after_scope_memory attribute to the variable in order
1333 to prevent it from being rewritten into SSA form. */
1334 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1335 DECL_ATTRIBUTES (var)))
1336 DECL_ATTRIBUTES (var)
1337 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1338 integer_one_node,
1339 DECL_ATTRIBUTES (var));
1340 }
1341 }
1342
1343 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1344
1345 static enum gimplify_status
1346 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1347 {
1348 tree bind_expr = *expr_p;
1349 bool old_keep_stack = gimplify_ctxp->keep_stack;
1350 bool old_save_stack = gimplify_ctxp->save_stack;
1351 tree t;
1352 gbind *bind_stmt;
1353 gimple_seq body, cleanup;
1354 gcall *stack_save;
1355 location_t start_locus = 0, end_locus = 0;
1356 tree ret_clauses = NULL;
1357
1358 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1359
1360 /* Mark variables seen in this bind expr. */
1361 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1362 {
1363 if (VAR_P (t))
1364 {
1365 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1366
1367 /* Mark variable as local. */
1368 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1369 {
1370 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1371 || splay_tree_lookup (ctx->variables,
1372 (splay_tree_key) t) == NULL)
1373 {
1374 int flag = GOVD_LOCAL;
1375 if (ctx->region_type == ORT_SIMD
1376 && TREE_ADDRESSABLE (t)
1377 && !TREE_STATIC (t))
1378 {
1379 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1380 ctx->add_safelen1 = true;
1381 else
1382 flag = GOVD_PRIVATE;
1383 }
1384 omp_add_variable (ctx, t, flag | GOVD_SEEN);
1385 }
1386 /* Static locals inside of target construct or offloaded
1387 routines need to be "omp declare target". */
1388 if (TREE_STATIC (t))
1389 for (; ctx; ctx = ctx->outer_context)
1390 if ((ctx->region_type & ORT_TARGET) != 0)
1391 {
1392 if (!lookup_attribute ("omp declare target",
1393 DECL_ATTRIBUTES (t)))
1394 {
1395 tree id = get_identifier ("omp declare target");
1396 DECL_ATTRIBUTES (t)
1397 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1398 varpool_node *node = varpool_node::get (t);
1399 if (node)
1400 {
1401 node->offloadable = 1;
1402 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1403 {
1404 g->have_offload = true;
1405 if (!in_lto_p)
1406 vec_safe_push (offload_vars, t);
1407 }
1408 }
1409 }
1410 break;
1411 }
1412 }
1413
1414 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1415
1416 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1417 cfun->has_local_explicit_reg_vars = true;
1418 }
1419 }
1420
1421 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1422 BIND_EXPR_BLOCK (bind_expr));
1423 gimple_push_bind_expr (bind_stmt);
1424
1425 gimplify_ctxp->keep_stack = false;
1426 gimplify_ctxp->save_stack = false;
1427
1428 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1429 body = NULL;
1430 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1431 gimple_bind_set_body (bind_stmt, body);
1432
1433 /* Source location wise, the cleanup code (stack_restore and clobbers)
1434 belongs to the end of the block, so propagate what we have. The
1435 stack_save operation belongs to the beginning of the block, which we can
1436 infer from the bind_expr directly if the block has no explicit
1437 assignment. */
1438 if (BIND_EXPR_BLOCK (bind_expr))
1439 {
1440 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1441 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1442 }
1443 if (start_locus == 0)
1444 start_locus = EXPR_LOCATION (bind_expr);
1445
1446 cleanup = NULL;
1447 stack_save = NULL;
1448
1449 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1450 the stack space allocated to the VLAs. */
1451 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1452 {
1453 gcall *stack_restore;
1454
1455 /* Save stack on entry and restore it on exit. Add a try_finally
1456 block to achieve this. */
1457 build_stack_save_restore (&stack_save, &stack_restore);
1458
1459 gimple_set_location (stack_save, start_locus);
1460 gimple_set_location (stack_restore, end_locus);
1461
1462 gimplify_seq_add_stmt (&cleanup, stack_restore);
1463 }
1464
1465 /* Add clobbers for all variables that go out of scope. */
1466 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1467 {
1468 if (VAR_P (t)
1469 && !is_global_var (t)
1470 && DECL_CONTEXT (t) == current_function_decl)
1471 {
1472 if (!DECL_HARD_REGISTER (t)
1473 && !TREE_THIS_VOLATILE (t)
1474 && !DECL_HAS_VALUE_EXPR_P (t)
1475 /* Only care for variables that have to be in memory. Others
1476 will be rewritten into SSA names, hence moved to the
1477 top-level. */
1478 && !is_gimple_reg (t)
1479 && flag_stack_reuse != SR_NONE)
1480 {
1481 tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
1482 gimple *clobber_stmt;
1483 clobber_stmt = gimple_build_assign (t, clobber);
1484 gimple_set_location (clobber_stmt, end_locus);
1485 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1486 }
1487
1488 if (flag_openacc && oacc_declare_returns != NULL)
1489 {
1490 tree key = t;
1491 if (DECL_HAS_VALUE_EXPR_P (key))
1492 {
1493 key = DECL_VALUE_EXPR (key);
1494 if (TREE_CODE (key) == INDIRECT_REF)
1495 key = TREE_OPERAND (key, 0);
1496 }
1497 tree *c = oacc_declare_returns->get (key);
1498 if (c != NULL)
1499 {
1500 if (ret_clauses)
1501 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1502
1503 ret_clauses = unshare_expr (*c);
1504
1505 oacc_declare_returns->remove (key);
1506
1507 if (oacc_declare_returns->is_empty ())
1508 {
1509 delete oacc_declare_returns;
1510 oacc_declare_returns = NULL;
1511 }
1512 }
1513 }
1514 }
1515
1516 if (asan_poisoned_variables != NULL
1517 && asan_poisoned_variables->contains (t))
1518 {
1519 asan_poisoned_variables->remove (t);
1520 asan_poison_variable (t, true, &cleanup);
1521 }
1522
1523 if (gimplify_ctxp->live_switch_vars != NULL
1524 && gimplify_ctxp->live_switch_vars->contains (t))
1525 gimplify_ctxp->live_switch_vars->remove (t);
1526 }
1527
1528 if (ret_clauses)
1529 {
1530 gomp_target *stmt;
1531 gimple_stmt_iterator si = gsi_start (cleanup);
1532
1533 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1534 ret_clauses);
1535 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1536 }
1537
1538 if (cleanup)
1539 {
1540 gtry *gs;
1541 gimple_seq new_body;
1542
1543 new_body = NULL;
1544 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1545 GIMPLE_TRY_FINALLY);
1546
1547 if (stack_save)
1548 gimplify_seq_add_stmt (&new_body, stack_save);
1549 gimplify_seq_add_stmt (&new_body, gs);
1550 gimple_bind_set_body (bind_stmt, new_body);
1551 }
1552
1553 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1554 if (!gimplify_ctxp->keep_stack)
1555 gimplify_ctxp->keep_stack = old_keep_stack;
1556 gimplify_ctxp->save_stack = old_save_stack;
1557
1558 gimple_pop_bind_expr ();
1559
1560 gimplify_seq_add_stmt (pre_p, bind_stmt);
1561
1562 if (temp)
1563 {
1564 *expr_p = temp;
1565 return GS_OK;
1566 }
1567
1568 *expr_p = NULL_TREE;
1569 return GS_ALL_DONE;
1570 }
1571
1572 /* Maybe add early return predict statement to PRE_P sequence. */
1573
1574 static void
1575 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1576 {
1577 /* If we are in a conditional context, add a PREDICT statement noting that this early return is not expected to be taken. */
1578 if (gimple_conditional_context ())
1579 {
1580 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1581 NOT_TAKEN);
1582 gimplify_seq_add_stmt (pre_p, predict);
1583 }
1584 }
1585
1586 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1587 GIMPLE value, it is assigned to a new temporary and the statement is
1588 re-written to return the temporary.
1589
1590 PRE_P points to the sequence where side effects that must happen before
1591 STMT should be stored. */
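/* For example, "return a + b;" where a + b is not a GIMPLE value is
   rewritten roughly as
     tmp = a + b;
     return tmp;
   where "tmp" stands for whichever of the RESULT_DECL, the cached
   return_temp or a fresh temporary the code below selects (schematic
   form only).  */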
1592
1593 static enum gimplify_status
1594 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1595 {
1596 greturn *ret;
1597 tree ret_expr = TREE_OPERAND (stmt, 0);
1598 tree result_decl, result;
1599
1600 if (ret_expr == error_mark_node)
1601 return GS_ERROR;
1602
1603 if (!ret_expr
1604 || TREE_CODE (ret_expr) == RESULT_DECL)
1605 {
1606 maybe_add_early_return_predict_stmt (pre_p);
1607 greturn *ret = gimple_build_return (ret_expr);
1608 copy_warning (ret, stmt);
1609 gimplify_seq_add_stmt (pre_p, ret);
1610 return GS_ALL_DONE;
1611 }
1612
1613 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1614 result_decl = NULL_TREE;
1615 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1616 {
1617 /* Used in C++ for handling EH cleanup of the return value if a local
1618 cleanup throws. Assume the front-end knows what it's doing. */
1619 result_decl = DECL_RESULT (current_function_decl);
1620 /* But crash if we end up trying to modify ret_expr below. */
1621 ret_expr = NULL_TREE;
1622 }
1623 else
1624 {
1625 result_decl = TREE_OPERAND (ret_expr, 0);
1626
1627 /* See through a return by reference. */
1628 if (TREE_CODE (result_decl) == INDIRECT_REF)
1629 result_decl = TREE_OPERAND (result_decl, 0);
1630
1631 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1632 || TREE_CODE (ret_expr) == INIT_EXPR)
1633 && TREE_CODE (result_decl) == RESULT_DECL);
1634 }
1635
1636 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1637 Recall that aggregate_value_p is FALSE for any aggregate type that is
1638 returned in registers. If we're returning values in registers, then
1639 we don't want to extend the lifetime of the RESULT_DECL, particularly
1640 across another call. In addition, for those aggregates for which
1641 hard_function_value generates a PARALLEL, we'll die during normal
1642 expansion of structure assignments; there's special code in expand_return
1643 to handle this case that does not exist in expand_expr. */
1644 if (!result_decl)
1645 result = NULL_TREE;
1646 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1647 {
1648 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1649 {
1650 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1651 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1652 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1653 should be effectively allocated by the caller, i.e. all calls to
1654 this function must be subject to the Return Slot Optimization. */
1655 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1656 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1657 }
1658 result = result_decl;
1659 }
1660 else if (gimplify_ctxp->return_temp)
1661 result = gimplify_ctxp->return_temp;
1662 else
1663 {
1664 result = create_tmp_reg (TREE_TYPE (result_decl));
1665
1666 /* ??? With complex control flow (usually involving abnormal edges),
1667 we can wind up warning about an uninitialized value for this. Due
1668 to how this variable is constructed and initialized, this is never
1669 true. Give up and never warn. */
1670 suppress_warning (result, OPT_Wuninitialized);
1671
1672 gimplify_ctxp->return_temp = result;
1673 }
1674
1675 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1676 Then gimplify the whole thing. */
1677 if (result != result_decl)
1678 TREE_OPERAND (ret_expr, 0) = result;
1679
1680 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1681
1682 maybe_add_early_return_predict_stmt (pre_p);
1683 ret = gimple_build_return (result);
1684 copy_warning (ret, stmt);
1685 gimplify_seq_add_stmt (pre_p, ret);
1686
1687 return GS_ALL_DONE;
1688 }
1689
1690 /* Gimplify a variable-length array DECL. */
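/* Schematically, for "char a[n];" this emits something like
     a.ptr = __builtin_alloca_with_align (size, align);
   and sets DECL_VALUE_EXPR (a) to *a.ptr, so later uses of "a" are
   rewritten as indirections through the temporary ("a.ptr" and the
   exact alloca variant chosen are illustrative).  */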
1691
1692 static void
1693 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1694 {
1695 /* This is a variable-sized decl. Simplify its size and mark it
1696 for deferred expansion. */
1697 tree t, addr, ptr_type;
1698
1699 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1700 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1701
1702 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1703 if (DECL_HAS_VALUE_EXPR_P (decl))
1704 return;
1705
1706 /* All occurrences of this decl in final gimplified code will be
1707 replaced by indirection. Setting DECL_VALUE_EXPR does two
1708 things: First, it lets the rest of the gimplifier know what
1709 replacement to use. Second, it lets the debug info know
1710 where to find the value. */
1711 ptr_type = build_pointer_type (TREE_TYPE (decl));
1712 addr = create_tmp_var (ptr_type, get_name (decl));
1713 DECL_IGNORED_P (addr) = 0;
1714 t = build_fold_indirect_ref (addr);
1715 TREE_THIS_NOTRAP (t) = 1;
1716 SET_DECL_VALUE_EXPR (decl, t);
1717 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1718
1719 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1720 max_int_size_in_bytes (TREE_TYPE (decl)));
1721 /* The call has been built for a variable-sized object. */
1722 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1723 t = fold_convert (ptr_type, t);
1724 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1725
1726 gimplify_and_add (t, seq_p);
1727
1728 /* Record the dynamic allocation associated with DECL if requested. */
1729 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1730 record_dynamic_alloc (decl);
1731 }
1732
1733 /* A helper function to be called via walk_tree. Mark all labels under *TP
1734 as being forced. To be called for DECL_INITIAL of static variables. */
1735
1736 static tree
1737 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1738 {
1739 if (TYPE_P (*tp))
1740 *walk_subtrees = 0;
1741 if (TREE_CODE (*tp) == LABEL_DECL)
1742 {
1743 FORCED_LABEL (*tp) = 1;
1744 cfun->has_forced_label_in_static = 1;
1745 }
1746
1747 return NULL_TREE;
1748 }
1749
1750 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1751 Build a call to internal const function DEFERRED_INIT:
1752 1st argument: SIZE of the DECL;
1753 2nd argument: INIT_TYPE;
1754 3rd argument: NAME of the DECL;
1755
1756 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
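/* E.g. for "int x;" under -ftrivial-auto-var-init, the generated
   statement appears in dumps roughly as
     x = .DEFERRED_INIT (4, init_type, &"x"[0]);
   where 4 is the size of x in bytes and init_type encodes the chosen
   initialization mode (schematic form only).  */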
1757
1758 static void
1759 gimple_add_init_for_auto_var (tree decl,
1760 enum auto_init_type init_type,
1761 gimple_seq *seq_p)
1762 {
1763 gcc_assert (auto_var_p (decl));
1764 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1765 location_t loc = EXPR_LOCATION (decl);
1766 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1767
1768 tree init_type_node
1769 = build_int_cst (integer_type_node, (int) init_type);
1770
1771 tree decl_name = NULL_TREE;
1772 if (DECL_NAME (decl))
1774 decl_name = build_string_literal (DECL_NAME (decl));
1776 else
1777 {
1778 char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
1779 decl_name = build_string_literal (decl_name_anonymous);
1780 free (decl_name_anonymous);
1781 }
1782
1783 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1784 TREE_TYPE (decl), 3,
1785 decl_size, init_type_node,
1786 decl_name);
1787
1788 gimplify_assign (decl, call, seq_p);
1789 }
1790
1791 /* Generate padding initialization for automatic variable DECL.
1792 C guarantees that brace-initialization with fewer initializers than
1793 the aggregate has members initializes the rest of the aggregate as if
1794 it were static initialization; static initialization in turn
1795 guarantees that padding is initialized to zero. So we always
1796 initialize the padding to zero regardless of INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note that we add an additional dummy argument, 'for_auto_init', to
1800 __builtin_clear_padding to distinguish whether this call is for
1801 automatic variable initialization or not.
1802 */
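/* For instance, given
     struct S { char c; int i; } s = { 'a' };
   the guarantee described above means the padding bytes between "c"
   and "i" are zeroed; the call built below extends that behavior to
   automatic variables under -ftrivial-auto-var-init (illustration
   only).  */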
1803 static void
1804 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
1805 gimple_seq *seq_p)
1806 {
1807 tree addr_of_decl = NULL_TREE;
1808 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
1809
1810 if (is_vla)
1811 {
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
1815 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
1816 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
1817 }
1818 else
1819 {
1820 mark_addressable (decl);
1821 addr_of_decl = build_fold_addr_expr (decl);
1822 }
1823
1824 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
1825 build_one_cst (TREE_TYPE (addr_of_decl)));
1826 gimplify_seq_add_stmt (seq_p, call);
1827 }
1828
1829 /* Return true if DECL needs to be automatically initialized by the
1830 compiler. */
1831 static bool
1832 is_var_need_auto_init (tree decl)
1833 {
1834 if (auto_var_p (decl)
1835 && (TREE_CODE (decl) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl))
1837 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1840 && !is_empty_type (TREE_TYPE (decl)))
1841 return true;
1842 return false;
1843 }
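/* Editor's illustration: a variable can opt out of the automatic
   initialization tested above via the "uninitialized" attribute, e.g.

     int buf[64] __attribute__ ((uninitialized));

   which fails the lookup_attribute check and is left untouched even when
   -ftrivial-auto-var-init is enabled.  */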
1844
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1847
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1850 {
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1853
1854 *stmt_p = NULL_TREE;
1855
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1858
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1862 {
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1866 }
1867
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1873 {
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1877 }
1878
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1880 {
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether the decl has an FE-created VALUE_EXPR here, BEFORE
1884 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
1885 If the decl has a VALUE_EXPR that was created by the FE (usually
1886 the C++ FE), it's a proxy variable, and the FE has already
1887 initialized its VALUE_EXPR, so we should not initialize it again. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1889
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1896 {
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1899 }
1900
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1915 {
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1920 }
1921
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1929
1930 if (init && init != error_mark_node)
1931 {
1932 if (!TREE_STATIC (decl))
1933 {
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1942 }
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1947 }
1948 /* When there is no explicit initializer, insert an artificial
1949 initializer for this automatic variable if the user requested
1950 one. */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1953 {
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* Expanding the call to .DEFERRED_INIT above will apply
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the padding will be initialized to zeroes
1960 for zero initialization, and to repeated 0xFE bytes for
1961 pattern initialization.
1962 In order to make the padding zeroes for pattern init too, we
1963 add a call to __builtin_clear_padding to clear the
1964 padding to zero, compatible with Clang.
1965 We cannot insert this call if the variable is a gimple register,
1966 since __builtin_clear_padding takes the address of the
1967 variable. As a result, if a long double/_Complex long double
1968 variable is later spilled to the stack, its padding stays 0xFE. */
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1973 }
1974 }
1975
1976 return GS_ALL_DONE;
1977 }
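/* Editor's illustration (identifiers such as a.ptr are invented): for a
   VLA "int a[n];" the DECL_EXPR is where the allocation becomes explicit.
   gimplify_vla_decl gives the decl a DECL_VALUE_EXPR of "*a.ptr" and
   emits something like

     a.ptr = __builtin_alloca_with_align (n * 4, 32);

   with the matching stack save/restore added to the enclosing
   GIMPLE_BIND.  A plain "int i = 5;" simply becomes the INIT_EXPR
   "i = 5" gimplified into the sequence.  */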
1978
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1982
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1985 {
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1988
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1990
1991 gimplify_ctxp->exit_label = NULL_TREE;
1992
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1994
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1996
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2000
2001 gimplify_ctxp->exit_label = saved_label;
2002
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
2005 }
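/* Editor's illustration: a LOOP_EXPR (an endless loop whose body may
   contain EXIT_EXPRs) is lowered by the above to

     start:
       <body>
       goto start;
     exit:

   where the exit label is emitted only if some EXIT_EXPR in the body
   asked for it through gimplify_ctxp->exit_label.  */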
2006
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2009
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2012 {
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2014
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2016
2017 while (!tsi_end_p (i))
2018 {
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2021 }
2022
2023 if (temp)
2024 {
2025 *expr_p = temp;
2026 return GS_OK;
2027 }
2028
2029 return GS_ALL_DONE;
2030 }
2031
2032
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the statement itself when the warning is emitted, otherwise
2035 return NULL. */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2038 {
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding whose 2nd argument is
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
2070 }
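/* Editor's illustration: -Wswitch-unreachable fires for code such as

     switch (x)
       {
         i = 4;   // warning: statement will never be executed
       case 1:
         ...
       }

   while the three compiler-generated patterns enumerated above stay
   silent.  */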
2071
2072 /* Callback for walk_gimple_seq. */
2073
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2078 {
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2081
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2084 {
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2090 {
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2093
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2097 }
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2106
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2112
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label. */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2118 {
2119 *handled_ops_p = false;
2120 break;
2121 }
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2125 {
2126 /* Get the variable name from the 3rd argument of the call. */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2130
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with"
2133 "%<-ftrivial-auto-var_init%>",
2134 var_name_str);
2135 break;
2136 }
2137
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...) and
2141 issue a warning if needed. */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2148 }
2149 return NULL_TREE;
2150 }
2151
2152
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case. Also warn when -ftrivial-auto-var-init
2155 cannot initialize the auto variable in such a situation.
2156 SEQ is the body of a switch expression. */
2157
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2160 {
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2167
2168 struct walk_stmt_info wi;
2169
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2172 }
2173
2174
2175 /* A label entry that pairs a label and a location. */
2176 struct label_entry
2177 {
2178 tree label;
2179 location_t loc;
2180 };
2181
2182 /* Find LABEL in vector of label entries VEC. */
2183
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2186 {
2187 unsigned int i;
2188 struct label_entry *l;
2189
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2194 }
2195
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2198
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2201 {
2202 unsigned int i;
2203 tree l;
2204
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2209 }
2210
2211 /* Find the last nondebug statement in a scope STMT. */
2212
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2215 {
2216 if (!stmt)
2217 return NULL;
2218
2219 switch (gimple_code (stmt))
2220 {
2221 case GIMPLE_BIND:
2222 {
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2226 }
2227
2228 case GIMPLE_TRY:
2229 {
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2237 {
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2240 }
2241 else
2242 return last_eval;
2243 }
2244
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2247
2248 default:
2249 return stmt;
2250 }
2251 }
2252
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2257
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2262 {
2263 gimple *prev = NULL;
2264
2265 *prevloc = UNKNOWN_LOCATION;
2266 do
2267 {
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2269 {
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2279 {
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2282 {
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2286 }
2287 }
2288 }
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2291 {
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2297 {
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2303 }
2304 gsi_next (gsi_p);
2305 continue;
2306 }
2307
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2310 {
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2314
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2320
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2323 {
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2328 }
2329
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2333
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2336 {
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2339 }
2340
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2343
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2349 */
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2352 {
2353 /* Look at the statement before; it might be
2354 an attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2361 {
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2364 }
2365 }
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2376
2377 /* And move back. */
2378 gsi_next (gsi_p);
2379 }
2380
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2384 {
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2388 }
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2390 ;
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2392 ;
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2396 }
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2401
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2405 }
2406
2407 /* Return true if the switch fallthrough warning should occur. LABEL is
2408 the label statement that we're falling through to. */
2409
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2412 {
2413 gimple_stmt_iterator gsi = *gsi_p;
2414
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2418
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2426 {
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2435 }
2436
2437 /* Don't warn for terminated branches, i.e. when the code following the
2438 subsequent case labels immediately breaks. */
2439 gsi = *gsi_p;
2440
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2446
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2455
2456 return true;
2457 }
2458
2459 /* Callback for walk_gimple_seq. */
2460
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2464 {
2465 gimple *stmt = gsi_stmt (*gsi_p);
2466
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2469 {
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2478
2479 /* Find a sequence of form:
2480
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2485
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2488 {
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2493
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2497
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2502
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2506
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2514 {
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2535
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2539
2540 /* So that the next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2543 }
2544 }
2545 break;
2546 default:
2547 break;
2548 }
2549 return NULL_TREE;
2550 }
2551
2552 /* Warn when a switch case falls through. */
2553
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2556 {
2557 if (!warn_implicit_fallthrough)
2558 return;
2559
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2565
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2569 }
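/* Editor's illustration: with -Wimplicit-fallthrough the walk above
   reports

     case 1:
       n++;      // warning: this statement may fall through
     case 2:     // note: here
       ...

   and is silenced by a "falls through" comment, by
   __attribute__ ((fallthrough)), or by a C++17/C23 [[fallthrough]];
   statement placed before the next label.  */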
2570
2571 /* Callback for walk_gimple_seq. */
2572
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2576 {
2577 gimple *stmt = gsi_stmt (*gsi_p);
2578
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2581 {
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2592 {
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2595 {
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2598 }
2599
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2602
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2606 {
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2610 {
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2615 }
2616
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2620
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2623 }
2624
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2627 {
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2630 {
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2633 {
2634 found = true;
2635 break;
2636 }
2637 }
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2639 ;
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2644 }
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2648 }
2649 break;
2650 default:
2651 break;
2652 }
2653 return NULL_TREE;
2654 }
2655
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2657
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2660 {
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2671 }
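/* Editor's illustration: the pedwarn above triggers when the attribute
   is not immediately followed by a case or default label:

     case 1:
       [[fallthrough]];   // OK if "case 2:" comes right after
       n++;               // but here a statement intervenes -> pedwarn
     case 2:
       ...  */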
2672
2673 \f
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2676
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2679 {
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2686
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2691
2692 if (SWITCH_BODY (switch_expr))
2693 {
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2699
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2704
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2712
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2715
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2717
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2724
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2727
2728 if (gimplify_ctxp->live_switch_vars)
2729 {
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2732 }
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2734
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2737
2738 bool add_bind = false;
2739 if (!default_case)
2740 {
2741 glabel *new_default;
2742
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2747 {
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2750 }
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2753 }
2754 else if (old_in_switch_expr)
2755 {
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2758 {
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2762 }
2763 }
2764
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2773 {
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2780 }
2781 else
2782 {
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2785 }
2786 labels.release ();
2787 }
2788 else
2789 gcc_unreachable ();
2790
2791 return GS_ALL_DONE;
2792 }
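/* Editor's illustration (label names invented): "switch (x) { case 1:
   f (); }" gimplifies to roughly

     switch (x) <default: D.123, case 1: D.456>
     D.456:
       f ();
     D.123:

   optionally wrapped in a GIMPLE_BIND together with its body when this
   switch is nested in another SWITCH_EXPR being gimplified, so that
   -Wimplicit-fallthrough can treat the whole region as one unit.  */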
2793
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2795
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2798 {
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2801
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2806
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2813
2814 return GS_ALL_DONE;
2815 }
2816
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2818
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2821 {
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2824
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2832
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2838
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2845
2846 return GS_ALL_DONE;
2847 }
2848
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2851
2852 tree
2853 build_and_jump (tree *label_p)
2854 {
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2858
2859 if (*label_p == NULL_TREE)
2860 {
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2863 }
2864
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2866 }
2867
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2871
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2874 {
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2877
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2881
2882 return GS_OK;
2883 }
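/* Editor's illustration: an EXIT_EXPR <cond> inside a loop body becomes

     if (cond) goto exit_label;

   with exit_label created on demand by build_and_jump and finally
   emitted by gimplify_loop_expr after the loop's back-edge goto.  */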
2884
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2889
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2894
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2897 {
2898 tree expr = *expr_p;
2899 tree type;
2900
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2902
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2907
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2912 {
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2917
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2924
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2927
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2933 }
2934 }
2935
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2940 ==>
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2946
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2949 {
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2953
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2958
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2963
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2971
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2978
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2984
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2988 }
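/* Editor's illustration: for

     int a[10];
     int *p = (int *) &a;

   the cast is folded into the address computation, yielding

     p = &a[0];

   which keeps the array reference visible to later optimizers.  */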
2989
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2992
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2995 {
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2998
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3001
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3005
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3009 {
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3011
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3017
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3022 }
3023
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3029
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3033
3034 return GS_OK;
3035 }
3036
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3039
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3042 {
3043 tree decl = *expr_p;
3044
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit is size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly. */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3057 {
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3060 }
3061
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3065
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3068 {
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3071 }
3072
3073 return GS_ALL_DONE;
3074 }
3075
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3077
3078 static void
3079 recalculate_side_effects (tree t)
3080 {
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3084
3085 switch (TREE_CODE_CLASS (code))
3086 {
3087 case tcc_expression:
3088 switch (code)
3089 {
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3100
3101 default:
3102 break;
3103 }
3104 /* Fall through. */
3105
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3113 {
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3117 }
3118 break;
3119
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3123
3124 default:
3125 gcc_unreachable ();
3126 }
3127 }
3128
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3131
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3137
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3144
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3147
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3150
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3154 {
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3160
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3164
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3167 {
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3172
3173 if (handled_component_p (*p))
3174 ;
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3182
3183 expr_stack.safe_push (*p);
3184 }
3185
3186 gcc_assert (expr_stack.length ());
3187
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3190
3191 Java requires that we elaborate nodes in source order. That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3196
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3200
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3206
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3209 {
3210 tree t = expr_stack[i];
3211
3212 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3213 {
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3216 gimplified. */
3217 if (TREE_OPERAND (t, 2) == NULL_TREE)
3218 {
3219 tree low = unshare_expr (array_ref_low_bound (t));
3220 if (!is_gimple_min_invariant (low))
3221 {
3222 TREE_OPERAND (t, 2) = low;
3223 }
3224 }
3225
3226 if (TREE_OPERAND (t, 3) == NULL_TREE)
3227 {
3228 tree elmt_size = array_ref_element_size (t);
3229 if (!is_gimple_min_invariant (elmt_size))
3230 {
3231 elmt_size = unshare_expr (elmt_size);
3232 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3234
3235 /* Divide the element size by the alignment of the element
3236 type (above). */
3237 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3238 elmt_size, factor);
3239
3240 TREE_OPERAND (t, 3) = elmt_size;
3241 }
3242 }
3243 need_non_reg = true;
3244 }
3245 else if (TREE_CODE (t) == COMPONENT_REF)
3246 {
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t, 2) == NULL_TREE)
3249 {
3250 tree offset = component_ref_field_offset (t);
3251 if (!is_gimple_min_invariant (offset))
3252 {
3253 offset = unshare_expr (offset);
3254 tree field = TREE_OPERAND (t, 1);
3255 tree factor
3256 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3257
3258 /* Divide the offset by its alignment. */
3259 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3260 offset, factor);
3261
3262 TREE_OPERAND (t, 2) = offset;
3263 }
3264 }
3265 need_non_reg = true;
3266 }
3267 }
3268
3269 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3270 so as to match the min_lval predicate. Failure to do so may result
3271 in the creation of large aggregate temporaries. */
3272 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3273 fallback | fb_lvalue);
3274 ret = MIN (ret, tret);
3275 if (ret == GS_ERROR)
3276 return GS_ERROR;
3277
3278 /* Step 2a: if we have component references we do not support on
3279 registers then make sure the base isn't a register. Of course
3280 we can only do so if an rvalue is OK. */
3281 if (need_non_reg && (fallback & fb_rvalue))
3282 prepare_gimple_addressable (p, pre_p);
3283
3284 /* Step 3: gimplify size expressions and the indices and operands of
3285 ARRAY_REF. During this loop we also remove any useless conversions. */
3286
3287 for (; expr_stack.length () > 0; )
3288 {
3289 tree t = expr_stack.pop ();
3290
3291 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3292 {
3293 /* Gimplify the low bound and element type size. */
3294 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3295 is_gimple_reg, fb_rvalue);
3296 ret = MIN (ret, tret);
3297
3298 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3299 is_gimple_reg, fb_rvalue);
3300 ret = MIN (ret, tret);
3301
3302 /* Gimplify the dimension. */
3303 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3304 is_gimple_val, fb_rvalue);
3305 ret = MIN (ret, tret);
3306 }
3307 else if (TREE_CODE (t) == COMPONENT_REF)
3308 {
3309 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3312 }
3313
3314 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3315
3316 /* The innermost expression P may have originally had
3317 TREE_SIDE_EFFECTS set which would have caused all the outer
3318 expressions in *EXPR_P leading to P to also have had
3319 TREE_SIDE_EFFECTS set. */
3320 recalculate_side_effects (t);
3321 }
3322
3323 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3324 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3325 {
3326 canonicalize_component_ref (expr_p);
3327 }
3328
3329 expr_stack.release ();
3330
3331 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3332
3333 return ret;
3334 }
3335
3336 /* Gimplify the self modifying expression pointed to by EXPR_P
3337 (++, --, +=, -=).
3338
3339 PRE_P points to the list where side effects that must happen before
3340 *EXPR_P should be stored.
3341
3342 POST_P points to the list where side effects that must happen after
3343 *EXPR_P should be stored.
3344
3345 WANT_VALUE is nonzero iff we want to use the value of this expression
3346 in another expression.
3347
3348 ARITH_TYPE is the type the computation should be performed in. */
3349
3350 enum gimplify_status
3351 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3352 bool want_value, tree arith_type)
3353 {
3354 enum tree_code code;
3355 tree lhs, lvalue, rhs, t1;
3356 gimple_seq post = NULL, *orig_post_p = post_p;
3357 bool postfix;
3358 enum tree_code arith_code;
3359 enum gimplify_status ret;
3360 location_t loc = EXPR_LOCATION (*expr_p);
3361
3362 code = TREE_CODE (*expr_p);
3363
3364 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3365 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3366
3367 /* Prefix or postfix? */
3368 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3369 /* Faster to treat as prefix if result is not used. */
3370 postfix = want_value;
3371 else
3372 postfix = false;
3373
3374 /* For postfix, make sure the inner expression's post side effects
3375 are executed after side effects from this expression. */
3376 if (postfix)
3377 post_p = &post;
3378
3379 /* Add or subtract? */
3380 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3381 arith_code = PLUS_EXPR;
3382 else
3383 arith_code = MINUS_EXPR;
3384
3385 /* Gimplify the LHS into a GIMPLE lvalue. */
3386 lvalue = TREE_OPERAND (*expr_p, 0);
3387 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3388 if (ret == GS_ERROR)
3389 return ret;
3390
3391 /* Extract the operands to the arithmetic operation. */
3392 lhs = lvalue;
3393 rhs = TREE_OPERAND (*expr_p, 1);
3394
3395 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3396 that both as the result value and in the postqueue operation. */
3397 if (postfix)
3398 {
3399 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3400 if (ret == GS_ERROR)
3401 return ret;
3402
3403 lhs = get_initialized_tmp_var (lhs, pre_p);
3404 }
3405
3406 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3407 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3408 {
3409 rhs = convert_to_ptrofftype_loc (loc, rhs);
3410 if (arith_code == MINUS_EXPR)
3411 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3412 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3413 }
3414 else
3415 t1 = fold_convert (TREE_TYPE (*expr_p),
3416 fold_build2 (arith_code, arith_type,
3417 fold_convert (arith_type, lhs),
3418 fold_convert (arith_type, rhs)));
3419
3420 if (postfix)
3421 {
3422 gimplify_assign (lvalue, t1, pre_p);
3423 gimplify_seq_add_seq (orig_post_p, post);
3424 *expr_p = lhs;
3425 return GS_ALL_DONE;
3426 }
3427 else
3428 {
3429 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3430 return GS_OK;
3431 }
3432 }
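/* Editor's illustration (temporary name invented): when the value is
   used, the postfix "y = x++;" is handled by the code above as

     x.0 = x;        // LHS evaluated to an rvalue temporary
     x = x.0 + 1;    // the update
     y = x.0;        // the saved value is the result

   whereas a bare "x++;" is treated as the cheaper prefix form.  */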
3433
3434 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3435
3436 static void
3437 maybe_with_size_expr (tree *expr_p)
3438 {
3439 tree expr = *expr_p;
3440 tree type = TREE_TYPE (expr);
3441 tree size;
3442
3443 /* If we've already wrapped this or the type is error_mark_node, we can't do
3444 anything. */
3445 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3446 || type == error_mark_node)
3447 return;
3448
3449 /* If the size isn't known or is a constant, we have nothing to do. */
3450 size = TYPE_SIZE_UNIT (type);
3451 if (!size || poly_int_tree_p (size))
3452 return;
3453
3454 /* Otherwise, make a WITH_SIZE_EXPR. */
3455 size = unshare_expr (size);
3456 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3457 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3458 }
3459
3460 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3461 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3462 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3463 gimplified to an SSA name. */
3464
3465 enum gimplify_status
3466 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3467 bool allow_ssa)
3468 {
3469 bool (*test) (tree);
3470 fallback_t fb;
3471
3472 /* In general, we allow lvalues for function arguments to avoid
3473 extra overhead of copying large aggregates out of even larger
3474 aggregates into temporaries only to copy the temporaries to
3475 the argument list. Make optimizers happy by pulling out to
3476 temporaries those types that fit in registers. */
3477 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3478 test = is_gimple_val, fb = fb_rvalue;
3479 else
3480 {
3481 test = is_gimple_lvalue, fb = fb_either;
3482 /* Also strip a TARGET_EXPR that would force an extra copy. */
3483 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3484 {
3485 tree init = TARGET_EXPR_INITIAL (*arg_p);
3486 if (init
3487 && !VOID_TYPE_P (TREE_TYPE (init)))
3488 *arg_p = init;
3489 }
3490 }
3491
3492 /* If this is a variable sized type, we must remember the size. */
3493 maybe_with_size_expr (arg_p);
3494
3495 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3496 /* Make sure arguments have the same location as the function call
3497 itself. */
3498 protected_set_expr_location (*arg_p, call_location);
3499
3500 /* There is a sequence point before a function call. Side effects in
3501 the argument list must occur before the actual call. So, when
3502 gimplifying arguments, force gimplify_expr to use an internal
3503 post queue which is then appended to the end of PRE_P. */
3504 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3505 }
3506
3507 /* Don't fold inside offloading or taskreg regions: it can break code by
3508 adding decl references that weren't in the source. We'll do it during
3509 omplower pass instead. */
3510
3511 static bool
3512 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3513 {
3514 struct gimplify_omp_ctx *ctx;
3515 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3516 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3517 return false;
3518 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3519 return false;
3520 /* Delay folding of builtins until the IL is in consistent state
3521 so the diagnostic machinery can do a better job. */
3522 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3523 return false;
3524 return fold_stmt (gsi);
3525 }
3526
3527 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3528 WANT_VALUE is true if the result of the call is desired. */
3529
3530 static enum gimplify_status
3531 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3532 {
3533 tree fndecl, parms, p, fnptrtype;
3534 enum gimplify_status ret;
3535 int i, nargs;
3536 gcall *call;
3537 bool builtin_va_start_p = false;
3538 location_t loc = EXPR_LOCATION (*expr_p);
3539
3540 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3541
3542 /* For reliable diagnostics during inlining, it is necessary that
3543 every call_expr be annotated with file and line. */
3544 if (! EXPR_HAS_LOCATION (*expr_p))
3545 SET_EXPR_LOCATION (*expr_p, input_location);
3546
3547 /* Gimplify internal functions created in the FEs. */
3548 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3549 {
3550 if (want_value)
3551 return GS_ALL_DONE;
3552
3553 nargs = call_expr_nargs (*expr_p);
3554 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3555 auto_vec<tree> vargs (nargs);
3556
3557 if (ifn == IFN_ASSUME)
3558 {
3559 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3560 {
3561 /* If the [[assume (cond)]]; condition is simple
3562 enough and can be evaluated unconditionally
3563 without side-effects, expand it as
3564 if (!cond) __builtin_unreachable (); */
3565 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3566 *expr_p = build3 (COND_EXPR, void_type_node,
3567 CALL_EXPR_ARG (*expr_p, 0), void_node,
3568 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3569 fndecl, 0));
3570 return GS_OK;
3571 }
3572 /* If not optimizing, ignore the assumptions. */
3573 if (!optimize || seen_error ())
3574 {
3575 *expr_p = NULL_TREE;
3576 return GS_ALL_DONE;
3577 }
3578 /* Temporarily, until gimple lowering, transform
3579 .ASSUME (cond);
3580 into:
3581 [[assume (guard)]]
3582 {
3583 guard = cond;
3584 }
3585 such that gimple lowering can outline the condition into
3586 a separate function easily. */
3587 tree guard = create_tmp_var (boolean_type_node);
3588 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3589 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3590 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3591 push_gimplify_context ();
3592 gimple_seq body = NULL;
3593 gimple *g = gimplify_and_return_first (*expr_p, &body);
3594 pop_gimplify_context (g);
3595 g = gimple_build_assume (guard, body);
3596 gimple_set_location (g, loc);
3597 gimplify_seq_add_stmt (pre_p, g);
3598 *expr_p = NULL_TREE;
3599 return GS_ALL_DONE;
3600 }
3601
3602 for (i = 0; i < nargs; i++)
3603 {
3604 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3605 EXPR_LOCATION (*expr_p));
3606 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3607 }
3608
3609 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3610 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3611 gimplify_seq_add_stmt (pre_p, call);
3612 return GS_ALL_DONE;
3613 }
3614
3615 /* This may be a call to a builtin function.
3616
3617 Builtin function calls may be transformed into different
3618 (and more efficient) builtin function calls under certain
3619 circumstances. Unfortunately, gimplification can muck things
3620 up enough that the builtin expanders are not aware that certain
3621 transformations are still valid.
3622
3623 So we attempt transformation/gimplification of the call before
3624 we gimplify the CALL_EXPR. At this time we do not manage to
3625 transform all calls in the same manner as the expanders do, but
3626 we do transform most of them. */
3627 fndecl = get_callee_fndecl (*expr_p);
3628 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3629 switch (DECL_FUNCTION_CODE (fndecl))
3630 {
3631 CASE_BUILT_IN_ALLOCA:
3632 /* If the call has been built for a variable-sized object, then we
3633 want to restore the stack level when the enclosing BIND_EXPR is
3634 exited to reclaim the allocated space; otherwise, we precisely
3635 need to do the opposite and preserve the latest stack level. */
3636 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3637 gimplify_ctxp->save_stack = true;
3638 else
3639 gimplify_ctxp->keep_stack = true;
3640 break;
3641
3642 case BUILT_IN_VA_START:
3643 {
3644 builtin_va_start_p = TRUE;
3645 if (call_expr_nargs (*expr_p) < 2)
3646 {
3647 error ("too few arguments to function %<va_start%>");
3648 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3649 return GS_OK;
3650 }
3651
3652 if (fold_builtin_next_arg (*expr_p, true))
3653 {
3654 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3655 return GS_OK;
3656 }
3657 break;
3658 }
3659
3660 case BUILT_IN_EH_RETURN:
3661 cfun->calls_eh_return = true;
3662 break;
3663
3664 case BUILT_IN_CLEAR_PADDING:
3665 if (call_expr_nargs (*expr_p) == 1)
3666 {
3667 /* Remember the original type of the argument in an internal
3668 dummy second argument, since in GIMPLE pointer conversions
3669 are useless. Also mark this call as not for automatic
3670 initialization in the internal dummy third argument. */
3671 p = CALL_EXPR_ARG (*expr_p, 0);
3672 *expr_p
3673 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3674 build_zero_cst (TREE_TYPE (p)));
3675 return GS_OK;
3676 }
3677 break;
3678
3679 default:
3680 ;
3681 }
3682 if (fndecl && fndecl_built_in_p (fndecl))
3683 {
3684 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3685 if (new_tree && new_tree != *expr_p)
3686 {
3687 /* There was a transformation of this call which computes the
3688 same value, but in a more efficient way. Return and try
3689 again. */
3690 *expr_p = new_tree;
3691 return GS_OK;
3692 }
3693 }
3694
3695 /* Remember the original function pointer type. */
3696 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3697
3698 if (flag_openmp
3699 && fndecl
3700 && cfun
3701 && (cfun->curr_properties & PROP_gimple_any) == 0)
3702 {
3703 tree variant = omp_resolve_declare_variant (fndecl);
3704 if (variant != fndecl)
3705 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3706 }
3707
3708 /* There is a sequence point before the call, so any side effects in
3709 the calling expression must occur before the actual call. Force
3710 gimplify_expr to use an internal post queue. */
3711 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3712 is_gimple_call_addr, fb_rvalue);
3713
3714 if (ret == GS_ERROR)
3715 return GS_ERROR;
3716
3717 nargs = call_expr_nargs (*expr_p);
3718
3719 /* Get argument types for verification. */
3720 fndecl = get_callee_fndecl (*expr_p);
3721 parms = NULL_TREE;
3722 if (fndecl)
3723 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3724 else
3725 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3726
3727 if (fndecl && DECL_ARGUMENTS (fndecl))
3728 p = DECL_ARGUMENTS (fndecl);
3729 else if (parms)
3730 p = parms;
3731 else
3732 p = NULL_TREE;
3733 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3734 ;
3735
3736 /* If the last argument is __builtin_va_arg_pack () and it is not
3737 passed as a named argument, decrease the number of CALL_EXPR
3738 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3739 if (!p
3740 && i < nargs
3741 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3742 {
3743 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3744 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3745
3746 if (last_arg_fndecl
3747 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3748 {
3749 tree call = *expr_p;
3750
3751 --nargs;
3752 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3753 CALL_EXPR_FN (call),
3754 nargs, CALL_EXPR_ARGP (call));
3755
3756 /* Copy all CALL_EXPR flags, location and block, except the
3757 CALL_EXPR_VA_ARG_PACK flag. */
3758 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3759 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3760 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3761 = CALL_EXPR_RETURN_SLOT_OPT (call);
3762 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3763 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3764
3765 /* Set CALL_EXPR_VA_ARG_PACK. */
3766 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3767 }
3768 }
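
/* For illustration, this mirrors the documented use of
   __builtin_va_arg_pack in always-inlined wrappers (a sketch):

     extern int myprintf (const char *, ...);
     extern inline __attribute__ ((always_inline, gnu_inline)) int
     myprintf (const char *fmt, ...)
     {
       return printf (fmt, __builtin_va_arg_pack ());
     }

   The inner printf call reaching this point has
   __builtin_va_arg_pack () as its last argument; it is dropped from
   the argument list and CALL_EXPR_VA_ARG_PACK is set instead, so the
   inliner can substitute the caller's remaining arguments.  */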
3769
3770 /* If the call returns twice then after building the CFG the call
3771 argument computations will no longer dominate the call because
3772 we add an abnormal incoming edge to the call. So do not use SSA
3773 vars there. */
3774 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3775
3776 /* Gimplify the function arguments. */
3777 if (nargs > 0)
3778 {
3779 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3780 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3781 PUSH_ARGS_REVERSED ? i-- : i++)
3782 {
3783 enum gimplify_status t;
3784
3785 /* Avoid gimplifying the second argument to va_start, which needs to
3786 be the plain PARM_DECL. */
3787 if ((i != 1) || !builtin_va_start_p)
3788 {
3789 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3790 EXPR_LOCATION (*expr_p), ! returns_twice);
3791
3792 if (t == GS_ERROR)
3793 ret = GS_ERROR;
3794 }
3795 }
3796 }
3797
3798 /* Gimplify the static chain. */
3799 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3800 {
3801 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3802 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3803 else
3804 {
3805 enum gimplify_status t;
3806 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3807 EXPR_LOCATION (*expr_p), ! returns_twice);
3808 if (t == GS_ERROR)
3809 ret = GS_ERROR;
3810 }
3811 }
3812
3813 /* Verify the function result. */
3814 if (want_value && fndecl
3815 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3816 {
3817 error_at (loc, "using result of function returning %<void%>");
3818 ret = GS_ERROR;
3819 }
3820
3821 /* Try this again in case gimplification exposed something. */
3822 if (ret != GS_ERROR)
3823 {
3824 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3825
3826 if (new_tree && new_tree != *expr_p)
3827 {
3828 /* There was a transformation of this call which computes the
3829 same value, but in a more efficient way. Return and try
3830 again. */
3831 *expr_p = new_tree;
3832 return GS_OK;
3833 }
3834 }
3835 else
3836 {
3837 *expr_p = error_mark_node;
3838 return GS_ERROR;
3839 }
3840
3841 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS
3842 on the call expression itself. This allows us to eliminate
3843 redundant or useless calls to "const" functions. */
3844 if (TREE_CODE (*expr_p) == CALL_EXPR)
3845 {
3846 int flags = call_expr_flags (*expr_p);
3847 if (flags & (ECF_CONST | ECF_PURE)
3848 /* An infinite loop is considered a side effect. */
3849 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3850 TREE_SIDE_EFFECTS (*expr_p) = 0;
3851 }
3852
3853 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3854 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3855 form and delegate the creation of a GIMPLE_CALL to
3856 gimplify_modify_expr. This is always possible because when
3857 WANT_VALUE is true, the caller wants the result of this call into
3858 a temporary, which means that we will emit an INIT_EXPR in
3859 internal_get_tmp_var which will then be handled by
3860 gimplify_modify_expr. */
3861 if (!want_value)
3862 {
3863 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3864 have to do is replicate it as a GIMPLE_CALL tuple. */
3865 gimple_stmt_iterator gsi;
3866 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3867 notice_special_calls (call);
3868 gimplify_seq_add_stmt (pre_p, call);
3869 gsi = gsi_last (*pre_p);
3870 maybe_fold_stmt (&gsi);
3871 *expr_p = NULL_TREE;
3872 }
3873 else
3874 /* Remember the original function type. */
3875 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3876 CALL_EXPR_FN (*expr_p));
3877
3878 return ret;
3879 }
3880
3881 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3882 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3883
3884 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3885 condition is true or false, respectively. If null, we should generate
3886 our own to skip over the evaluation of this specific expression.
3887
3888 LOCUS is the source location of the COND_EXPR.
3889
3890 This function is the tree equivalent of do_jump.
3891
3892 shortcut_cond_r should only be called by shortcut_cond_expr. */
3893
3894 static tree
3895 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3896 location_t locus)
3897 {
3898 tree local_label = NULL_TREE;
3899 tree t, expr = NULL;
3900
3901 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3902 retain the shortcut semantics. Just insert the gotos here;
3903 shortcut_cond_expr will append the real blocks later. */
3904 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3905 {
3906 location_t new_locus;
3907
3908 /* Turn if (a && b) into
3909
3910 if (a); else goto no;
3911 if (b) goto yes; else goto no;
3912 (no:) */
3913
3914 if (false_label_p == NULL)
3915 false_label_p = &local_label;
3916
3917 /* Keep the original source location on the first 'if'. */
3918 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3919 append_to_statement_list (t, &expr);
3920
3921 /* Set the source location of the && on the second 'if'. */
3922 new_locus = rexpr_location (pred, locus);
3923 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3924 new_locus);
3925 append_to_statement_list (t, &expr);
3926 }
3927 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3928 {
3929 location_t new_locus;
3930
3931 /* Turn if (a || b) into
3932
3933 if (a) goto yes;
3934 if (b) goto yes; else goto no;
3935 (yes:) */
3936
3937 if (true_label_p == NULL)
3938 true_label_p = &local_label;
3939
3940 /* Keep the original source location on the first 'if'. */
3941 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3942 append_to_statement_list (t, &expr);
3943
3944 /* Set the source location of the || on the second 'if'. */
3945 new_locus = rexpr_location (pred, locus);
3946 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3947 new_locus);
3948 append_to_statement_list (t, &expr);
3949 }
3950 else if (TREE_CODE (pred) == COND_EXPR
3951 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3952 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3953 {
3954 location_t new_locus;
3955
3956 /* As long as we're messing with gotos, turn if (a ? b : c) into
3957 if (a)
3958 if (b) goto yes; else goto no;
3959 else
3960 if (c) goto yes; else goto no;
3961
3962 Don't do this if one of the arms has void type, which can happen
3963 in C++ when the arm is a throw. */
3964
3965 /* Keep the original source location on the first 'if'. Set the source
3966 location of the ? on the second 'if'. */
3967 new_locus = rexpr_location (pred, locus);
3968 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3969 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3970 false_label_p, locus),
3971 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3972 false_label_p, new_locus));
3973 }
3974 else
3975 {
3976 expr = build3 (COND_EXPR, void_type_node, pred,
3977 build_and_jump (true_label_p),
3978 build_and_jump (false_label_p));
3979 SET_EXPR_LOCATION (expr, locus);
3980 }
3981
3982 if (local_label)
3983 {
3984 t = build1 (LABEL_EXPR, void_type_node, local_label);
3985 append_to_statement_list (t, &expr);
3986 }
3987
3988 return expr;
3989 }
3990
3991 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3992 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3993 statement, if it is the last one. Otherwise, return NULL. */
3994
3995 static tree
3996 find_goto (tree expr)
3997 {
3998 if (!expr)
3999 return NULL_TREE;
4000
4001 if (TREE_CODE (expr) == GOTO_EXPR)
4002 return expr;
4003
4004 if (TREE_CODE (expr) != STATEMENT_LIST)
4005 return NULL_TREE;
4006
4007 tree_stmt_iterator i = tsi_start (expr);
4008
4009 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4010 tsi_next (&i);
4011
4012 if (!tsi_one_before_end_p (i))
4013 return NULL_TREE;
4014
4015 return find_goto (tsi_stmt (i));
4016 }
4017
4018 /* Same as find_goto, except that it returns NULL if the destination
4019 is not a LABEL_DECL. */
4020
4021 static inline tree
4022 find_goto_label (tree expr)
4023 {
4024 tree dest = find_goto (expr);
4025 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4026 return dest;
4027 return NULL_TREE;
4028 }
4029
4030 /* Given a conditional expression EXPR with short-circuit boolean
4031 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4032 predicate apart into the equivalent sequence of conditionals. */
4033
4034 static tree
4035 shortcut_cond_expr (tree expr)
4036 {
4037 tree pred = TREE_OPERAND (expr, 0);
4038 tree then_ = TREE_OPERAND (expr, 1);
4039 tree else_ = TREE_OPERAND (expr, 2);
4040 tree true_label, false_label, end_label, t;
4041 tree *true_label_p;
4042 tree *false_label_p;
4043 bool emit_end, emit_false, jump_over_else;
4044 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4045 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4046
4047 /* First do simple transformations. */
4048 if (!else_se)
4049 {
4050 /* If there is no 'else', turn
4051 if (a && b) then c
4052 into
4053 if (a) if (b) then c. */
4054 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4055 {
4056 /* Keep the original source location on the first 'if'. */
4057 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4058 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4059 /* Set the source location of the && on the second 'if'. */
4060 if (rexpr_has_location (pred))
4061 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4062 then_ = shortcut_cond_expr (expr);
4063 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4064 pred = TREE_OPERAND (pred, 0);
4065 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4066 SET_EXPR_LOCATION (expr, locus);
4067 }
4068 }
4069
4070 if (!then_se)
4071 {
4072 /* If there is no 'then', turn
4073 if (a || b); else d
4074 into
4075 if (a); else if (b); else d. */
4076 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4077 {
4078 /* Keep the original source location on the first 'if'. */
4079 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4080 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4081 /* Set the source location of the || on the second 'if'. */
4082 if (rexpr_has_location (pred))
4083 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4084 else_ = shortcut_cond_expr (expr);
4085 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4086 pred = TREE_OPERAND (pred, 0);
4087 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4088 SET_EXPR_LOCATION (expr, locus);
4089 }
4090 }
4091
4092 /* If we're done, great. */
4093 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4094 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4095 return expr;
4096
4097 /* Otherwise we need to mess with gotos. Change
4098 if (a) c; else d;
4099 to
4100 if (a); else goto no;
4101 c; goto end;
4102 no: d; end:
4103 and recursively gimplify the condition. */
4104
4105 true_label = false_label = end_label = NULL_TREE;
4106
4107 /* If our arms just jump somewhere, hijack those labels so we don't
4108 generate jumps to jumps. */
4109
4110 if (tree then_goto = find_goto_label (then_))
4111 {
4112 true_label = GOTO_DESTINATION (then_goto);
4113 then_ = NULL;
4114 then_se = false;
4115 }
4116
4117 if (tree else_goto = find_goto_label (else_))
4118 {
4119 false_label = GOTO_DESTINATION (else_goto);
4120 else_ = NULL;
4121 else_se = false;
4122 }
4123
4124 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4125 if (true_label)
4126 true_label_p = &true_label;
4127 else
4128 true_label_p = NULL;
4129
4130 /* The 'else' branch also needs a label if it contains interesting code. */
4131 if (false_label || else_se)
4132 false_label_p = &false_label;
4133 else
4134 false_label_p = NULL;
4135
4136 /* If there was nothing else in our arms, just forward the label(s). */
4137 if (!then_se && !else_se)
4138 return shortcut_cond_r (pred, true_label_p, false_label_p,
4139 EXPR_LOC_OR_LOC (expr, input_location));
4140
4141 /* If our last subexpression already has a terminal label, reuse it. */
4142 if (else_se)
4143 t = expr_last (else_);
4144 else if (then_se)
4145 t = expr_last (then_);
4146 else
4147 t = NULL;
4148 if (t && TREE_CODE (t) == LABEL_EXPR)
4149 end_label = LABEL_EXPR_LABEL (t);
4150
4151 /* If we don't care about jumping to the 'else' branch, jump to the end
4152 if the condition is false. */
4153 if (!false_label_p)
4154 false_label_p = &end_label;
4155
4156 /* We only want to emit these labels if we aren't hijacking them. */
4157 emit_end = (end_label == NULL_TREE);
4158 emit_false = (false_label == NULL_TREE);
4159
4160 /* We only emit the jump over the else clause if we have to--if the
4161 then clause may fall through. Otherwise we can wind up with a
4162 useless jump and a useless label at the end of gimplified code,
4163 which will cause us to think that this conditional as a whole
4164 falls through even if it doesn't. If we then inline a function
4165 which ends with such a condition, that can cause us to issue an
4166 inappropriate warning about control reaching the end of a
4167 non-void function. */
4168 jump_over_else = block_may_fallthru (then_);
4169
4170 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4171 EXPR_LOC_OR_LOC (expr, input_location));
4172
4173 expr = NULL;
4174 append_to_statement_list (pred, &expr);
4175
4176 append_to_statement_list (then_, &expr);
4177 if (else_se)
4178 {
4179 if (jump_over_else)
4180 {
4181 tree last = expr_last (expr);
4182 t = build_and_jump (&end_label);
4183 if (rexpr_has_location (last))
4184 SET_EXPR_LOCATION (t, rexpr_location (last));
4185 append_to_statement_list (t, &expr);
4186 }
4187 if (emit_false)
4188 {
4189 t = build1 (LABEL_EXPR, void_type_node, false_label);
4190 append_to_statement_list (t, &expr);
4191 }
4192 append_to_statement_list (else_, &expr);
4193 }
4194 if (emit_end && end_label)
4195 {
4196 t = build1 (LABEL_EXPR, void_type_node, end_label);
4197 append_to_statement_list (t, &expr);
4198 }
4199
4200 return expr;
4201 }
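
/* As an end-to-end illustration, a statement such as

     if (a && b) c (); else d ();

   is rewritten by shortcut_cond_expr and shortcut_cond_r into roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     c (); goto end;
     no: d ();
     end:

   which preserves the short-circuit evaluation of the predicate.  */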
4202
4203 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4204
4205 tree
4206 gimple_boolify (tree expr)
4207 {
4208 tree type = TREE_TYPE (expr);
4209 location_t loc = EXPR_LOCATION (expr);
4210
4211 if (TREE_CODE (expr) == NE_EXPR
4212 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4213 && integer_zerop (TREE_OPERAND (expr, 1)))
4214 {
4215 tree call = TREE_OPERAND (expr, 0);
4216 tree fn = get_callee_fndecl (call);
4217
4218 /* For __builtin_expect ((long) (x), y) recurse into x as well
4219 if x is truth_value_p. */
4220 if (fn
4221 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4222 && call_expr_nargs (call) == 2)
4223 {
4224 tree arg = CALL_EXPR_ARG (call, 0);
4225 if (arg)
4226 {
4227 if (TREE_CODE (arg) == NOP_EXPR
4228 && TREE_TYPE (arg) == TREE_TYPE (call))
4229 arg = TREE_OPERAND (arg, 0);
4230 if (truth_value_p (TREE_CODE (arg)))
4231 {
4232 arg = gimple_boolify (arg);
4233 CALL_EXPR_ARG (call, 0)
4234 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4235 }
4236 }
4237 }
4238 }
4239
4240 switch (TREE_CODE (expr))
4241 {
4242 case TRUTH_AND_EXPR:
4243 case TRUTH_OR_EXPR:
4244 case TRUTH_XOR_EXPR:
4245 case TRUTH_ANDIF_EXPR:
4246 case TRUTH_ORIF_EXPR:
4247 /* Also boolify the arguments of truth exprs. */
4248 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4249 /* FALLTHRU */
4250
4251 case TRUTH_NOT_EXPR:
4252 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4253
4254 /* These expressions always produce boolean results. */
4255 if (TREE_CODE (type) != BOOLEAN_TYPE)
4256 TREE_TYPE (expr) = boolean_type_node;
4257 return expr;
4258
4259 case ANNOTATE_EXPR:
4260 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4261 {
4262 case annot_expr_ivdep_kind:
4263 case annot_expr_unroll_kind:
4264 case annot_expr_no_vector_kind:
4265 case annot_expr_vector_kind:
4266 case annot_expr_parallel_kind:
4267 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4268 if (TREE_CODE (type) != BOOLEAN_TYPE)
4269 TREE_TYPE (expr) = boolean_type_node;
4270 return expr;
4271 default:
4272 gcc_unreachable ();
4273 }
4274
4275 default:
4276 if (COMPARISON_CLASS_P (expr))
4277 {
4278 /* These expressions always produce boolean results. */
4279 if (TREE_CODE (type) != BOOLEAN_TYPE)
4280 TREE_TYPE (expr) = boolean_type_node;
4281 return expr;
4282 }
4283 /* Other expressions that get here must have boolean values, but
4284 might need to be converted to the appropriate mode. */
4285 if (TREE_CODE (type) == BOOLEAN_TYPE)
4286 return expr;
4287 return fold_convert_loc (loc, boolean_type_node, expr);
4288 }
4289 }
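
/* E.g. in C the comparison a < b has type int; gimple_boolify gives
   such a COMPARISON_CLASS_P node boolean_type_node instead.  And for
   a condition like

     if (__builtin_expect (a && b, 1)) ...

   i.e. an NE_EXPR of the __builtin_expect call against zero, it
   recurses into the first call argument, since that argument is a
   truth value merely passed as long.  */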
4290
4291 /* Given a conditional expression *EXPR_P without side effects, gimplify
4292 its operands. New statements are inserted to PRE_P. */
4293
4294 static enum gimplify_status
4295 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4296 {
4297 tree expr = *expr_p, cond;
4298 enum gimplify_status ret, tret;
4299 enum tree_code code;
4300
4301 cond = gimple_boolify (COND_EXPR_COND (expr));
4302
4303 /* We need to handle && and || specially, as their gimplification
4304 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4305 code = TREE_CODE (cond);
4306 if (code == TRUTH_ANDIF_EXPR)
4307 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4308 else if (code == TRUTH_ORIF_EXPR)
4309 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4310 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4311 COND_EXPR_COND (*expr_p) = cond;
4312
4313 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4314 is_gimple_val, fb_rvalue);
4315 ret = MIN (ret, tret);
4316 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4317 is_gimple_val, fb_rvalue);
4318
4319 return MIN (ret, tret);
4320 }
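
/* E.g. when allow_rhs_cond_expr is set, an assignment

     x = p ? a : b;

   whose arms have no side effects and cannot trap is kept as a
   COND_EXPR on the RHS, with p, a and b reduced to gimple values,
   instead of being expanded into branches; the && / || recoding above
   keeps the recursion from producing another pure COND_EXPR
   forever.  */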
4321
4322 /* Return true if evaluating EXPR could trap.
4323 EXPR is GENERIC, while tree_could_trap_p can be called
4324 only on GIMPLE. */
4325
4326 bool
4327 generic_expr_could_trap_p (tree expr)
4328 {
4329 unsigned i, n;
4330
4331 if (!expr || is_gimple_val (expr))
4332 return false;
4333
4334 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4335 return true;
4336
4337 n = TREE_OPERAND_LENGTH (expr);
4338 for (i = 0; i < n; i++)
4339 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4340 return true;
4341
4342 return false;
4343 }
4344
4345 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4346 into
4347
4348 if (p) if (p)
4349 t1 = a; a;
4350 else or else
4351 t1 = b; b;
4352 t1;
4353
4354 The second form is used when *EXPR_P is of type void.
4355
4356 PRE_P points to the list where side effects that must happen before
4357 *EXPR_P should be stored. */
4358
4359 static enum gimplify_status
4360 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4361 {
4362 tree expr = *expr_p;
4363 tree type = TREE_TYPE (expr);
4364 location_t loc = EXPR_LOCATION (expr);
4365 tree tmp, arm1, arm2;
4366 enum gimplify_status ret;
4367 tree label_true, label_false, label_cont;
4368 bool have_then_clause_p, have_else_clause_p;
4369 gcond *cond_stmt;
4370 enum tree_code pred_code;
4371 gimple_seq seq = NULL;
4372
4373 /* If this COND_EXPR has a value, copy the values into a temporary within
4374 the arms. */
4375 if (!VOID_TYPE_P (type))
4376 {
4377 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4378 tree result;
4379
4380 /* If either an rvalue is ok or we do not require an lvalue, create the
4381 temporary. But we cannot do that if the type is addressable. */
4382 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4383 && !TREE_ADDRESSABLE (type))
4384 {
4385 if (gimplify_ctxp->allow_rhs_cond_expr
4386 /* If either branch has side effects or could trap, it can't be
4387 evaluated unconditionally. */
4388 && !TREE_SIDE_EFFECTS (then_)
4389 && !generic_expr_could_trap_p (then_)
4390 && !TREE_SIDE_EFFECTS (else_)
4391 && !generic_expr_could_trap_p (else_))
4392 return gimplify_pure_cond_expr (expr_p, pre_p);
4393
4394 tmp = create_tmp_var (type, "iftmp");
4395 result = tmp;
4396 }
4397
4398 /* Otherwise, only create and copy references to the values. */
4399 else
4400 {
4401 type = build_pointer_type (type);
4402
4403 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4404 then_ = build_fold_addr_expr_loc (loc, then_);
4405
4406 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4407 else_ = build_fold_addr_expr_loc (loc, else_);
4408
4409 expr
4410 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4411
4412 tmp = create_tmp_var (type, "iftmp");
4413 result = build_simple_mem_ref_loc (loc, tmp);
4414 }
4415
4416 /* Build the new then clause, `tmp = then_;'. But don't build the
4417 assignment if the value is void; in C++ the arm can be void when it is a throw. */
4418 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4419 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4420
4421 /* Similarly, build the new else clause, `tmp = else_;'. */
4422 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4423 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4424
4425 TREE_TYPE (expr) = void_type_node;
4426 recalculate_side_effects (expr);
4427
4428 /* Move the COND_EXPR to the prequeue. */
4429 gimplify_stmt (&expr, pre_p);
4430
4431 *expr_p = result;
4432 return GS_ALL_DONE;
4433 }
4434
4435 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4436 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4437 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4438 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4439
4440 /* Make sure the condition has BOOLEAN_TYPE. */
4441 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4442
4443 /* Break apart && and || conditions. */
4444 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4445 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4446 {
4447 expr = shortcut_cond_expr (expr);
4448
4449 if (expr != *expr_p)
4450 {
4451 *expr_p = expr;
4452
4453 /* We can't rely on gimplify_expr to re-gimplify the expanded
4454 form properly, as cleanups might cause the target labels to be
4455 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4456 set up a conditional context. */
4457 gimple_push_condition ();
4458 gimplify_stmt (expr_p, &seq);
4459 gimple_pop_condition (pre_p);
4460 gimple_seq_add_seq (pre_p, seq);
4461
4462 return GS_ALL_DONE;
4463 }
4464 }
4465
4466 /* Now do the normal gimplification. */
4467
4468 /* Gimplify condition. */
4469 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4470 is_gimple_condexpr_for_cond, fb_rvalue);
4471 if (ret == GS_ERROR)
4472 return GS_ERROR;
4473 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4474
4475 gimple_push_condition ();
4476
4477 have_then_clause_p = have_else_clause_p = false;
4478 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4479 if (label_true
4480 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4481 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4482 have different locations, otherwise we end up with incorrect
4483 location information on the branches. */
4484 && (optimize
4485 || !EXPR_HAS_LOCATION (expr)
4486 || !rexpr_has_location (label_true)
4487 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4488 {
4489 have_then_clause_p = true;
4490 label_true = GOTO_DESTINATION (label_true);
4491 }
4492 else
4493 label_true = create_artificial_label (UNKNOWN_LOCATION);
4494 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4495 if (label_false
4496 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4497 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4498 have different locations, otherwise we end up with incorrect
4499 location information on the branches. */
4500 && (optimize
4501 || !EXPR_HAS_LOCATION (expr)
4502 || !rexpr_has_location (label_false)
4503 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4504 {
4505 have_else_clause_p = true;
4506 label_false = GOTO_DESTINATION (label_false);
4507 }
4508 else
4509 label_false = create_artificial_label (UNKNOWN_LOCATION);
4510
4511 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4512 &arm2);
4513 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4514 label_false);
4515 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4516 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4517 gimplify_seq_add_stmt (&seq, cond_stmt);
4518 gimple_stmt_iterator gsi = gsi_last (seq);
4519 maybe_fold_stmt (&gsi);
4520
4521 label_cont = NULL_TREE;
4522 if (!have_then_clause_p)
4523 {
4524 /* For if (...) {} else { code; } put label_true after
4525 the else block. */
4526 if (TREE_OPERAND (expr, 1) == NULL_TREE
4527 && !have_else_clause_p
4528 && TREE_OPERAND (expr, 2) != NULL_TREE)
4529 {
4530 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4531 handling that label_cont == label_true can be only reached
4532 through fallthrough from { code; }. */
4533 if (integer_zerop (COND_EXPR_COND (expr)))
4534 UNUSED_LABEL_P (label_true) = 1;
4535 label_cont = label_true;
4536 }
4537 else
4538 {
4539 bool then_side_effects
4540 = (TREE_OPERAND (expr, 1)
4541 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4542 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4543 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4544 /* For if (...) { code; } else {} or
4545 if (...) { code; } else goto label; or
4546 if (...) { code; return; } else { ... }
4547 label_cont isn't needed. */
4548 if (!have_else_clause_p
4549 && TREE_OPERAND (expr, 2) != NULL_TREE
4550 && gimple_seq_may_fallthru (seq))
4551 {
4552 gimple *g;
4553 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4554
4555 /* For if (0) { non-side-effect-code } else { code }
4556 tell -Wimplicit-fallthrough handling that label_cont can
4557 be only reached through fallthrough from { code }. */
4558 if (integer_zerop (COND_EXPR_COND (expr)))
4559 {
4560 UNUSED_LABEL_P (label_true) = 1;
4561 if (!then_side_effects)
4562 UNUSED_LABEL_P (label_cont) = 1;
4563 }
4564
4565 g = gimple_build_goto (label_cont);
4566
4567 /* GIMPLE_COND's are very low level; they have embedded
4568 gotos. This particular embedded goto should not be marked
4569 with the location of the original COND_EXPR, as it would
4570 correspond to the COND_EXPR's condition, not the ELSE or the
4571 THEN arms. To avoid marking it with the wrong location, flag
4572 it as "no location". */
4573 gimple_set_do_not_emit_location (g);
4574
4575 gimplify_seq_add_stmt (&seq, g);
4576 }
4577 }
4578 }
4579 if (!have_else_clause_p)
4580 {
4581 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4582 tell -Wimplicit-fallthrough handling that label_false can be only
4583 reached through fallthrough from { code }. */
4584 if (integer_nonzerop (COND_EXPR_COND (expr))
4585 && (TREE_OPERAND (expr, 2) == NULL_TREE
4586 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4587 UNUSED_LABEL_P (label_false) = 1;
4588 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4589 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4590 }
4591 if (label_cont)
4592 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4593
4594 gimple_pop_condition (pre_p);
4595 gimple_seq_add_seq (pre_p, seq);
4596
4597 if (ret == GS_ERROR)
4598 ; /* Do nothing. */
4599 else if (have_then_clause_p || have_else_clause_p)
4600 ret = GS_ALL_DONE;
4601 else
4602 {
4603 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4604 expr = TREE_OPERAND (expr, 0);
4605 gimplify_stmt (&expr, pre_p);
4606 }
4607
4608 *expr_p = NULL;
4609 return ret;
4610 }
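
/* A sketch of the valued case: for a register type, r = p ? a : b
   becomes roughly

     if (p) iftmp = a; else iftmp = b;
     r = iftmp;

   while for a TREE_ADDRESSABLE type, where no temporary of the type
   itself may be created, a pointer temporary is used instead:

     if (p) iftmp = &a; else iftmp = &b;
     r = *iftmp;  */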
4611
4612 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4613 to be marked addressable.
4614
4615 We cannot rely on such an expression being directly markable if a temporary
4616 has been created by the gimplification. In this case, we create another
4617 temporary and initialize it with a copy, which will become a store after we
4618 mark it addressable. This can happen if the front-end passed us something
4619 that it could not mark addressable yet, like a Fortran pass-by-reference
4620 parameter (int) floatvar. */
4621
4622 static void
4623 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4624 {
4625 while (handled_component_p (*expr_p))
4626 expr_p = &TREE_OPERAND (*expr_p, 0);
4627
4628 /* Do not allow an SSA name as the temporary. */
4629 if (is_gimple_reg (*expr_p))
4630 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4631 }
4632
4633 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4634 a call to __builtin_memcpy. */
4635
4636 static enum gimplify_status
4637 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4638 gimple_seq *seq_p)
4639 {
4640 tree t, to, to_ptr, from, from_ptr;
4641 gcall *gs;
4642 location_t loc = EXPR_LOCATION (*expr_p);
4643
4644 to = TREE_OPERAND (*expr_p, 0);
4645 from = TREE_OPERAND (*expr_p, 1);
4646
4647 /* Mark the RHS addressable. Beware that it may not be possible to do so
4648 directly if a temporary has been created by the gimplification. */
4649 prepare_gimple_addressable (&from, seq_p);
4650
4651 mark_addressable (from);
4652 from_ptr = build_fold_addr_expr_loc (loc, from);
4653 gimplify_arg (&from_ptr, seq_p, loc);
4654
4655 mark_addressable (to);
4656 to_ptr = build_fold_addr_expr_loc (loc, to);
4657 gimplify_arg (&to_ptr, seq_p, loc);
4658
4659 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4660
4661 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4662 gimple_call_set_alloca_for_var (gs, true);
4663
4664 if (want_value)
4665 {
4666 /* tmp = memcpy() */
4667 t = create_tmp_var (TREE_TYPE (to_ptr));
4668 gimple_call_set_lhs (gs, t);
4669 gimplify_seq_add_stmt (seq_p, gs);
4670
4671 *expr_p = build_simple_mem_ref (t);
4672 return GS_ALL_DONE;
4673 }
4674
4675 gimplify_seq_add_stmt (seq_p, gs);
4676 *expr_p = NULL;
4677 return GS_ALL_DONE;
4678 }
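
/* A minimal sketch, using the GNU C variable-length-array-in-struct
   extension to obtain a variable-sized assignment:

     void f (int n)
     {
       struct vs { char data[n]; } a, b;
       a = b;
     }

   The assignment a = b is replaced by the equivalent of

     __builtin_memcpy (&a, &b, n);

   after the RHS has been made addressable as above.  */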
4679
4680 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4681 a call to __builtin_memset. In this case we know that the RHS is
4682 a CONSTRUCTOR with an empty element list. */
4683
4684 static enum gimplify_status
4685 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4686 gimple_seq *seq_p)
4687 {
4688 tree t, from, to, to_ptr;
4689 gcall *gs;
4690 location_t loc = EXPR_LOCATION (*expr_p);
4691
4692 /* Assert our assumptions, to abort instead of producing wrong code
4693 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4694 not be immediately exposed. */
4695 from = TREE_OPERAND (*expr_p, 1);
4696 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4697 from = TREE_OPERAND (from, 0);
4698
4699 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4700 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4701
4702 /* Now proceed. */
4703 to = TREE_OPERAND (*expr_p, 0);
4704
4705 to_ptr = build_fold_addr_expr_loc (loc, to);
4706 gimplify_arg (&to_ptr, seq_p, loc);
4707 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4708
4709 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4710
4711 if (want_value)
4712 {
4713 /* tmp = memset() */
4714 t = create_tmp_var (TREE_TYPE (to_ptr));
4715 gimple_call_set_lhs (gs, t);
4716 gimplify_seq_add_stmt (seq_p, gs);
4717
4718 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4719 return GS_ALL_DONE;
4720 }
4721
4722 gimplify_seq_add_stmt (seq_p, gs);
4723 *expr_p = NULL;
4724 return GS_ALL_DONE;
4725 }
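
/* A sketch of this case, with OBJ and SIZE as placeholders for the
   LHS object and its (possibly variable) size in bytes: an assignment
   whose RHS is an empty CONSTRUCTOR, i.e. OBJ = {}, becomes the
   equivalent of

     __builtin_memset (&OBJ, 0, SIZE);

   with the LHS address gimplified as a call argument.  */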
4726
4727 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4728 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4729 assignment. Return non-null if we detect a potential overlap. */
4730
4731 struct gimplify_init_ctor_preeval_data
4732 {
4733 /* The base decl of the lhs object. May be NULL, in which case we
4734 have to assume the lhs is indirect. */
4735 tree lhs_base_decl;
4736
4737 /* The alias set of the lhs object. */
4738 alias_set_type lhs_alias_set;
4739 };
4740
4741 static tree
4742 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4743 {
4744 struct gimplify_init_ctor_preeval_data *data
4745 = (struct gimplify_init_ctor_preeval_data *) xdata;
4746 tree t = *tp;
4747
4748 /* If we find the base object, obviously we have overlap. */
4749 if (data->lhs_base_decl == t)
4750 return t;
4751
4752 /* If the constructor component is indirect, determine if we have a
4753 potential overlap with the lhs. The only bits of information we
4754 have to go on at this point are addressability and alias sets. */
4755 if ((INDIRECT_REF_P (t)
4756 || TREE_CODE (t) == MEM_REF)
4757 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4758 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4759 return t;
4760
4761 /* If the constructor component is a call, determine if it can hide a
4762 potential overlap with the lhs through an INDIRECT_REF like above.
4763 ??? Ugh - this is completely broken. In fact this whole analysis
4764 doesn't look conservative. */
4765 if (TREE_CODE (t) == CALL_EXPR)
4766 {
4767 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4768
4769 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4770 if (POINTER_TYPE_P (TREE_VALUE (type))
4771 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4772 && alias_sets_conflict_p (data->lhs_alias_set,
4773 get_alias_set
4774 (TREE_TYPE (TREE_VALUE (type)))))
4775 return t;
4776 }
4777
4778 if (IS_TYPE_OR_DECL_P (t))
4779 *walk_subtrees = 0;
4780 return NULL;
4781 }
4782
4783 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4784 force values that overlap with the lhs (as described by *DATA)
4785 into temporaries. */
4786
4787 static void
4788 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4789 struct gimplify_init_ctor_preeval_data *data)
4790 {
4791 enum gimplify_status one;
4792
4793 /* If the value is constant, then there's nothing to pre-evaluate. */
4794 if (TREE_CONSTANT (*expr_p))
4795 {
4796 /* Ensure it does not have side effects, it might contain a reference to
4797 the object we're initializing. */
4798 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4799 return;
4800 }
4801
4802 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4803 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4804 return;
4805
4806 /* Recurse for nested constructors. */
4807 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4808 {
4809 unsigned HOST_WIDE_INT ix;
4810 constructor_elt *ce;
4811 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4812
4813 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4814 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4815
4816 return;
4817 }
4818
4819 /* If this is a variable sized type, we must remember the size. */
4820 maybe_with_size_expr (expr_p);
4821
4822 /* Gimplify the constructor element to something appropriate for the rhs
4823 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4824 the gimplifier will consider this a store to memory. Doing this
4825 gimplification now means that we won't have to deal with complicated
4826 language-specific trees, nor trees like SAVE_EXPR that can induce
4827 exponential search behavior. */
4828 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4829 if (one == GS_ERROR)
4830 {
4831 *expr_p = NULL;
4832 return;
4833 }
4834
4835 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4836 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4837 always be true for all scalars, since is_gimple_mem_rhs insists on a
4838 temporary variable for them. */
4839 if (DECL_P (*expr_p))
4840 return;
4841
4842 /* If this is of variable size, we have no choice but to assume it doesn't
4843 overlap since we can't make a temporary for it. */
4844 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4845 return;
4846
4847 /* Otherwise, we must search for overlap ... */
4848 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4849 return;
4850
4851 /* ... and if found, force the value into a temporary. */
4852 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4853 }
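
/* For illustration, given

     struct S { struct S2 inner; int i; } a;
     a = (struct S) { .inner = a.inner, .i = 1 };

   the aggregate value a.inner survives gimplification as a memory
   reference whose base is the LHS decl itself, so the walk detects
   the overlap and the value is forced into a temporary, roughly

     tmp = a.inner; a = { .inner = tmp, .i = 1 };  */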
4854
4855 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4856 a RANGE_EXPR in a CONSTRUCTOR for an array.
4857
4858 var = lower;
4859 loop_entry:
4860 object[var] = value;
4861 if (var == upper)
4862 goto loop_exit;
4863 var = var + 1;
4864 goto loop_entry;
4865 loop_exit:
4866
4867 We increment var _after_ the loop exit check because we might otherwise
4868 fail if upper == TYPE_MAX_VALUE (type for upper).
4869
4870 Note that we never have to deal with SAVE_EXPRs here, because this has
4871 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4872
4873 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4874 gimple_seq *, bool);
4875
4876 static void
4877 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4878 tree value, tree array_elt_type,
4879 gimple_seq *pre_p, bool cleared)
4880 {
4881 tree loop_entry_label, loop_exit_label, fall_thru_label;
4882 tree var, var_type, cref, tmp;
4883
4884 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4885 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4886 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4887
4888 /* Create and initialize the index variable. */
4889 var_type = TREE_TYPE (upper);
4890 var = create_tmp_var (var_type);
4891 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4892
4893 /* Add the loop entry label. */
4894 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4895
4896 /* Build the reference. */
4897 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4898 var, NULL_TREE, NULL_TREE);
4899
4900 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4901 the store. Otherwise just assign value to the reference. */
4902
4903 if (TREE_CODE (value) == CONSTRUCTOR)
4904 /* NB we might have to call ourselves recursively through
4905 gimplify_init_ctor_eval if the value is a constructor. */
4906 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4907 pre_p, cleared);
4908 else
4909 {
4910 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4911 != GS_ERROR)
4912 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4913 }
4914
4915 /* We exit the loop when the index var is equal to the upper bound. */
4916 gimplify_seq_add_stmt (pre_p,
4917 gimple_build_cond (EQ_EXPR, var, upper,
4918 loop_exit_label, fall_thru_label));
4919
4920 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4921
4922 /* Otherwise, increment the index var... */
4923 tmp = build2 (PLUS_EXPR, var_type, var,
4924 fold_convert (var_type, integer_one_node));
4925 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4926
4927 /* ...and jump back to the loop entry. */
4928 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4929
4930 /* Add the loop exit label. */
4931 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4932 }
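
/* As a source-level illustration, a GNU range designator such as

     int a[16] = { [4 ... 11] = v };

   reaches this point as a RANGE_EXPR with lower bound 4 and upper
   bound 11, and is emitted as the loop above rather than as eight
   separate stores.  */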
4933
4934 /* A subroutine of gimplify_init_constructor. Generate individual
4935 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4936 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4937 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4938 zeroed first. */
4939
4940 static void
4941 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4942 gimple_seq *pre_p, bool cleared)
4943 {
4944 tree array_elt_type = NULL;
4945 unsigned HOST_WIDE_INT ix;
4946 tree purpose, value;
4947
4948 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4949 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4950
4951 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4952 {
4953 tree cref;
4954
4955 /* NULL values are created above for gimplification errors. */
4956 if (value == NULL)
4957 continue;
4958
4959 if (cleared && initializer_zerop (value))
4960 continue;
4961
4962 /* ??? Here's to hoping the front end fills in all of the indices,
4963 so we don't have to figure out what's missing ourselves. */
4964 gcc_assert (purpose);
4965
4966 /* Skip zero-sized fields, unless value has side-effects. This can
4967 happen with calls to functions returning an empty type, which
4968 we shouldn't discard. As a number of downstream passes don't
4969 expect sets of empty type fields, we rely on the gimplification of
4970 the MODIFY_EXPR we make below to drop the assignment statement. */
4971 if (!TREE_SIDE_EFFECTS (value)
4972 && TREE_CODE (purpose) == FIELD_DECL
4973 && is_empty_type (TREE_TYPE (purpose)))
4974 continue;
4975
4976 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4977 whole range. */
4978 if (TREE_CODE (purpose) == RANGE_EXPR)
4979 {
4980 tree lower = TREE_OPERAND (purpose, 0);
4981 tree upper = TREE_OPERAND (purpose, 1);
4982
4983 /* If the lower bound is equal to upper, just treat it as if
4984 upper was the index. */
4985 if (simple_cst_equal (lower, upper))
4986 purpose = upper;
4987 else
4988 {
4989 gimplify_init_ctor_eval_range (object, lower, upper, value,
4990 array_elt_type, pre_p, cleared);
4991 continue;
4992 }
4993 }
4994
4995 if (array_elt_type)
4996 {
4997 /* Do not use bitsizetype for ARRAY_REF indices. */
4998 if (TYPE_DOMAIN (TREE_TYPE (object)))
4999 purpose
5000 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5001 purpose);
5002 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5003 purpose, NULL_TREE, NULL_TREE);
5004 }
5005 else
5006 {
5007 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5008 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5009 unshare_expr (object), purpose, NULL_TREE);
5010 }
5011
5012 if (TREE_CODE (value) == CONSTRUCTOR
5013 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5014 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5015 pre_p, cleared);
5016 else
5017 {
5018 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5019 gimplify_and_add (init, pre_p);
5020 ggc_free (init);
5021 }
5022 }
5023 }
5024
5025 /* Return the appropriate RHS predicate for this LHS. */
5026
5027 gimple_predicate
5028 rhs_predicate_for (tree lhs)
5029 {
5030 if (is_gimple_reg (lhs))
5031 return is_gimple_reg_rhs_or_call;
5032 else
5033 return is_gimple_mem_rhs_or_call;
5034 }
5035
5036 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5037 before the LHS has been gimplified. */
5038
5039 static gimple_predicate
5040 initial_rhs_predicate_for (tree lhs)
5041 {
5042 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5043 return is_gimple_reg_rhs_or_call;
5044 else
5045 return is_gimple_mem_rhs_or_call;
5046 }
5047
5048 /* Gimplify a C99 compound literal expression. This just means adding
5049 the DECL_EXPR before the current statement and using its anonymous
5050 decl instead. */
5051
5052 static enum gimplify_status
5053 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5054 bool (*gimple_test_f) (tree),
5055 fallback_t fallback)
5056 {
5057 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5058 tree decl = DECL_EXPR_DECL (decl_s);
5059 tree init = DECL_INITIAL (decl);
5060 /* Mark the decl as addressable if the compound literal
5061 expression is addressable now; otherwise it would be marked
5062 too late, after we gimplify the initialization expression. */
5063 if (TREE_ADDRESSABLE (*expr_p))
5064 TREE_ADDRESSABLE (decl) = 1;
5065 /* Otherwise, if we don't need an lvalue and have a literal, directly
5066 substitute it. Check if it matches the gimple predicate, as
5067 otherwise we'd generate a new temporary, and we can as well just
5068 use the decl we already have. */
5069 else if (!TREE_ADDRESSABLE (decl)
5070 && !TREE_THIS_VOLATILE (decl)
5071 && init
5072 && (fallback & fb_lvalue) == 0
5073 && gimple_test_f (init))
5074 {
5075 *expr_p = init;
5076 return GS_OK;
5077 }
5078
5079 /* If the decl is not addressable, then it is being used in some
5080 expression or on the right hand side of a statement, and it can
5081 be put into a readonly data section. */
5082 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5083 TREE_READONLY (decl) = 1;
5084
5085 /* This decl isn't mentioned in the enclosing block, so add it to the
5086 list of temps. FIXME it seems a bit of a kludge to say that
5087 anonymous artificial vars aren't pushed, but everything else is. */
5088 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5089 gimple_add_tmp_var (decl);
5090
5091 gimplify_and_add (decl_s, pre_p);
5092 *expr_p = decl;
5093 return GS_OK;
5094 }
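
/* E.g. for

     int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR for the anonymous array decl is emitted to PRE_P and
   the decl replaces the compound literal, whereas for something like

     int i = (int) { 42 };

   where no lvalue is needed and the initializer already satisfies the
   gimple predicate, the initializer 42 may be substituted directly.  */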
5095
5096 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5097 return a new CONSTRUCTOR if something changed. */
5098
5099 static tree
5100 optimize_compound_literals_in_ctor (tree orig_ctor)
5101 {
5102 tree ctor = orig_ctor;
5103 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5104 unsigned int idx, num = vec_safe_length (elts);
5105
5106 for (idx = 0; idx < num; idx++)
5107 {
5108 tree value = (*elts)[idx].value;
5109 tree newval = value;
5110 if (TREE_CODE (value) == CONSTRUCTOR)
5111 newval = optimize_compound_literals_in_ctor (value);
5112 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5113 {
5114 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5115 tree decl = DECL_EXPR_DECL (decl_s);
5116 tree init = DECL_INITIAL (decl);
5117
5118 if (!TREE_ADDRESSABLE (value)
5119 && !TREE_ADDRESSABLE (decl)
5120 && init
5121 && TREE_CODE (init) == CONSTRUCTOR)
5122 newval = optimize_compound_literals_in_ctor (init);
5123 }
5124 if (newval == value)
5125 continue;
5126
5127 if (ctor == orig_ctor)
5128 {
5129 ctor = copy_node (orig_ctor);
5130 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5131 elts = CONSTRUCTOR_ELTS (ctor);
5132 }
5133 (*elts)[idx].value = newval;
5134 }
5135 return ctor;
5136 }
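
/* E.g. an initializer such as

     struct T { struct S s; } t = { .s = (struct S) { 1, 2 } };

   is rewritten so the CONSTRUCTOR for t directly contains the nested
   CONSTRUCTOR { 1, 2 }, avoiding a separate temporary for the
   compound literal, provided neither the literal nor its decl is
   addressable.  */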
5137
5138 /* A subroutine of gimplify_modify_expr. Break out elements of a
5139 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5140
5141 Note that we still need to clear any elements that don't have explicit
5142 initializers, so if not all elements are initialized we keep the
5143 original MODIFY_EXPR; we just remove all of the constructor elements.
5144
5145 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5146 GS_ERROR if we would have to create a temporary when gimplifying
5147 this constructor. Otherwise, return GS_OK.
5148
5149 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5150
5151 static enum gimplify_status
5152 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5153 bool want_value, bool notify_temp_creation)
5154 {
5155 tree object, ctor, type;
5156 enum gimplify_status ret;
5157 vec<constructor_elt, va_gc> *elts;
5158 bool cleared = false;
5159 bool is_empty_ctor = false;
5160 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5161
5162 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5163
5164 if (!notify_temp_creation)
5165 {
5166 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5167 is_gimple_lvalue, fb_lvalue);
5168 if (ret == GS_ERROR)
5169 return ret;
5170 }
5171
5172 object = TREE_OPERAND (*expr_p, 0);
5173 ctor = TREE_OPERAND (*expr_p, 1)
5174 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5175 type = TREE_TYPE (ctor);
5176 elts = CONSTRUCTOR_ELTS (ctor);
5177 ret = GS_ALL_DONE;
5178
5179 switch (TREE_CODE (type))
5180 {
5181 case RECORD_TYPE:
5182 case UNION_TYPE:
5183 case QUAL_UNION_TYPE:
5184 case ARRAY_TYPE:
5185 {
5186 /* Use readonly data for initializers of this or smaller size
5187 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5188 ratio. */
5189 const HOST_WIDE_INT min_unique_size = 64;
5190 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5191 is smaller than this, use readonly data. */
5192 const int unique_nonzero_ratio = 8;
5193 /* True if a single access of the object must be ensured. This is the
5194 case if the target is volatile, the type is non-addressable and more
5195 than one field needs to be assigned. */
5196 const bool ensure_single_access
5197 = TREE_THIS_VOLATILE (object)
5198 && !TREE_ADDRESSABLE (type)
5199 && vec_safe_length (elts) > 1;
5200 struct gimplify_init_ctor_preeval_data preeval_data;
5201 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5202 HOST_WIDE_INT num_unique_nonzero_elements;
5203 bool complete_p, valid_const_initializer;
5204
5205 /* Aggregate types must lower constructors to initialization of
5206 individual elements. The exception is that a CONSTRUCTOR node
5207 with no elements indicates zero-initialization of the whole. */
5208 if (vec_safe_is_empty (elts))
5209 {
5210 if (notify_temp_creation)
5211 return GS_OK;
5212
5213 /* The var will be initialized and so appear on lhs of
5214 assignment, it can't be TREE_READONLY anymore. */
5215 if (VAR_P (object))
5216 TREE_READONLY (object) = 0;
5217
5218 is_empty_ctor = true;
5219 break;
5220 }
5221
5222 /* Fetch information about the constructor to direct later processing.
5223 We might want to make static versions of it in various cases, and
5224 can only do so if it is known to be a valid constant initializer. */
5225 valid_const_initializer
5226 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5227 &num_unique_nonzero_elements,
5228 &num_ctor_elements, &complete_p);
5229
5230 /* If a const aggregate variable is being initialized, then it
5231 should never be a loss to promote the variable to be static. */
5232 if (valid_const_initializer
5233 && num_nonzero_elements > 1
5234 && TREE_READONLY (object)
5235 && VAR_P (object)
5236 && !DECL_REGISTER (object)
5237 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5238 /* For ctors that have many repeated nonzero elements
5239 represented through RANGE_EXPRs, prefer initializing
5240 those through runtime loops over copies of large amounts
5241 of data from readonly data section. */
5242 && (num_unique_nonzero_elements
5243 > num_nonzero_elements / unique_nonzero_ratio
5244 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5245 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5246 {
5247 if (notify_temp_creation)
5248 return GS_ERROR;
5249
5250 DECL_INITIAL (object) = ctor;
5251 TREE_STATIC (object) = 1;
5252 if (!DECL_NAME (object))
5253 DECL_NAME (object) = create_tmp_var_name ("C");
5254 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5255
5256 /* ??? C++ doesn't automatically append a .<number> to the
5257 assembler name, and even when it does, it looks at FE private
5258 data structures to figure out what that number should be,
5259 which are not set for this variable. I suppose this is
5260 important for local statics for inline functions, which aren't
5261 "local" in the object file sense. So in order to get a unique
5262 TU-local symbol, we must invoke the lhd version now. */
5263 lhd_set_decl_assembler_name (object);
5264
5265 *expr_p = NULL_TREE;
5266 break;
5267 }
5268
5269 /* The var will be initialized and so appear on the lhs of an
5270 assignment; it can't be TREE_READONLY anymore. */
5271 if (VAR_P (object) && !notify_temp_creation)
5272 TREE_READONLY (object) = 0;
5273
5274 /* If there are "lots" of initialized elements, even discounting
5275 those that are not address constants (and thus *must* be
5276 computed at runtime), then partition the constructor into
5277 constant and non-constant parts. Block copy the constant
5278 parts in, then generate code for the non-constant parts. */
5279 /* TODO. There's code in cp/typeck.cc to do this. */
5280
5281 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5282 /* store_constructor will ignore the clearing of variable-sized
5283 objects. Initializers for such objects must explicitly set
5284 every field that needs to be set. */
5285 cleared = false;
5286 else if (!complete_p)
5287 /* If the constructor isn't complete, clear the whole object
5288 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5289
5290 ??? This ought not to be needed. For any element not present
5291 in the initializer, we should simply set it to zero. Except
5292 we'd need to *find* the elements that are not present, and that
5293 requires trickery to avoid quadratic compile-time behavior in
5294 large cases or excessive memory use in small cases. */
5295 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5296 else if (num_ctor_elements - num_nonzero_elements
5297 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5298 && num_nonzero_elements < num_ctor_elements / 4)
5299 /* If there are "lots" of zeros, it's more efficient to clear
5300 the memory and then set the nonzero elements. */
5301 cleared = true;
5302 else if (ensure_single_access && num_nonzero_elements == 0)
5303 /* If a single access to the target must be ensured and all elements
5304 are zero, then it's optimal to clear the object, whatever their number. */
5305 cleared = true;
5306 else
5307 cleared = false;
5308
5309 /* If there are "lots" of initialized elements, and all of them
5310 are valid address constants, then the entire initializer can
5311 be dropped to memory, and then memcpy'd out. Don't do this
5312 for sparse arrays, though, as it's more efficient to follow
5313 the standard CONSTRUCTOR behavior of memset followed by
5314 individual element initialization. Also don't do this for small
5315 all-zero initializers (which aren't big enough to merit
5316 clearing), and don't try to make bitwise copies of
5317 TREE_ADDRESSABLE types. */
5318 if (valid_const_initializer
5319 && complete_p
5320 && !(cleared || num_nonzero_elements == 0)
5321 && !TREE_ADDRESSABLE (type))
5322 {
5323 HOST_WIDE_INT size = int_size_in_bytes (type);
5324 unsigned int align;
5325
5326 /* ??? We can still get unbounded array types, at least
5327 from the C++ front end. This seems wrong, but attempt
5328 to work around it for now. */
5329 if (size < 0)
5330 {
5331 size = int_size_in_bytes (TREE_TYPE (object));
5332 if (size >= 0)
5333 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5334 }
5335
5336 /* Find the maximum alignment we can assume for the object. */
5337 /* ??? Make use of DECL_OFFSET_ALIGN. */
5338 if (DECL_P (object))
5339 align = DECL_ALIGN (object);
5340 else
5341 align = TYPE_ALIGN (type);
5342
5343 /* Do a block move either if the size is so small as to make
5344 each individual move a sub-unit move on average, or if it
5345 is so large as to make individual moves inefficient. */
5346 if (size > 0
5347 && num_nonzero_elements > 1
5348 /* For ctors that have many repeated nonzero elements
5349 represented through RANGE_EXPRs, prefer initializing
5350 those through runtime loops over copies of large amounts
5351 of data from readonly data section. */
5352 && (num_unique_nonzero_elements
5353 > num_nonzero_elements / unique_nonzero_ratio
5354 || size <= min_unique_size)
5355 && (size < num_nonzero_elements
5356 || !can_move_by_pieces (size, align)))
5357 {
5358 if (notify_temp_creation)
5359 return GS_ERROR;
5360
5361 walk_tree (&ctor, force_labels_r, NULL, NULL);
5362 ctor = tree_output_constant_def (ctor);
5363 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5364 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5365 TREE_OPERAND (*expr_p, 1) = ctor;
5366
5367 /* This is no longer an assignment of a CONSTRUCTOR, but
5368 we still may have processing to do on the LHS. So
5369 pretend we didn't do anything here to let that happen. */
5370 return GS_UNHANDLED;
5371 }
5372 }
5373
5374 /* If a single access to the target must be ensured and there are
5375 nonzero elements or the zero elements are not assigned en masse,
5376 initialize the target from a temporary. */
5377 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5378 {
5379 if (notify_temp_creation)
5380 return GS_ERROR;
5381
5382 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5383 TREE_OPERAND (*expr_p, 0) = temp;
5384 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5385 *expr_p,
5386 build2 (MODIFY_EXPR, void_type_node,
5387 object, temp));
5388 return GS_OK;
5389 }
5390
5391 if (notify_temp_creation)
5392 return GS_OK;
5393
5394 /* If there are nonzero elements and if needed, pre-evaluate to capture
5395 elements overlapping with the lhs into temporaries. We must do this
5396 before clearing to fetch the values before they are zeroed out. */
5397 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5398 {
5399 preeval_data.lhs_base_decl = get_base_address (object);
5400 if (!DECL_P (preeval_data.lhs_base_decl))
5401 preeval_data.lhs_base_decl = NULL;
5402 preeval_data.lhs_alias_set = get_alias_set (object);
5403
5404 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5405 pre_p, post_p, &preeval_data);
5406 }
5407
5408 bool ctor_has_side_effects_p
5409 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5410
5411 if (cleared)
5412 {
5413 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5414 Note that we still have to gimplify, in order to handle the
5415 case of variable sized types. Avoid shared tree structures. */
5416 CONSTRUCTOR_ELTS (ctor) = NULL;
5417 TREE_SIDE_EFFECTS (ctor) = 0;
5418 object = unshare_expr (object);
5419 gimplify_stmt (expr_p, pre_p);
5420 }
5421
5422 /* If we have not block cleared the object, or if there are nonzero
5423 elements in the constructor, or if the constructor has side effects,
5424 add assignments to the individual scalar fields of the object. */
5425 if (!cleared
5426 || num_nonzero_elements > 0
5427 || ctor_has_side_effects_p)
5428 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5429
5430 *expr_p = NULL_TREE;
5431 }
5432 break;
5433
5434 case COMPLEX_TYPE:
5435 {
5436 tree r, i;
5437
5438 if (notify_temp_creation)
5439 return GS_OK;
5440
5441 /* Extract the real and imaginary parts out of the ctor. */
5442 gcc_assert (elts->length () == 2);
5443 r = (*elts)[0].value;
5444 i = (*elts)[1].value;
5445 if (r == NULL || i == NULL)
5446 {
5447 tree zero = build_zero_cst (TREE_TYPE (type));
5448 if (r == NULL)
5449 r = zero;
5450 if (i == NULL)
5451 i = zero;
5452 }
5453
5454 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5455 represent creation of a complex value. */
5456 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5457 {
5458 ctor = build_complex (type, r, i);
5459 TREE_OPERAND (*expr_p, 1) = ctor;
5460 }
5461 else
5462 {
5463 ctor = build2 (COMPLEX_EXPR, type, r, i);
5464 TREE_OPERAND (*expr_p, 1) = ctor;
5465 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5466 pre_p,
5467 post_p,
5468 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5469 fb_rvalue);
5470 }
5471 }
5472 break;
5473
5474 case VECTOR_TYPE:
5475 {
5476 unsigned HOST_WIDE_INT ix;
5477 constructor_elt *ce;
5478
5479 if (notify_temp_creation)
5480 return GS_OK;
5481
5482 /* Vector types use CONSTRUCTOR all the way through gimple
5483 compilation as a general initializer. */
5484 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5485 {
5486 enum gimplify_status tret;
5487 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5488 fb_rvalue);
5489 if (tret == GS_ERROR)
5490 ret = GS_ERROR;
5491 else if (TREE_STATIC (ctor)
5492 && !initializer_constant_valid_p (ce->value,
5493 TREE_TYPE (ce->value)))
5494 TREE_STATIC (ctor) = 0;
5495 }
5496 recompute_constructor_flags (ctor);
5497
5498 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5499 if (TREE_CONSTANT (ctor))
5500 {
5501 bool constant_p = true;
5502 tree value;
5503
5504 /* Even when ctor is constant, it might contain non-*_CST
5505 elements, such as addresses or trapping values like
5506 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5507 in VECTOR_CST nodes. */
5508 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5509 if (!CONSTANT_CLASS_P (value))
5510 {
5511 constant_p = false;
5512 break;
5513 }
5514
5515 if (constant_p)
5516 {
5517 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5518 break;
5519 }
5520 }
5521
5522 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5523 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5524 }
5525 break;
5526
5527 default:
5528 /* So how did we get a CONSTRUCTOR for a scalar type? */
5529 gcc_unreachable ();
5530 }
5531
5532 if (ret == GS_ERROR)
5533 return GS_ERROR;
5534 /* If we have gimplified both sides of the initializer but have
5535 not emitted an assignment, do so now. */
5536 if (*expr_p
5537 /* If the type is an empty type, we don't need to emit the
5538 assignment. */
5539 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5540 {
5541 tree lhs = TREE_OPERAND (*expr_p, 0);
5542 tree rhs = TREE_OPERAND (*expr_p, 1);
5543 if (want_value && object == lhs)
5544 lhs = unshare_expr (lhs);
5545 gassign *init = gimple_build_assign (lhs, rhs);
5546 gimplify_seq_add_stmt (pre_p, init);
5547 }
5548 if (want_value)
5549 {
5550 *expr_p = object;
5551 ret = GS_OK;
5552 }
5553 else
5554 {
5555 *expr_p = NULL;
5556 ret = GS_ALL_DONE;
5557 }
5558
5559 /* If the user requests initialization of automatic variables, we
5560 should also initialize the padding inside the variable. Add a call
5561 to __builtin_clear_padding (&object, 0, for_auto_init = true) to
5562 always initialize the padding of the object to zero regardless of
5563 INIT_TYPE. Note, we will not insert this call if the aggregate
5564 variable has been completely cleared already or if it is initialized
5565 with an empty constructor. We cannot insert this call if the
5566 variable is a gimple register since __builtin_clear_padding will take
5567 the address of the variable. As a result, if a long double/_Complex
5568 long double variable is later spilled onto the stack, its padding
5569 cannot be cleared with __builtin_clear_padding; we should clear its
5570 padding when it is spilled into memory. */
5571 if (is_init_expr
5572 && !is_gimple_reg (object)
5573 && clear_padding_type_may_have_padding_p (type)
5574 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5575 || !AGGREGATE_TYPE_P (type))
5576 && is_var_need_auto_init (object))
5577 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5578
5579 return ret;
5580 }
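/* Illustrative sketch, not part of the original source. For a local

     const int tbl[4] = { 1, 2, 3, 4 };

   the promotion branch above can turn TBL into a TU-local static whose
   DECL_INITIAL is the constructor, so the data is emitted in the
   readonly section instead of being built on the stack. By contrast,
   a mostly-zero initializer such as

     int buf[64] = { 1 };

   ends up block-clearing BUF and then storing only the single nonzero
   element. */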
5581
5582 /* Given a pointer value OP0, return a simplified version of an
5583 indirection through OP0, or NULL_TREE if no simplification is
5584 possible. This may only be applied to a rhs of an expression.
5585 Note that the resulting type may be different from the type pointed
5586 to in the sense that it is still compatible from the langhooks
5587 point of view. */
5588
5589 static tree
5590 gimple_fold_indirect_ref_rhs (tree t)
5591 {
5592 return gimple_fold_indirect_ref (t);
5593 }
5594
5595 /* Subroutine of gimplify_modify_expr to do simplifications of
5596 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5597 something changes. */
5598
5599 static enum gimplify_status
5600 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5601 gimple_seq *pre_p, gimple_seq *post_p,
5602 bool want_value)
5603 {
5604 enum gimplify_status ret = GS_UNHANDLED;
5605 bool changed;
5606
5607 do
5608 {
5609 changed = false;
5610 switch (TREE_CODE (*from_p))
5611 {
5612 case VAR_DECL:
5613 /* If we're assigning from a read-only variable initialized with
5614 a constructor and not volatile, do the direct assignment from
5615 the constructor, but only if the target is not volatile either
5616 since this latter assignment might end up being done on a
5617 per-field basis. However, if the target is volatile and the type
5618 is aggregate and non-addressable, gimplify_init_constructor
5619 knows that it needs to ensure a single access to the target
5620 and it will return GS_OK only in this case. */
5621 if (TREE_READONLY (*from_p)
5622 && DECL_INITIAL (*from_p)
5623 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5624 && !TREE_THIS_VOLATILE (*from_p)
5625 && (!TREE_THIS_VOLATILE (*to_p)
5626 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5627 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5628 {
5629 tree old_from = *from_p;
5630 enum gimplify_status subret;
5631
5632 /* Move the constructor into the RHS. */
5633 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5634
5635 /* Let's see if gimplify_init_constructor will need to put
5636 it in memory. */
5637 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5638 false, true);
5639 if (subret == GS_ERROR)
5640 {
5641 /* If so, revert the change. */
5642 *from_p = old_from;
5643 }
5644 else
5645 {
5646 ret = GS_OK;
5647 changed = true;
5648 }
5649 }
5650 break;
5651 case INDIRECT_REF:
5652 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5653 /* If we have code like
5654
5655 *(const A*)(A*)&x
5656
5657 where the type of "x" is a (possibly cv-qualified) variant
5658 of "A", treat the entire expression as identical to "x".
5659 This kind of code arises in C++ when an object is bound
5660 to a const reference, and if "x" is a TARGET_EXPR we want
5661 to take advantage of the optimization below. But not if
5662 the type is TREE_ADDRESSABLE; then C++17 says that the
5663 TARGET_EXPR needs to be a temporary. */
5664 if (tree t
5665 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5666 {
5667 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5668 if (TREE_THIS_VOLATILE (t) != volatile_p)
5669 {
5670 if (DECL_P (t))
5671 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5672 build_fold_addr_expr (t));
5673 if (REFERENCE_CLASS_P (t))
5674 TREE_THIS_VOLATILE (t) = volatile_p;
5675 }
5676 *from_p = t;
5677 ret = GS_OK;
5678 changed = true;
5679 }
5680 break;
5681
5682 case TARGET_EXPR:
5683 {
5684 /* If we are initializing something from a TARGET_EXPR, strip the
5685 TARGET_EXPR and initialize it directly, if possible. This can't
5686 be done if the initializer is void, since that implies that the
5687 temporary is set in some non-trivial way.
5688
5689 ??? What about code that pulls out the temp and uses it
5690 elsewhere? I think that such code never uses the TARGET_EXPR as
5691 an initializer. If I'm wrong, we'll die because the temp won't
5692 have any RTL. In that case, I guess we'll need to replace
5693 references somehow. */
5694 tree init = TARGET_EXPR_INITIAL (*from_p);
5695
5696 if (init
5697 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5698 || !TARGET_EXPR_NO_ELIDE (*from_p))
5699 && !VOID_TYPE_P (TREE_TYPE (init)))
5700 {
5701 *from_p = init;
5702 ret = GS_OK;
5703 changed = true;
5704 }
5705 }
5706 break;
5707
5708 case COMPOUND_EXPR:
5709 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5710 caught. */
5711 gimplify_compound_expr (from_p, pre_p, true);
5712 ret = GS_OK;
5713 changed = true;
5714 break;
5715
5716 case CONSTRUCTOR:
5717 /* If we already made some changes, let the front end have a
5718 crack at this before we break it down. */
5719 if (ret != GS_UNHANDLED)
5720 break;
5721
5722 /* If we're initializing from a CONSTRUCTOR, break this into
5723 individual MODIFY_EXPRs. */
5724 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5725 false);
5726 return ret;
5727
5728 case COND_EXPR:
5729 /* If we're assigning to a non-register type, push the assignment
5730 down into the branches. This is mandatory for ADDRESSABLE types,
5731 since we cannot generate temporaries for such, but it saves a
5732 copy in other cases as well. */
5733 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5734 {
5735 /* This code should mirror the code in gimplify_cond_expr. */
5736 enum tree_code code = TREE_CODE (*expr_p);
5737 tree cond = *from_p;
5738 tree result = *to_p;
5739
5740 ret = gimplify_expr (&result, pre_p, post_p,
5741 is_gimple_lvalue, fb_lvalue);
5742 if (ret != GS_ERROR)
5743 ret = GS_OK;
5744
5745 /* If we are going to write RESULT more than once, clear
5746 TREE_READONLY flag, otherwise we might incorrectly promote
5747 the variable to static const and initialize it at compile
5748 time in one of the branches. */
5749 if (VAR_P (result)
5750 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5751 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5752 TREE_READONLY (result) = 0;
5753 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5754 TREE_OPERAND (cond, 1)
5755 = build2 (code, void_type_node, result,
5756 TREE_OPERAND (cond, 1));
5757 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5758 TREE_OPERAND (cond, 2)
5759 = build2 (code, void_type_node, unshare_expr (result),
5760 TREE_OPERAND (cond, 2));
5761
5762 TREE_TYPE (cond) = void_type_node;
5763 recalculate_side_effects (cond);
5764
5765 if (want_value)
5766 {
5767 gimplify_and_add (cond, pre_p);
5768 *expr_p = unshare_expr (result);
5769 }
5770 else
5771 *expr_p = cond;
5772 return ret;
5773 }
5774 break;
5775
5776 case CALL_EXPR:
5777 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5778 return slot so that we don't generate a temporary. */
5779 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5780 && aggregate_value_p (*from_p, *from_p))
5781 {
5782 bool use_target;
5783
5784 if (!(rhs_predicate_for (*to_p))(*from_p))
5785 /* If we need a temporary, *to_p isn't accurate. */
5786 use_target = false;
5787 /* It's OK to use the return slot directly unless it's an NRV. */
5788 else if (TREE_CODE (*to_p) == RESULT_DECL
5789 && DECL_NAME (*to_p) == NULL_TREE
5790 && needs_to_live_in_memory (*to_p))
5791 use_target = true;
5792 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5793 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5794 /* Don't force regs into memory. */
5795 use_target = false;
5796 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5797 /* It's OK to use the target directly if it's being
5798 initialized. */
5799 use_target = true;
5800 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5801 != INTEGER_CST)
5802 /* Always use the target and thus RSO for variable-sized types.
5803 GIMPLE cannot deal with a variable-sized assignment
5804 embedded in a call statement. */
5805 use_target = true;
5806 else if (TREE_CODE (*to_p) != SSA_NAME
5807 && (!is_gimple_variable (*to_p)
5808 || needs_to_live_in_memory (*to_p)))
5809 /* Don't use the original target if it's already addressable;
5810 if its address escapes, and the called function uses the
5811 NRV optimization, a conforming program could see *to_p
5812 change before the called function returns; see c++/19317.
5813 When optimizing, the return_slot pass marks more functions
5814 as safe after we have escape info. */
5815 use_target = false;
5816 else
5817 use_target = true;
5818
5819 if (use_target)
5820 {
5821 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5822 mark_addressable (*to_p);
5823 }
5824 }
5825 break;
5826
5827 case WITH_SIZE_EXPR:
5828 /* Likewise for calls that return an aggregate of non-constant size,
5829 since we would not be able to generate a temporary at all. */
5830 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5831 {
5832 *from_p = TREE_OPERAND (*from_p, 0);
5833 /* We don't change ret in this case because the
5834 WITH_SIZE_EXPR might have been added in
5835 gimplify_modify_expr, so returning GS_OK would lead to an
5836 infinite loop. */
5837 changed = true;
5838 }
5839 break;
5840
5841 /* If we're initializing from a container, push the initialization
5842 inside it. */
5843 case CLEANUP_POINT_EXPR:
5844 case BIND_EXPR:
5845 case STATEMENT_LIST:
5846 {
5847 tree wrap = *from_p;
5848 tree t;
5849
5850 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5851 fb_lvalue);
5852 if (ret != GS_ERROR)
5853 ret = GS_OK;
5854
5855 t = voidify_wrapper_expr (wrap, *expr_p);
5856 gcc_assert (t == *expr_p);
5857
5858 if (want_value)
5859 {
5860 gimplify_and_add (wrap, pre_p);
5861 *expr_p = unshare_expr (*to_p);
5862 }
5863 else
5864 *expr_p = wrap;
5865 return GS_OK;
5866 }
5867
5868 case NOP_EXPR:
5869 /* Pull out compound literal expressions from a NOP_EXPR.
5870 Those are created in the C FE to drop qualifiers during
5871 lvalue conversion. */
5872 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5873 && tree_ssa_useless_type_conversion (*from_p))
5874 {
5875 *from_p = TREE_OPERAND (*from_p, 0);
5876 ret = GS_OK;
5877 changed = true;
5878 }
5879 break;
5880
5881 case COMPOUND_LITERAL_EXPR:
5882 {
5883 tree complit = TREE_OPERAND (*expr_p, 1);
5884 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5885 tree decl = DECL_EXPR_DECL (decl_s);
5886 tree init = DECL_INITIAL (decl);
5887
5888 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5889 into struct T x = { 0, 1, 2 } if the address of the
5890 compound literal has never been taken. */
5891 if (!TREE_ADDRESSABLE (complit)
5892 && !TREE_ADDRESSABLE (decl)
5893 && init)
5894 {
5895 *expr_p = copy_node (*expr_p);
5896 TREE_OPERAND (*expr_p, 1) = init;
5897 return GS_OK;
5898 }
5899 }
5900
5901 default:
5902 break;
5903 }
5904 }
5905 while (changed);
5906
5907 return ret;
5908 }
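/* Illustrative example, not part of the original source. For the
   COND_EXPR case above, an assignment to a non-register aggregate

     s = flag ? s1 : s2;

   is rewritten so the stores happen in the branches,

     if (flag) s = s1; else s = s2;

   which avoids materializing an aggregate temporary for the RHS. */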
5909
5910
5911 /* Return true if T looks like a valid GIMPLE statement. */
5912
5913 static bool
5914 is_gimple_stmt (tree t)
5915 {
5916 const enum tree_code code = TREE_CODE (t);
5917
5918 switch (code)
5919 {
5920 case NOP_EXPR:
5921 /* The only valid NOP_EXPR is the empty statement. */
5922 return IS_EMPTY_STMT (t);
5923
5924 case BIND_EXPR:
5925 case COND_EXPR:
5926 /* These are only valid if they're void. */
5927 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5928
5929 case SWITCH_EXPR:
5930 case GOTO_EXPR:
5931 case RETURN_EXPR:
5932 case LABEL_EXPR:
5933 case CASE_LABEL_EXPR:
5934 case TRY_CATCH_EXPR:
5935 case TRY_FINALLY_EXPR:
5936 case EH_FILTER_EXPR:
5937 case CATCH_EXPR:
5938 case ASM_EXPR:
5939 case STATEMENT_LIST:
5940 case OACC_PARALLEL:
5941 case OACC_KERNELS:
5942 case OACC_SERIAL:
5943 case OACC_DATA:
5944 case OACC_HOST_DATA:
5945 case OACC_DECLARE:
5946 case OACC_UPDATE:
5947 case OACC_ENTER_DATA:
5948 case OACC_EXIT_DATA:
5949 case OACC_CACHE:
5950 case OMP_PARALLEL:
5951 case OMP_FOR:
5952 case OMP_SIMD:
5953 case OMP_DISTRIBUTE:
5954 case OMP_LOOP:
5955 case OACC_LOOP:
5956 case OMP_SCAN:
5957 case OMP_SCOPE:
5958 case OMP_SECTIONS:
5959 case OMP_SECTION:
5960 case OMP_SINGLE:
5961 case OMP_MASTER:
5962 case OMP_MASKED:
5963 case OMP_TASKGROUP:
5964 case OMP_ORDERED:
5965 case OMP_CRITICAL:
5966 case OMP_TASK:
5967 case OMP_TARGET:
5968 case OMP_TARGET_DATA:
5969 case OMP_TARGET_UPDATE:
5970 case OMP_TARGET_ENTER_DATA:
5971 case OMP_TARGET_EXIT_DATA:
5972 case OMP_TASKLOOP:
5973 case OMP_TEAMS:
5974 /* These are always void. */
5975 return true;
5976
5977 case CALL_EXPR:
5978 case MODIFY_EXPR:
5979 case PREDICT_EXPR:
5980 /* These are valid regardless of their type. */
5981 return true;
5982
5983 default:
5984 return false;
5985 }
5986 }
5987
5988
5989 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5990 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5991
5992 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5993 other, unmodified part of the complex object just before the total store.
5994 As a consequence, if the object is still uninitialized, an undefined value
5995 will be loaded into a register, which may result in a spurious exception
5996 if the register is floating-point and the value happens to be a signaling
5997 NaN for example. Then the fully-fledged complex operations lowering pass
5998 followed by a DCE pass are necessary in order to fix things up. */
5999
6000 static enum gimplify_status
6001 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6002 bool want_value)
6003 {
6004 enum tree_code code, ocode;
6005 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6006
6007 lhs = TREE_OPERAND (*expr_p, 0);
6008 rhs = TREE_OPERAND (*expr_p, 1);
6009 code = TREE_CODE (lhs);
6010 lhs = TREE_OPERAND (lhs, 0);
6011
6012 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6013 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6014 suppress_warning (other);
6015 other = get_formal_tmp_var (other, pre_p);
6016
6017 realpart = code == REALPART_EXPR ? rhs : other;
6018 imagpart = code == REALPART_EXPR ? other : rhs;
6019
6020 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6021 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6022 else
6023 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6024
6025 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6026 *expr_p = (want_value) ? rhs : NULL_TREE;
6027
6028 return GS_ALL_DONE;
6029 }
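/* Illustrative example, not part of the original source. For a
   non-addressable _Complex float Z kept in a register,

     __real__ z = x;

   is promoted to the total store

     tmp = __imag__ z;
     z = COMPLEX_EXPR <x, tmp>;

   so Z can use real operands instead of virtual ones. */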
6030
6031 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6032
6033 modify_expr
6034 : varname '=' rhs
6035 | '*' ID '=' rhs
6036
6037 PRE_P points to the list where side effects that must happen before
6038 *EXPR_P should be stored.
6039
6040 POST_P points to the list where side effects that must happen after
6041 *EXPR_P should be stored.
6042
6043 WANT_VALUE is nonzero iff we want to use the value of this expression
6044 in another expression. */
6045
6046 static enum gimplify_status
6047 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6048 bool want_value)
6049 {
6050 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6051 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6052 enum gimplify_status ret = GS_UNHANDLED;
6053 gimple *assign;
6054 location_t loc = EXPR_LOCATION (*expr_p);
6055 gimple_stmt_iterator gsi;
6056
6057 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6058 return GS_ERROR;
6059
6060 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6061 || TREE_CODE (*expr_p) == INIT_EXPR);
6062
6063 /* Trying to simplify a clobber using normal logic doesn't work,
6064 so handle it here. */
6065 if (TREE_CLOBBER_P (*from_p))
6066 {
6067 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6068 if (ret == GS_ERROR)
6069 return ret;
6070 gcc_assert (!want_value);
6071 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6072 {
6073 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6074 pre_p, post_p);
6075 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6076 }
6077 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6078 *expr_p = NULL;
6079 return GS_ALL_DONE;
6080 }
6081
6082 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6083 memset. */
6084 if (TREE_TYPE (*from_p) != error_mark_node
6085 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6086 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6087 && TREE_CODE (*from_p) == CONSTRUCTOR
6088 && CONSTRUCTOR_NELTS (*from_p) == 0)
6089 {
6090 maybe_with_size_expr (from_p);
6091 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6092 return gimplify_modify_expr_to_memset (expr_p,
6093 TREE_OPERAND (*from_p, 1),
6094 want_value, pre_p);
6095 }
6096
6097 /* Insert pointer conversions required by the middle-end that are not
6098 required by the frontend. This fixes middle-end type checking,
6099 for example for gcc.dg/redecl-6.c. */
6100 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6101 {
6102 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6103 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6104 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6105 }
6106
6107 /* See if any simplifications can be done based on what the RHS is. */
6108 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6109 want_value);
6110 if (ret != GS_UNHANDLED)
6111 return ret;
6112
6113 /* For empty types only gimplify the left hand side and right hand
6114 side as statements and throw away the assignment. Do this after
6115 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6116 types properly. */
6117 if (is_empty_type (TREE_TYPE (*from_p))
6118 && !want_value
6119 /* Don't do this for calls that return addressable types; expand_call
6120 relies on those having a lhs. */
6121 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6122 && TREE_CODE (*from_p) == CALL_EXPR))
6123 {
6124 gimplify_stmt (from_p, pre_p);
6125 gimplify_stmt (to_p, pre_p);
6126 *expr_p = NULL_TREE;
6127 return GS_ALL_DONE;
6128 }
6129
6130 /* If the value being copied is of variable width, compute the length
6131 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6132 before gimplifying any of the operands so that we can resolve any
6133 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6134 the size of the expression to be copied, not of the destination, so
6135 that is what we must do here. */
6136 maybe_with_size_expr (from_p);
6137
6138 /* As a special case, we have to temporarily allow for assignments
6139 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6140 a toplevel statement, when gimplifying the GENERIC expression
6141 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6142 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6143
6144 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6145 prevent gimplify_expr from trying to create a new temporary for
6146 foo's LHS, we tell it that it should only gimplify until it
6147 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6148 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6149 and all we need to do here is set 'a' to be its LHS. */
6150
6151 /* Gimplify the RHS first for C++17 and bug 71104. */
6152 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6153 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6154 if (ret == GS_ERROR)
6155 return ret;
6156
6157 /* Then gimplify the LHS. */
6158 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6159 twice we have to make sure to gimplify into non-SSA as otherwise
6160 the abnormal edge added later will make those defs not dominate
6161 their uses.
6162 ??? Technically this applies only to the registers used in the
6163 resulting non-register *TO_P. */
6164 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6165 if (saved_into_ssa
6166 && TREE_CODE (*from_p) == CALL_EXPR
6167 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6168 gimplify_ctxp->into_ssa = false;
6169 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6170 gimplify_ctxp->into_ssa = saved_into_ssa;
6171 if (ret == GS_ERROR)
6172 return ret;
6173
6174 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6175 guess for the predicate was wrong. */
6176 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6177 if (final_pred != initial_pred)
6178 {
6179 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6180 if (ret == GS_ERROR)
6181 return ret;
6182 }
6183
6184 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6185 size as an argument to the call. */
6186 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6187 {
6188 tree call = TREE_OPERAND (*from_p, 0);
6189 tree vlasize = TREE_OPERAND (*from_p, 1);
6190
6191 if (TREE_CODE (call) == CALL_EXPR
6192 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6193 {
6194 int nargs = call_expr_nargs (call);
6195 tree type = TREE_TYPE (call);
6196 tree ap = CALL_EXPR_ARG (call, 0);
6197 tree tag = CALL_EXPR_ARG (call, 1);
6198 tree aptag = CALL_EXPR_ARG (call, 2);
6199 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6200 IFN_VA_ARG, type,
6201 nargs + 1, ap, tag,
6202 aptag, vlasize);
6203 TREE_OPERAND (*from_p, 0) = newcall;
6204 }
6205 }
6206
6207 /* Now see if the above changed *from_p to something we handle specially. */
6208 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6209 want_value);
6210 if (ret != GS_UNHANDLED)
6211 return ret;
6212
6213 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6214 that does not involve a call), we can make things a bit more straightforward
6215 by converting the assignment to memcpy or memset. */
6216 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6217 {
6218 tree from = TREE_OPERAND (*from_p, 0);
6219 tree size = TREE_OPERAND (*from_p, 1);
6220
6221 if (TREE_CODE (from) == CONSTRUCTOR)
6222 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6223
6224 if (is_gimple_addressable (from))
6225 {
6226 *from_p = from;
6227 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6228 pre_p);
6229 }
6230 }
6231
6232 /* Transform partial stores to non-addressable complex variables into
6233 total stores. This allows us to use real instead of virtual operands
6234 for these variables, which improves optimization. */
6235 if ((TREE_CODE (*to_p) == REALPART_EXPR
6236 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6237 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6238 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6239
6240 /* Try to alleviate the effects of the gimplification creating artificial
6241 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6242 make sure not to create DECL_DEBUG_EXPR links across functions. */
6243 if (!gimplify_ctxp->into_ssa
6244 && VAR_P (*from_p)
6245 && DECL_IGNORED_P (*from_p)
6246 && DECL_P (*to_p)
6247 && !DECL_IGNORED_P (*to_p)
6248 && decl_function_context (*to_p) == current_function_decl
6249 && decl_function_context (*from_p) == current_function_decl)
6250 {
6251 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6252 DECL_NAME (*from_p)
6253 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6254 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6255 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6256 }
6257
6258 if (want_value && TREE_THIS_VOLATILE (*to_p))
6259 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6260
6261 if (TREE_CODE (*from_p) == CALL_EXPR)
6262 {
6263 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6264 instead of a GIMPLE_ASSIGN. */
6265 gcall *call_stmt;
6266 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6267 {
6268 /* Gimplify internal functions created in the FEs. */
6269 int nargs = call_expr_nargs (*from_p), i;
6270 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6271 auto_vec<tree> vargs (nargs);
6272
6273 for (i = 0; i < nargs; i++)
6274 {
6275 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6276 EXPR_LOCATION (*from_p));
6277 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6278 }
6279 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6280 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6281 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6282 }
6283 else
6284 {
6285 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6286 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6287 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6288 tree fndecl = get_callee_fndecl (*from_p);
6289 if (fndecl
6290 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6291 && call_expr_nargs (*from_p) == 3)
6292 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6293 CALL_EXPR_ARG (*from_p, 0),
6294 CALL_EXPR_ARG (*from_p, 1),
6295 CALL_EXPR_ARG (*from_p, 2));
6296 else
6297 {
6298 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6299 }
6300 }
6301 notice_special_calls (call_stmt);
6302 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6303 gimple_call_set_lhs (call_stmt, *to_p);
6304 else if (TREE_CODE (*to_p) == SSA_NAME)
6305 /* The above is somewhat premature, avoid ICEing later for a
6306 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6307 ??? This doesn't make it a default-def. */
6308 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6309
6310 assign = call_stmt;
6311 }
6312 else
6313 {
6314 assign = gimple_build_assign (*to_p, *from_p);
6315 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6316 if (COMPARISON_CLASS_P (*from_p))
6317 copy_warning (assign, *from_p);
6318 }
6319
6320 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6321 {
6322 /* We should have got an SSA name from the start. */
6323 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6324 || ! gimple_in_ssa_p (cfun));
6325 }
6326
6327 gimplify_seq_add_stmt (pre_p, assign);
6328 gsi = gsi_last (*pre_p);
6329 maybe_fold_stmt (&gsi);
6330
6331 if (want_value)
6332 {
6333 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6334 return GS_OK;
6335 }
6336 else
6337 *expr_p = NULL;
6338
6339 return GS_ALL_DONE;
6340 }
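/* Illustrative example, not part of the original source. Because a
   call is a toplevel statement in GIMPLE, gimplifying

     a = foo (b + 1);

   yields

     t1 = b + 1;
     a = foo (t1);    <-- a single GIMPLE_CALL with LHS a

   rather than a GIMPLE_ASSIGN whose RHS is a CALL_EXPR. */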
6341
6342 /* Gimplify a comparison between two variable-sized objects. Do this
6343 with a call to BUILT_IN_MEMCMP. */
6344
6345 static enum gimplify_status
6346 gimplify_variable_sized_compare (tree *expr_p)
6347 {
6348 location_t loc = EXPR_LOCATION (*expr_p);
6349 tree op0 = TREE_OPERAND (*expr_p, 0);
6350 tree op1 = TREE_OPERAND (*expr_p, 1);
6351 tree t, arg, dest, src, expr;
6352
6353 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6354 arg = unshare_expr (arg);
6355 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6356 src = build_fold_addr_expr_loc (loc, op1);
6357 dest = build_fold_addr_expr_loc (loc, op0);
6358 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6359 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6360
6361 expr
6362 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6363 SET_EXPR_LOCATION (expr, loc);
6364 *expr_p = expr;
6365
6366 return GS_OK;
6367 }
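/* Illustrative example, not part of the original source. For an
   equality test between two variable-sized objects A and B (as some
   front ends emit for aggregate comparisons), the expression becomes

     __builtin_memcmp (&a, &b, size) == 0

   where SIZE comes from TYPE_SIZE_UNIT of A's type, with any
   PLACEHOLDER_EXPRs in it substituted from A itself. */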
6368
6369 /* Gimplify a comparison between two aggregate objects of integral scalar
6370 mode as a comparison between the bitwise equivalent scalar values. */
6371
6372 static enum gimplify_status
6373 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6374 {
6375 location_t loc = EXPR_LOCATION (*expr_p);
6376 tree op0 = TREE_OPERAND (*expr_p, 0);
6377 tree op1 = TREE_OPERAND (*expr_p, 1);
6378
6379 tree type = TREE_TYPE (op0);
6380 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6381
6382 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6383 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6384
6385 *expr_p
6386 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6387
6388 return GS_OK;
6389 }
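/* Illustrative example, not part of the original source. Assuming
   struct S { short a; short b; } has an integral SImode on the target,

     x == y

   for two struct S objects is rewritten as

     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)

   comparing the bitwise-equivalent scalar values directly. */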
6390
6391 /* Gimplify an expression sequence. This function gimplifies each
6392 expression and rewrites the original expression with the last
6393 expression of the sequence in GIMPLE form.
6394
6395 PRE_P points to the list where the side effects for all the
6396 expressions in the sequence will be emitted.
6397
6398 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6399
6400 static enum gimplify_status
6401 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6402 {
6403 tree t = *expr_p;
6404
6405 do
6406 {
6407 tree *sub_p = &TREE_OPERAND (t, 0);
6408
6409 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6410 gimplify_compound_expr (sub_p, pre_p, false);
6411 else
6412 gimplify_stmt (sub_p, pre_p);
6413
6414 t = TREE_OPERAND (t, 1);
6415 }
6416 while (TREE_CODE (t) == COMPOUND_EXPR);
6417
6418 *expr_p = t;
6419 if (want_value)
6420 return GS_OK;
6421 else
6422 {
6423 gimplify_stmt (expr_p, pre_p);
6424 return GS_ALL_DONE;
6425 }
6426 }
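/* Illustrative example, not part of the original source. For

     x = (f (), g (), y);

   f () and g () are gimplified as statements into PRE_P and the
   whole COMPOUND_EXPR collapses to its last operand, leaving

     f (); g (); x = y;  */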
6427
6428 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6429 gimplify. After gimplification, EXPR_P will point to a new temporary
6430 that holds the original value of the SAVE_EXPR node.
6431
6432 PRE_P points to the list where side effects that must happen before
6433 *EXPR_P should be stored. */
6434
6435 static enum gimplify_status
6436 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6437 {
6438 enum gimplify_status ret = GS_ALL_DONE;
6439 tree val;
6440
6441 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6442 val = TREE_OPERAND (*expr_p, 0);
6443
6444 if (TREE_TYPE (val) == error_mark_node)
6445 return GS_ERROR;
6446
6447 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6448 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6449 {
6450 /* The operand may be a void-valued expression. It is
6451 being executed only for its side effects. */
6452 if (TREE_TYPE (val) == void_type_node)
6453 {
6454 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6455 is_gimple_stmt, fb_none);
6456 val = NULL;
6457 }
6458 else
6459 /* The temporary may not be an SSA name as later abnormal and EH
6460 control flow may invalidate use/def domination. When in SSA
6461 form then assume there are no such issues and SAVE_EXPRs only
6462 appear via GENERIC foldings. */
6463 val = get_initialized_tmp_var (val, pre_p, post_p,
6464 gimple_in_ssa_p (cfun));
6465
6466 TREE_OPERAND (*expr_p, 0) = val;
6467 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6468 }
6469
6470 *expr_p = val;
6471
6472 return ret;
6473 }
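/* Illustrative example, not part of the original source. The first
   time SAVE_EXPR <n * 4> is gimplified, it emits

     t = n * 4;

   and rewires the SAVE_EXPR's operand to T with SAVE_EXPR_RESOLVED_P
   set, so any later gimplification of the same node reuses T instead
   of re-evaluating N * 4. */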
6474
6475 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6476
6477 unary_expr
6478 : ...
6479 | '&' varname
6480 ...
6481
6482 PRE_P points to the list where side effects that must happen before
6483 *EXPR_P should be stored.
6484
6485 POST_P points to the list where side effects that must happen after
6486 *EXPR_P should be stored. */
6487
6488 static enum gimplify_status
6489 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6490 {
6491 tree expr = *expr_p;
6492 tree op0 = TREE_OPERAND (expr, 0);
6493 enum gimplify_status ret;
6494 location_t loc = EXPR_LOCATION (*expr_p);
6495
6496 switch (TREE_CODE (op0))
6497 {
6498 case INDIRECT_REF:
6499 do_indirect_ref:
6500 /* Check if we are dealing with an expression of the form '&*ptr'.
6501 While the front end folds away '&*ptr' into 'ptr', these
6502 expressions may be generated internally by the compiler (e.g.,
6503 builtins like __builtin_va_end). */
6504 /* Caution: the silent array decomposition semantics we allow for
6505 ADDR_EXPR means we can't always discard the pair. */
6506 /* Gimplification of the ADDR_EXPR operand may drop
6507 cv-qualification conversions, so make sure we add them if
6508 needed. */
6509 {
6510 tree op00 = TREE_OPERAND (op0, 0);
6511 tree t_expr = TREE_TYPE (expr);
6512 tree t_op00 = TREE_TYPE (op00);
6513
6514 if (!useless_type_conversion_p (t_expr, t_op00))
6515 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6516 *expr_p = op00;
6517 ret = GS_OK;
6518 }
6519 break;
6520
6521 case VIEW_CONVERT_EXPR:
6522 /* Take the address of our operand and then convert it to the type of
6523 this ADDR_EXPR.
6524
6525 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6526 all clear. The impact of this transformation is even less clear. */
6527
6528 /* If the operand is a useless conversion, look through it. Doing so
6529 guarantees that the ADDR_EXPR and its operand will remain of the
6530 same type. */
6531 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6532 op0 = TREE_OPERAND (op0, 0);
6533
6534 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6535 build_fold_addr_expr_loc (loc,
6536 TREE_OPERAND (op0, 0)));
6537 ret = GS_OK;
6538 break;
6539
6540 case MEM_REF:
6541 if (integer_zerop (TREE_OPERAND (op0, 1)))
6542 goto do_indirect_ref;
6543
6544 /* fall through */
6545
6546 default:
6547 /* If we see a call to a declared builtin or see its address
6548 being taken (we can unify those cases here) then we can mark
6549 the builtin for implicit generation by GCC. */
6550 if (TREE_CODE (op0) == FUNCTION_DECL
6551 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6552 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6553 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6554
6555 /* We use fb_either here because the C frontend sometimes takes
6556 the address of a call that returns a struct; see
6557 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6558 the implied temporary explicit. */
6559
6560 /* Make the operand addressable. */
6561 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6562 is_gimple_addressable, fb_either);
6563 if (ret == GS_ERROR)
6564 break;
6565
6566 /* Then mark it. Beware that it may not be possible to do so directly
6567 if a temporary has been created by the gimplification. */
6568 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6569
6570 op0 = TREE_OPERAND (expr, 0);
6571
6572 /* For various reasons, the gimplification of the expression
6573 may have made a new INDIRECT_REF. */
6574 if (TREE_CODE (op0) == INDIRECT_REF
6575 || (TREE_CODE (op0) == MEM_REF
6576 && integer_zerop (TREE_OPERAND (op0, 1))))
6577 goto do_indirect_ref;
6578
6579 mark_addressable (TREE_OPERAND (expr, 0));
6580
6581 /* The FEs may end up building ADDR_EXPRs early on a decl with
6582 an incomplete type. Re-build ADDR_EXPRs in canonical form
6583 here. */
6584 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6585 *expr_p = build_fold_addr_expr (op0);
6586
6587 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6588 recompute_tree_invariant_for_addr_expr (*expr_p);
6589
6590 /* If we re-built the ADDR_EXPR add a conversion to the original type
6591 if required. */
6592 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6593 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6594
6595 break;
6596 }
6597
6598 return ret;
6599 }
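/* Illustrative example, not part of the original source. An
   internally generated '&*p' (e.g. from lowering __builtin_va_end)
   simplifies back to plain 'p' via the INDIRECT_REF case above,
   with a conversion added if the pointer types differ only in
   qualification. */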
6600
6601 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
6602 values; output operands should be gimple lvalues. */
6603
6604 static enum gimplify_status
6605 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6606 {
6607 tree expr;
6608 int noutputs;
6609 const char **oconstraints;
6610 int i;
6611 tree link;
6612 const char *constraint;
6613 bool allows_mem, allows_reg, is_inout;
6614 enum gimplify_status ret, tret;
6615 gasm *stmt;
6616 vec<tree, va_gc> *inputs;
6617 vec<tree, va_gc> *outputs;
6618 vec<tree, va_gc> *clobbers;
6619 vec<tree, va_gc> *labels;
6620 tree link_next;
6621
6622 expr = *expr_p;
6623 noutputs = list_length (ASM_OUTPUTS (expr));
6624 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6625
6626 inputs = NULL;
6627 outputs = NULL;
6628 clobbers = NULL;
6629 labels = NULL;
6630
6631 ret = GS_ALL_DONE;
6632 link_next = NULL_TREE;
6633 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6634 {
6635 bool ok;
6636 size_t constraint_len;
6637
6638 link_next = TREE_CHAIN (link);
6639
6640 oconstraints[i]
6641 = constraint
6642 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6643 constraint_len = strlen (constraint);
6644 if (constraint_len == 0)
6645 continue;
6646
6647 ok = parse_output_constraint (&constraint, i, 0, 0,
6648 &allows_mem, &allows_reg, &is_inout);
6649 if (!ok)
6650 {
6651 ret = GS_ERROR;
6652 is_inout = false;
6653 }
6654
6655 /* If we can't make copies, we can only accept memory.
6656 Similarly for VLAs. */
6657 tree outtype = TREE_TYPE (TREE_VALUE (link));
6658 if (outtype != error_mark_node
6659 && (TREE_ADDRESSABLE (outtype)
6660 || !COMPLETE_TYPE_P (outtype)
6661 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6662 {
6663 if (allows_mem)
6664 allows_reg = 0;
6665 else
6666 {
6667 error ("impossible constraint in %<asm%>");
6668 error ("non-memory output %d must stay in memory", i);
6669 return GS_ERROR;
6670 }
6671 }
6672
6673 if (!allows_reg && allows_mem)
6674 mark_addressable (TREE_VALUE (link));
6675
6676 tree orig = TREE_VALUE (link);
6677 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6678 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6679 fb_lvalue | fb_mayfail);
6680 if (tret == GS_ERROR)
6681 {
6682 if (orig != error_mark_node)
6683 error ("invalid lvalue in %<asm%> output %d", i);
6684 ret = tret;
6685 }
6686
6687 /* If the constraint does not allow memory, make sure we gimplify
6688 it to a register if it is not one already but its base is. This
6689 happens for complex and vector components. */
6690 if (!allows_mem)
6691 {
6692 tree op = TREE_VALUE (link);
6693 if (! is_gimple_val (op)
6694 && is_gimple_reg_type (TREE_TYPE (op))
6695 && is_gimple_reg (get_base_address (op)))
6696 {
6697 tree tem = create_tmp_reg (TREE_TYPE (op));
6698 tree ass;
6699 if (is_inout)
6700 {
6701 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6702 tem, unshare_expr (op));
6703 gimplify_and_add (ass, pre_p);
6704 }
6705 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6706 gimplify_and_add (ass, post_p);
6707
6708 TREE_VALUE (link) = tem;
6709 tret = GS_OK;
6710 }
6711 }
6712
6713 vec_safe_push (outputs, link);
6714 TREE_CHAIN (link) = NULL_TREE;
6715
6716 if (is_inout)
6717 {
6718 /* An input/output operand. To give the optimizers more
6719 flexibility, split it into separate input and output
6720 operands. */
6721 tree input;
6722 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6723 char buf[11];
6724
6725 /* Turn the in/out constraint into an output constraint. */
6726 char *p = xstrdup (constraint);
6727 p[0] = '=';
6728 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6729
6730 /* And add a matching input constraint. */
6731 if (allows_reg)
6732 {
6733 sprintf (buf, "%u", i);
6734
6735 /* If there are multiple alternatives in the constraint,
6736 handle each of them individually. Those that allow a register
6737 will be replaced with the operand number; the others stay
6738 unchanged. */
6739 if (strchr (p, ',') != NULL)
6740 {
6741 size_t len = 0, buflen = strlen (buf);
6742 char *beg, *end, *str, *dst;
6743
6744 for (beg = p + 1;;)
6745 {
6746 end = strchr (beg, ',');
6747 if (end == NULL)
6748 end = strchr (beg, '\0');
6749 if ((size_t) (end - beg) < buflen)
6750 len += buflen + 1;
6751 else
6752 len += end - beg + 1;
6753 if (*end)
6754 beg = end + 1;
6755 else
6756 break;
6757 }
6758
6759 str = (char *) alloca (len);
6760 for (beg = p + 1, dst = str;;)
6761 {
6762 const char *tem;
6763 bool mem_p, reg_p, inout_p;
6764
6765 end = strchr (beg, ',');
6766 if (end)
6767 *end = '\0';
6768 beg[-1] = '=';
6769 tem = beg - 1;
6770 parse_output_constraint (&tem, i, 0, 0,
6771 &mem_p, &reg_p, &inout_p);
6772 if (dst != str)
6773 *dst++ = ',';
6774 if (reg_p)
6775 {
6776 memcpy (dst, buf, buflen);
6777 dst += buflen;
6778 }
6779 else
6780 {
6781 if (end)
6782 len = end - beg;
6783 else
6784 len = strlen (beg);
6785 memcpy (dst, beg, len);
6786 dst += len;
6787 }
6788 if (end)
6789 beg = end + 1;
6790 else
6791 break;
6792 }
6793 *dst = '\0';
6794 input = build_string (dst - str, str);
6795 }
6796 else
6797 input = build_string (strlen (buf), buf);
6798 }
6799 else
6800 input = build_string (constraint_len - 1, constraint + 1);
6801
6802 free (p);
6803
6804 input = build_tree_list (build_tree_list (NULL_TREE, input),
6805 unshare_expr (TREE_VALUE (link)));
6806 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6807 }
6808 }
6809
6810 link_next = NULL_TREE;
6811 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6812 {
6813 link_next = TREE_CHAIN (link);
6814 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6815 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6816 oconstraints, &allows_mem, &allows_reg);
6817
6818 /* If we can't make copies, we can only accept memory. */
6819 tree intype = TREE_TYPE (TREE_VALUE (link));
6820 if (intype != error_mark_node
6821 && (TREE_ADDRESSABLE (intype)
6822 || !COMPLETE_TYPE_P (intype)
6823 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6824 {
6825 if (allows_mem)
6826 allows_reg = 0;
6827 else
6828 {
6829 error ("impossible constraint in %<asm%>");
6830 error ("non-memory input %d must stay in memory", i);
6831 return GS_ERROR;
6832 }
6833 }
6834
6835 /* If the operand is a memory input, it should be an lvalue. */
6836 if (!allows_reg && allows_mem)
6837 {
6838 tree inputv = TREE_VALUE (link);
6839 STRIP_NOPS (inputv);
6840 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6841 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6842 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6843 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6844 || TREE_CODE (inputv) == MODIFY_EXPR)
6845 TREE_VALUE (link) = error_mark_node;
6846 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6847 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6848 if (tret != GS_ERROR)
6849 {
6850 /* Unlike output operands, memory inputs are not guaranteed
6851 to be lvalues by the FE, and while the expressions are
6852 marked addressable there, if the input is e.g. a statement
6853 expression, temporaries in it might not end up being
6854 addressable. They might already be used in the IL and thus
6855 it is too late to make them addressable now. */
6856 tree x = TREE_VALUE (link);
6857 while (handled_component_p (x))
6858 x = TREE_OPERAND (x, 0);
6859 if (TREE_CODE (x) == MEM_REF
6860 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6861 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6862 if ((VAR_P (x)
6863 || TREE_CODE (x) == PARM_DECL
6864 || TREE_CODE (x) == RESULT_DECL)
6865 && !TREE_ADDRESSABLE (x)
6866 && is_gimple_reg (x))
6867 {
6868 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6869 input_location), 0,
6870 "memory input %d is not directly addressable",
6871 i);
6872 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6873 }
6874 }
6875 mark_addressable (TREE_VALUE (link));
6876 if (tret == GS_ERROR)
6877 {
6878 if (inputv != error_mark_node)
6879 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6880 "memory input %d is not directly addressable", i);
6881 ret = tret;
6882 }
6883 }
6884 else
6885 {
6886 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6887 is_gimple_asm_val, fb_rvalue);
6888 if (tret == GS_ERROR)
6889 ret = tret;
6890 }
6891
6892 TREE_CHAIN (link) = NULL_TREE;
6893 vec_safe_push (inputs, link);
6894 }
6895
6896 link_next = NULL_TREE;
6897 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6898 {
6899 link_next = TREE_CHAIN (link);
6900 TREE_CHAIN (link) = NULL_TREE;
6901 vec_safe_push (clobbers, link);
6902 }
6903
6904 link_next = NULL_TREE;
6905 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6906 {
6907 link_next = TREE_CHAIN (link);
6908 TREE_CHAIN (link) = NULL_TREE;
6909 vec_safe_push (labels, link);
6910 }
6911
6912 /* Do not add ASMs with errors to the gimple IL stream. */
6913 if (ret != GS_ERROR)
6914 {
6915 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6916 inputs, outputs, clobbers, labels);
6917
6918 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6919 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6920 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6921
6922 gimplify_seq_add_stmt (pre_p, stmt);
6923 }
6924
6925 return ret;
6926 }
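/* Illustrative example, not part of the original source. An in/out
   operand such as

     asm ("incl %0" : "+r" (x));

   is split above into an output and a matching numbered input,

     asm ("incl %0" : "=r" (x) : "0" (x));

   which gives the optimizers independent DEF and USE operands. */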
6927
6928 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6929 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6930 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6931 return to this function.
6932
6933 FIXME should we complexify the prequeue handling instead? Or use flags
6934 for all the cleanups and let the optimizer tighten them up? The current
6935 code seems pretty fragile; it will break on a cleanup within any
6936 non-conditional nesting. But any such nesting would be broken, anyway;
6937 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6938 and continues out of it. We can do that at the RTL level, though, so
6939 having an optimizer to tighten up try/finally regions would be a Good
6940 Thing. */
6941
6942 static enum gimplify_status
6943 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6944 {
6945 gimple_stmt_iterator iter;
6946 gimple_seq body_sequence = NULL;
6947
6948 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6949
6950 /* We only care about the number of conditions between the innermost
6951 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6952 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6953 int old_conds = gimplify_ctxp->conditions;
6954 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6955 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6956 gimplify_ctxp->conditions = 0;
6957 gimplify_ctxp->conditional_cleanups = NULL;
6958 gimplify_ctxp->in_cleanup_point_expr = true;
6959
6960 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6961
6962 gimplify_ctxp->conditions = old_conds;
6963 gimplify_ctxp->conditional_cleanups = old_cleanups;
6964 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6965
6966 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6967 {
6968 gimple *wce = gsi_stmt (iter);
6969
6970 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6971 {
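/* If the WCE is the last statement in the sequence, there is nothing left for its cleanup to protect; emit the cleanup sequence in place of the WCE (or drop it entirely if it should run only on the EH path).  */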
6972 if (gsi_one_before_end_p (iter))
6973 {
6974 /* Note that gsi_insert_seq_before and gsi_remove do not
6975 scan operands, unlike some other sequence mutators. */
6976 if (!gimple_wce_cleanup_eh_only (wce))
6977 gsi_insert_seq_before_without_update (&iter,
6978 gimple_wce_cleanup (wce),
6979 GSI_SAME_STMT);
6980 gsi_remove (&iter, true);
6981 break;
6982 }
6983 else
6984 {
6985 gtry *gtry;
6986 gimple_seq seq;
6987 enum gimple_try_flags kind;
6988
6989 if (gimple_wce_cleanup_eh_only (wce))
6990 kind = GIMPLE_TRY_CATCH;
6991 else
6992 kind = GIMPLE_TRY_FINALLY;
6993 seq = gsi_split_seq_after (iter);
6994
6995 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6996 /* Do not use gsi_replace here, as it may scan operands.
6997 We want to do a simple structural modification only. */
6998 gsi_set_stmt (&iter, gtry);
6999 iter = gsi_start (gtry->eval);
7000 }
7001 }
7002 else
7003 gsi_next (&iter);
7004 }
7005
7006 gimplify_seq_add_seq (pre_p, body_sequence);
7007 if (temp)
7008 {
7009 *expr_p = temp;
7010 return GS_OK;
7011 }
7012 else
7013 {
7014 *expr_p = NULL;
7015 return GS_ALL_DONE;
7016 }
7017 }
7018
7019 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7020 is the cleanup action required. EH_ONLY is true if the cleanup should
7021 only be executed if an exception is thrown, not on normal exit.
7022 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7023 only valid for clobbers. */
7024
7025 static void
7026 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7027 bool force_uncond = false)
7028 {
7029 gimple *wce;
7030 gimple_seq cleanup_stmts = NULL;
7031
7032 /* Errors can result in improperly nested cleanups, which in turn
7033 cause confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
7034 if (seen_error ())
7035 return;
7036
7037 if (gimple_conditional_context ())
7038 {
7039 /* If we're in a conditional context, this is more complex. We only
7040 want to run the cleanup if we actually ran the initialization that
7041 necessitates it, but we want to run it after the end of the
7042 conditional context. So we wrap the try/finally around the
7043 condition and use a flag to determine whether or not to actually
7044 run the destructor. Thus
7045
7046 test ? f(A()) : 0
7047
7048 becomes (approximately)
7049
7050 flag = 0;
7051 try {
7052 if (test) { A::A(temp); flag = 1; val = f(temp); }
7053 else { val = 0; }
7054 } finally {
7055 if (flag) A::~A(temp);
7056 }
7057 val
7058 */
7059 if (force_uncond)
7060 {
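/* For clobbers no flag is needed: clobbering a variable whose construction never ran is harmless, so the cleanup may run unconditionally.  */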
7061 gimplify_stmt (&cleanup, &cleanup_stmts);
7062 wce = gimple_build_wce (cleanup_stmts);
7063 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7064 }
7065 else
7066 {
7067 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7068 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7069 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7070
7071 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7072 gimplify_stmt (&cleanup, &cleanup_stmts);
7073 wce = gimple_build_wce (cleanup_stmts);
7074 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7075
7076 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7077 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7078 gimplify_seq_add_stmt (pre_p, ftrue);
7079
7080 /* Because of this manipulation, and the EH edges that jump
7081 threading cannot redirect, the temporary (VAR) will appear
7082 to be used uninitialized. Don't warn. */
7083 suppress_warning (var, OPT_Wuninitialized);
7084 }
7085 }
7086 else
7087 {
7088 gimplify_stmt (&cleanup, &cleanup_stmts);
7089 wce = gimple_build_wce (cleanup_stmts);
7090 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7091 gimplify_seq_add_stmt (pre_p, wce);
7092 }
7093 }
7094
7095 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7096
7097 static enum gimplify_status
7098 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7099 {
7100 tree targ = *expr_p;
7101 tree temp = TARGET_EXPR_SLOT (targ);
7102 tree init = TARGET_EXPR_INITIAL (targ);
7103 enum gimplify_status ret;
7104
7105 bool unpoison_empty_seq = false;
7106 gimple_stmt_iterator unpoison_it;
7107
7108 if (init)
7109 {
7110 gimple_seq init_pre_p = NULL;
7111
7112 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
7113 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
7114 if (!poly_int_tree_p (DECL_SIZE (temp)))
7115 {
7116 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7117 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7118 /* FIXME: this is correct only when the size of the type does
7119 not depend on expressions evaluated in init. */
7120 gimplify_vla_decl (temp, &init_pre_p);
7121 }
7122 else
7123 {
7124 /* Save the location where we need to place unpoisoning.  It's possible
7125 that the variable will later turn out to need to live in memory.  */
7126 unpoison_it = gsi_last (*pre_p);
7127 unpoison_empty_seq = gsi_end_p (unpoison_it);
7128
7129 gimple_add_tmp_var (temp);
7130 }
7131
7132 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7133 expression is supposed to initialize the slot. */
7134 if (VOID_TYPE_P (TREE_TYPE (init)))
7135 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7136 fb_none);
7137 else
7138 {
7139 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7140 init = init_expr;
7141 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7142 fb_none);
7143 init = NULL;
7144 ggc_free (init_expr);
7145 }
7146 if (ret == GS_ERROR)
7147 {
7148 /* PR c++/28266 Make sure this is expanded only once. */
7149 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7150 return GS_ERROR;
7151 }
7152
7153 if (init)
7154 gimplify_and_add (init, &init_pre_p);
7155
7156 /* Add a clobber for the temporary going out of scope, like
7157 gimplify_bind_expr. */
7158 if (gimplify_ctxp->in_cleanup_point_expr
7159 && needs_to_live_in_memory (temp))
7160 {
7161 if (flag_stack_reuse == SR_ALL)
7162 {
7163 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7164 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7165 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7166 }
7167 if (asan_poisoned_variables
7168 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7169 && !TREE_STATIC (temp)
7170 && dbg_cnt (asan_use_after_scope)
7171 && !gimplify_omp_ctxp)
7172 {
7173 tree asan_cleanup = build_asan_poison_call_expr (temp);
7174 if (asan_cleanup)
7175 {
7176 if (unpoison_empty_seq)
7177 unpoison_it = gsi_start (*pre_p);
7178
7179 asan_poison_variable (temp, false, &unpoison_it,
7180 unpoison_empty_seq);
7181 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7182 }
7183 }
7184 }
7185
7186 gimple_seq_add_seq (pre_p, init_pre_p);
7187
7188 /* If needed, push the cleanup for the temp. */
7189 if (TARGET_EXPR_CLEANUP (targ))
7190 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7191 CLEANUP_EH_ONLY (targ), pre_p);
7192
7193 /* Only expand this once. */
7194 TREE_OPERAND (targ, 3) = init;
7195 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7196 }
7197 else
7198 /* We should have expanded this before. */
7199 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7200
7201 *expr_p = temp;
7202 return GS_OK;
7203 }
7204
7205 /* Gimplification of expression trees. */
7206
7207 /* Gimplify an expression which appears at statement context. The
7208 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7209 NULL, a new sequence is allocated.
7210
7211 Return true if we actually added a statement to the queue. */
7212
7213 bool
7214 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7215 {
7216 gimple_seq_node last;
7217
7218 last = gimple_seq_last (*seq_p);
7219 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
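/* Anything that was appended will have changed the sequence's last node.  */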
7220 return last != gimple_seq_last (*seq_p);
7221 }
7222
7223 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
7224 parallels.  If entries already exist, force them to be some flavor of private.
7225 If there is no enclosing parallel, do nothing. */
7226
7227 void
7228 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7229 {
7230 splay_tree_node n;
7231
7232 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7233 return;
7234
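/* Walk outward through the enclosing OpenMP contexts, adding or adjusting an entry for DECL in each, until an existing private-like entry stops the walk.  */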
7235 do
7236 {
7237 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7238 if (n != NULL)
7239 {
7240 if (n->value & GOVD_SHARED)
7241 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7242 else if (n->value & GOVD_MAP)
7243 n->value |= GOVD_MAP_TO_ONLY;
7244 else
7245 return;
7246 }
7247 else if ((ctx->region_type & ORT_TARGET) != 0)
7248 {
7249 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7250 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7251 else
7252 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7253 }
7254 else if (ctx->region_type != ORT_WORKSHARE
7255 && ctx->region_type != ORT_TASKGROUP
7256 && ctx->region_type != ORT_SIMD
7257 && ctx->region_type != ORT_ACC
7258 && !(ctx->region_type & ORT_TARGET_DATA))
7259 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7260
7261 ctx = ctx->outer_context;
7262 }
7263 while (ctx);
7264 }
7265
7266 /* Similarly for each of the type sizes of TYPE. */
7267
7268 static void
7269 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7270 {
7271 if (type == NULL || type == error_mark_node)
7272 return;
7273 type = TYPE_MAIN_VARIANT (type);
7274
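/* hash_set::add returns true if TYPE was already in the set; each type needs to be processed only once.  */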
7275 if (ctx->privatized_types->add (type))
7276 return;
7277
7278 switch (TREE_CODE (type))
7279 {
7280 case INTEGER_TYPE:
7281 case ENUMERAL_TYPE:
7282 case BOOLEAN_TYPE:
7283 case REAL_TYPE:
7284 case FIXED_POINT_TYPE:
7285 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7286 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7287 break;
7288
7289 case ARRAY_TYPE:
7290 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7291 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7292 break;
7293
7294 case RECORD_TYPE:
7295 case UNION_TYPE:
7296 case QUAL_UNION_TYPE:
7297 {
7298 tree field;
7299 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7300 if (TREE_CODE (field) == FIELD_DECL)
7301 {
7302 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7303 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7304 }
7305 }
7306 break;
7307
7308 case POINTER_TYPE:
7309 case REFERENCE_TYPE:
7310 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7311 break;
7312
7313 default:
7314 break;
7315 }
7316
7317 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7318 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7319 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7320 }
7321
7322 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7323
7324 static void
7325 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7326 {
7327 splay_tree_node n;
7328 unsigned int nflags;
7329 tree t;
7330
7331 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7332 return;
7333
7334 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7335 there are constructors involved somewhere.  The exception is a shared
7336 clause: there is nothing privatized in that case.  */
7337 if ((flags & GOVD_SHARED) == 0
7338 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7339 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7340 flags |= GOVD_SEEN;
7341
7342 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7343 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7344 {
7345 /* We shouldn't be re-adding the decl with the same data
7346 sharing class. */
7347 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7348 nflags = n->value | flags;
7349 /* The only combination of data sharing classes we should see is
7350 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7351 reduction variables to be used in data sharing clauses. */
7352 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7353 || ((nflags & GOVD_DATA_SHARE_CLASS)
7354 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7355 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7356 n->value = nflags;
7357 return;
7358 }
7359
7360 /* When adding a variable-sized variable, we have to handle all sorts
7361 of additional bits of data: the pointer replacement variable, and
7362 the parameters of the type. */
7363 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7364 {
7365 /* Add the pointer replacement variable as PRIVATE if the variable
7366 replacement is private, else FIRSTPRIVATE since we'll need the
7367 address of the original variable either for SHARED, or for the
7368 copy into or out of the context. */
7369 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7370 {
7371 if (flags & GOVD_MAP)
7372 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7373 else if (flags & GOVD_PRIVATE)
7374 nflags = GOVD_PRIVATE;
7375 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7376 && (flags & GOVD_FIRSTPRIVATE))
7377 || (ctx->region_type == ORT_TARGET_DATA
7378 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7379 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7380 else
7381 nflags = GOVD_FIRSTPRIVATE;
7382 nflags |= flags & GOVD_SEEN;
7383 t = DECL_VALUE_EXPR (decl);
7384 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7385 t = TREE_OPERAND (t, 0);
7386 gcc_assert (DECL_P (t));
7387 omp_add_variable (ctx, t, nflags);
7388 }
7389
7390 /* Add all of the variable and type parameters (which should have
7391 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7392 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7393 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7394 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7395
7396 /* The variable-sized variable itself is never SHARED, only some form
7397 of PRIVATE. The sharing would take place via the pointer variable
7398 which we remapped above. */
7399 if (flags & GOVD_SHARED)
7400 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7401 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7402
7403 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7404 alloca statement we generate for the variable, so make sure it
7405 is available. This isn't automatically needed for the SHARED
7406 case, since we won't be allocating local storage then.
7407 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7408 in that case omp_notice_variable will be called later
7409 on when it is gimplified.
7410 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7411 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7412 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7413 }
7414 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7415 && omp_privatize_by_reference (decl))
7416 {
7417 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7418
7419 /* Similar to the direct variable sized case above, we'll need the
7420 size of references being privatized. */
7421 if ((flags & GOVD_SHARED) == 0)
7422 {
7423 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7424 if (t && DECL_P (t))
7425 omp_notice_variable (ctx, t, true);
7426 }
7427 }
7428
7429 if (n != NULL)
7430 n->value |= flags;
7431 else
7432 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7433
7434 /* For reduction clauses in OpenACC loop directives, by default create a
7435 copy clause on the enclosing parallel construct for carrying back the
7436 results. */
7437 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7438 {
7439 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7440 while (outer_ctx)
7441 {
7442 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7443 if (n != NULL)
7444 {
7445 /* Ignore local variables and explicitly declared clauses. */
7446 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7447 break;
7448 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7449 {
7450 /* According to the OpenACC spec, such a reduction variable
7451 should already have a copy map on a kernels construct,
7452 verify that here. */
7453 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7454 && (n->value & GOVD_MAP));
7455 }
7456 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7457 {
7458 /* Remove firstprivate and make it a copy map. */
7459 n->value &= ~GOVD_FIRSTPRIVATE;
7460 n->value |= GOVD_MAP;
7461 }
7462 }
7463 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7464 {
7465 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7466 GOVD_MAP | GOVD_SEEN);
7467 break;
7468 }
7469 outer_ctx = outer_ctx->outer_context;
7470 }
7471 }
7472 }
7473
7474 /* Notice a threadprivate variable DECL used in OMP context CTX.
7475 This just prints out diagnostics about threadprivate variable uses
7476 in untied tasks. If DECL2 is non-NULL, prevent this warning
7477 on that variable. */
7478
7479 static bool
7480 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7481 tree decl2)
7482 {
7483 splay_tree_node n;
7484 struct gimplify_omp_ctx *octx;
7485
7486 for (octx = ctx; octx; octx = octx->outer_context)
7487 if ((octx->region_type & ORT_TARGET) != 0
7488 || octx->order_concurrent)
7489 {
7490 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7491 if (n == NULL)
7492 {
7493 if (octx->order_concurrent)
7494 {
7495 error ("threadprivate variable %qE used in a region with"
7496 " %<order(concurrent)%> clause", DECL_NAME (decl));
7497 inform (octx->location, "enclosing region");
7498 }
7499 else
7500 {
7501 error ("threadprivate variable %qE used in target region",
7502 DECL_NAME (decl));
7503 inform (octx->location, "enclosing target region");
7504 }
7505 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7506 }
7507 if (decl2)
7508 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7509 }
7510
7511 if (ctx->region_type != ORT_UNTIED_TASK)
7512 return false;
7513 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7514 if (n == NULL)
7515 {
7516 error ("threadprivate variable %qE used in untied task",
7517 DECL_NAME (decl));
7518 inform (ctx->location, "enclosing task");
7519 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7520 }
7521 if (decl2)
7522 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7523 return false;
7524 }
7525
7526 /* Return true if global var DECL is device resident. */
7527
7528 static bool
7529 device_resident_p (tree decl)
7530 {
7531 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7532
7533 if (!attr)
7534 return false;
7535
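/* The clause list hanging off the attribute is chained through TREE_PURPOSE rather than TREE_CHAIN here.  */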
7536 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7537 {
7538 tree c = TREE_VALUE (t);
7539 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7540 return true;
7541 }
7542
7543 return false;
7544 }
7545
7546 /* Return true if DECL has an ACC DECLARE attribute. */
7547
7548 static bool
7549 is_oacc_declared (tree decl)
7550 {
7551 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7552 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7553 return declared != NULL_TREE;
7554 }
7555
7556 /* Determine outer default flags for DECL mentioned in an OMP region
7557 but not declared in an enclosing clause.
7558
7559 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7560 remapped firstprivate instead of shared. To some extent this is
7561 addressed in omp_firstprivatize_type_sizes, but not
7562 effectively. */
7563
7564 static unsigned
7565 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7566 bool in_code, unsigned flags)
7567 {
7568 enum omp_clause_default_kind default_kind = ctx->default_kind;
7569 enum omp_clause_default_kind kind;
7570
7571 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7572 if (ctx->region_type & ORT_TASK)
7573 {
7574 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7575
7576 /* The event-handle specified by a detach clause should always be firstprivate,
7577 regardless of the current default. */
7578 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7579 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7580 }
7581 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7582 default_kind = kind;
7583 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7584 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7585 /* For C/C++ default({,first}private), variables with static storage duration
7586 declared in a namespace or global scope and referenced in the construct
7587 must be explicitly specified, i.e. this acts as default(none).  */
7588 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7589 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7590 && VAR_P (decl)
7591 && is_global_var (decl)
7592 && (DECL_FILE_SCOPE_P (decl)
7593 || (DECL_CONTEXT (decl)
7594 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7595 && !lang_GNU_Fortran ())
7596 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7597
7598 switch (default_kind)
7599 {
7600 case OMP_CLAUSE_DEFAULT_NONE:
7601 {
7602 const char *rtype;
7603
7604 if (ctx->region_type & ORT_PARALLEL)
7605 rtype = "parallel";
7606 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7607 rtype = "taskloop";
7608 else if (ctx->region_type & ORT_TASK)
7609 rtype = "task";
7610 else if (ctx->region_type & ORT_TEAMS)
7611 rtype = "teams";
7612 else
7613 gcc_unreachable ();
7614
7615 error ("%qE not specified in enclosing %qs",
7616 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7617 inform (ctx->location, "enclosing %qs", rtype);
7618 }
7619 /* FALLTHRU */
7620 case OMP_CLAUSE_DEFAULT_SHARED:
7621 flags |= GOVD_SHARED;
7622 break;
7623 case OMP_CLAUSE_DEFAULT_PRIVATE:
7624 flags |= GOVD_PRIVATE;
7625 break;
7626 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7627 flags |= GOVD_FIRSTPRIVATE;
7628 break;
7629 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7630 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7631 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7632 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7633 {
7634 omp_notice_variable (octx, decl, in_code);
7635 for (; octx; octx = octx->outer_context)
7636 {
7637 splay_tree_node n2;
7638
7639 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7640 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7641 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7642 continue;
7643 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7644 {
7645 flags |= GOVD_FIRSTPRIVATE;
7646 goto found_outer;
7647 }
7648 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7649 {
7650 flags |= GOVD_SHARED;
7651 goto found_outer;
7652 }
7653 }
7654 }
7655
7656 if (TREE_CODE (decl) == PARM_DECL
7657 || (!is_global_var (decl)
7658 && DECL_CONTEXT (decl) == current_function_decl))
7659 flags |= GOVD_FIRSTPRIVATE;
7660 else
7661 flags |= GOVD_SHARED;
7662 found_outer:
7663 break;
7664
7665 default:
7666 gcc_unreachable ();
7667 }
7668
7669 return flags;
7670 }
7671
7672
7673 /* Determine outer default flags for DECL mentioned in an OACC region
7674 but not declared in an enclosing clause. */
7675
7676 static unsigned
7677 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7678 {
7679 const char *rkind;
7680 bool on_device = false;
7681 bool is_private = false;
7682 bool declared = is_oacc_declared (decl);
7683 tree type = TREE_TYPE (decl);
7684
7685 if (omp_privatize_by_reference (decl))
7686 type = TREE_TYPE (type);
7687
7688 /* For Fortran COMMON blocks, only used variables in those blocks are
7689 transferred and remapped.  The block itself will have a private clause to
7690 avoid transferring the data twice.
7691 The hook evaluates to false by default.  For a variable in Fortran's COMMON
7692 or EQUIVALENCE block, it returns 'true' (as we have shared=false), as only
7693 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7694 the whole block.  For C++ and Fortran, it can also be true under certain
7695 other conditions, if DECL_HAS_VALUE_EXPR. */
7696 if (RECORD_OR_UNION_TYPE_P (type))
7697 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7698
7699 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7700 && is_global_var (decl)
7701 && device_resident_p (decl)
7702 && !is_private)
7703 {
7704 on_device = true;
7705 flags |= GOVD_MAP_TO_ONLY;
7706 }
7707
7708 switch (ctx->region_type)
7709 {
7710 case ORT_ACC_KERNELS:
7711 rkind = "kernels";
7712
7713 if (is_private)
7714 flags |= GOVD_FIRSTPRIVATE;
7715 else if (AGGREGATE_TYPE_P (type))
7716 {
7717 /* Aggregates default to 'present_or_copy', or 'present'. */
7718 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7719 flags |= GOVD_MAP;
7720 else
7721 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7722 }
7723 else
7724 /* Scalars default to 'copy'. */
7725 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7726
7727 break;
7728
7729 case ORT_ACC_PARALLEL:
7730 case ORT_ACC_SERIAL:
7731 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7732
7733 if (is_private)
7734 flags |= GOVD_FIRSTPRIVATE;
7735 else if (on_device || declared)
7736 flags |= GOVD_MAP;
7737 else if (AGGREGATE_TYPE_P (type))
7738 {
7739 /* Aggregates default to 'present_or_copy', or 'present'. */
7740 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7741 flags |= GOVD_MAP;
7742 else
7743 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7744 }
7745 else
7746 /* Scalars default to 'firstprivate'. */
7747 flags |= GOVD_FIRSTPRIVATE;
7748
7749 break;
7750
7751 default:
7752 gcc_unreachable ();
7753 }
7754
7755 if (DECL_ARTIFICIAL (decl))
7756 ; /* We can get compiler-generated decls, and should not complain
7757 about them. */
7758 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7759 {
7760 error ("%qE not specified in enclosing OpenACC %qs construct",
7761 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7762 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7763 }
7764 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7765 ; /* Handled above. */
7766 else
7767 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7768
7769 return flags;
7770 }
7771
7772 /* Record the fact that DECL was used within the OMP context CTX.
7773 IN_CODE is true when real code uses DECL, and false when we should
7774 merely emit default(none) errors. Return true if DECL is going to
7775 be remapped and thus DECL shouldn't be gimplified into its
7776 DECL_VALUE_EXPR (if any). */
7777
7778 static bool
7779 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7780 {
7781 splay_tree_node n;
7782 unsigned flags = in_code ? GOVD_SEEN : 0;
7783 bool ret = false, shared;
7784
7785 if (error_operand_p (decl))
7786 return false;
7787
7788 if (ctx->region_type == ORT_NONE)
7789 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7790
7791 if (is_global_var (decl))
7792 {
7793 /* Threadprivate variables are predetermined. */
7794 if (DECL_THREAD_LOCAL_P (decl))
7795 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7796
7797 if (DECL_HAS_VALUE_EXPR_P (decl))
7798 {
7799 if (ctx->region_type & ORT_ACC)
7800 /* For OpenACC, defer expansion of the value to avoid transferring
7801 privatized common block data instead of the im-/explicitly transferred
7802 variables which are in common blocks.  */
7803 ;
7804 else
7805 {
7806 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7807
7808 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7809 return omp_notice_threadprivate_variable (ctx, decl, value);
7810 }
7811 }
7812
7813 if (gimplify_omp_ctxp->outer_context == NULL
7814 && VAR_P (decl)
7815 && oacc_get_fn_attrib (current_function_decl))
7816 {
7817 location_t loc = DECL_SOURCE_LOCATION (decl);
7818
7819 if (lookup_attribute ("omp declare target link",
7820 DECL_ATTRIBUTES (decl)))
7821 {
7822 error_at (loc,
7823 "%qE with %<link%> clause used in %<routine%> function",
7824 DECL_NAME (decl));
7825 return false;
7826 }
7827 else if (!lookup_attribute ("omp declare target",
7828 DECL_ATTRIBUTES (decl)))
7829 {
7830 error_at (loc,
7831 "%qE requires a %<declare%> directive for use "
7832 "in a %<routine%> function", DECL_NAME (decl));
7833 return false;
7834 }
7835 }
7836 }
7837
7838 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7839 if ((ctx->region_type & ORT_TARGET) != 0)
7840 {
7841 if (ctx->region_type & ORT_ACC)
7842 /* For OpenACC, as remarked above, defer expansion. */
7843 shared = false;
7844 else
7845 shared = true;
7846
7847 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7848 if (n == NULL)
7849 {
7850 unsigned nflags = flags;
7851 if ((ctx->region_type & ORT_ACC) == 0)
7852 {
7853 bool is_declare_target = false;
7854 if (is_global_var (decl)
7855 && varpool_node::get_create (decl)->offloadable)
7856 {
7857 struct gimplify_omp_ctx *octx;
7858 for (octx = ctx->outer_context;
7859 octx; octx = octx->outer_context)
7860 {
7861 n = splay_tree_lookup (octx->variables,
7862 (splay_tree_key)decl);
7863 if (n
7864 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7865 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7866 break;
7867 }
7868 is_declare_target = octx == NULL;
7869 }
7870 if (!is_declare_target)
7871 {
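/* Classify DECL so the matching implicit defaultmap category can be applied: allocatable, scalar on the target, scalar, (reference to) pointer, or aggregate.  */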
7872 int gdmk;
7873 enum omp_clause_defaultmap_kind kind;
7874 if (lang_hooks.decls.omp_allocatable_p (decl))
7875 gdmk = GDMK_ALLOCATABLE;
7876 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7877 gdmk = GDMK_SCALAR_TARGET;
7878 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7879 gdmk = GDMK_SCALAR;
7880 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7881 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7882 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7883 == POINTER_TYPE)))
7884 gdmk = GDMK_POINTER;
7885 else
7886 gdmk = GDMK_AGGREGATE;
7887 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7888 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7889 {
7890 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7891 nflags |= GOVD_FIRSTPRIVATE;
7892 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7893 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7894 else
7895 gcc_unreachable ();
7896 }
7897 else if (ctx->defaultmap[gdmk] == 0)
7898 {
7899 tree d = lang_hooks.decls.omp_report_decl (decl);
7900 error ("%qE not specified in enclosing %<target%>",
7901 DECL_NAME (d));
7902 inform (ctx->location, "enclosing %<target%>");
7903 }
7904 else if (ctx->defaultmap[gdmk]
7905 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7906 nflags |= ctx->defaultmap[gdmk];
7907 else
7908 {
7909 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7910 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7911 }
7912 }
7913 }
7914
7915 struct gimplify_omp_ctx *octx = ctx->outer_context;
7916 if ((ctx->region_type & ORT_ACC) && octx)
7917 {
7918 /* Look in outer OpenACC contexts, to see if there's a
7919 data attribute for this variable. */
7920 omp_notice_variable (octx, decl, in_code);
7921
7922 for (; octx; octx = octx->outer_context)
7923 {
7924 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7925 break;
7926 splay_tree_node n2
7927 = splay_tree_lookup (octx->variables,
7928 (splay_tree_key) decl);
7929 if (n2)
7930 {
7931 if (octx->region_type == ORT_ACC_HOST_DATA)
7932 error ("variable %qE declared in enclosing "
7933 "%<host_data%> region", DECL_NAME (decl));
7934 nflags |= GOVD_MAP;
7935 if (octx->region_type == ORT_ACC_DATA
7936 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7937 nflags |= GOVD_MAP_0LEN_ARRAY;
7938 goto found_outer;
7939 }
7940 }
7941 }
7942
7943 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7944 | GOVD_MAP_ALLOC_ONLY)) == flags)
7945 {
7946 tree type = TREE_TYPE (decl);
7947
7948 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7949 && omp_privatize_by_reference (decl))
7950 type = TREE_TYPE (type);
7951 if (!omp_mappable_type (type))
7952 {
7953 error ("%qD referenced in target region does not have "
7954 "a mappable type", decl);
7955 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7956 }
7957 else
7958 {
7959 if ((ctx->region_type & ORT_ACC) != 0)
7960 nflags = oacc_default_clause (ctx, decl, flags);
7961 else
7962 nflags |= GOVD_MAP;
7963 }
7964 }
7965 found_outer:
7966 omp_add_variable (ctx, decl, nflags);
7967 }
7968 else
7969 {
7970 /* If nothing changed, there's nothing left to do. */
7971 if ((n->value & flags) == flags)
7972 return ret;
7973 flags |= n->value;
7974 n->value = flags;
7975 }
7976 goto do_outer;
7977 }
7978
7979 if (n == NULL)
7980 {
7981 if (ctx->region_type == ORT_WORKSHARE
7982 || ctx->region_type == ORT_TASKGROUP
7983 || ctx->region_type == ORT_SIMD
7984 || ctx->region_type == ORT_ACC
7985 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7986 goto do_outer;
7987
7988 flags = omp_default_clause (ctx, decl, in_code, flags);
7989
7990 if ((flags & GOVD_PRIVATE)
7991 && lang_hooks.decls.omp_private_outer_ref (decl))
7992 flags |= GOVD_PRIVATE_OUTER_REF;
7993
7994 omp_add_variable (ctx, decl, flags);
7995
7996 shared = (flags & GOVD_SHARED) != 0;
7997 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7998 goto do_outer;
7999 }
8000
8001 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8002 lb, b or incr expressions; those shouldn't be turned into simd arrays.  */
8003 if (ctx->region_type == ORT_SIMD
8004 && ctx->in_for_exprs
8005 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8006 == GOVD_PRIVATE))
8007 flags &= ~GOVD_SEEN;
8008
8009 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8010 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8011 && DECL_SIZE (decl))
8012 {
8013 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8014 {
8015 splay_tree_node n2;
8016 tree t = DECL_VALUE_EXPR (decl);
8017 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
8018 t = TREE_OPERAND (t, 0);
8019 gcc_assert (DECL_P (t));
8020 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8021 n2->value |= GOVD_SEEN;
8022 }
8023 else if (omp_privatize_by_reference (decl)
8024 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8025 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8026 != INTEGER_CST))
8027 {
8028 splay_tree_node n2;
8029 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8030 gcc_assert (DECL_P (t));
8031 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8032 if (n2)
8033 omp_notice_variable (ctx, t, true);
8034 }
8035 }
8036
8037 if (ctx->region_type & ORT_ACC)
8038 /* For OpenACC, as remarked above, defer expansion. */
8039 shared = false;
8040 else
8041 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8042 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8043
8044 /* If nothing changed, there's nothing left to do. */
8045 if ((n->value & flags) == flags)
8046 return ret;
8047 flags |= n->value;
8048 n->value = flags;
8049
8050 do_outer:
8051 /* If the variable is private in the current context, then we don't
8052 need to propagate anything to an outer context. */
8053 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8054 return ret;
8055 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8056 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8057 return ret;
8058 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8059 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8060 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8061 return ret;
8062 if (ctx->outer_context
8063 && omp_notice_variable (ctx->outer_context, decl, in_code))
8064 return true;
8065 return ret;
8066 }
8067
8068 /* Verify that DECL is private within CTX. If there's specific information
8069 to the contrary in the innermost scope, generate an error. */
8070
8071 static bool
8072 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8073 {
8074 splay_tree_node n;
8075
8076 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8077 if (n != NULL)
8078 {
8079 if (n->value & GOVD_SHARED)
8080 {
8081 if (ctx == gimplify_omp_ctxp)
8082 {
8083 if (simd)
8084 error ("iteration variable %qE is predetermined linear",
8085 DECL_NAME (decl));
8086 else
8087 error ("iteration variable %qE should be private",
8088 DECL_NAME (decl));
8089 n->value = GOVD_PRIVATE;
8090 return true;
8091 }
8092 else
8093 return false;
8094 }
8095 else if ((n->value & GOVD_EXPLICIT) != 0
8096 && (ctx == gimplify_omp_ctxp
8097 || (ctx->region_type == ORT_COMBINED_PARALLEL
8098 && gimplify_omp_ctxp->outer_context == ctx)))
8099 {
8100 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8101 error ("iteration variable %qE should not be firstprivate",
8102 DECL_NAME (decl));
8103 else if ((n->value & GOVD_REDUCTION) != 0)
8104 error ("iteration variable %qE should not be reduction",
8105 DECL_NAME (decl));
8106 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8107 error ("iteration variable %qE should not be linear",
8108 DECL_NAME (decl));
8109 }
8110 return (ctx == gimplify_omp_ctxp
8111 || (ctx->region_type == ORT_COMBINED_PARALLEL
8112 && gimplify_omp_ctxp->outer_context == ctx));
8113 }
8114
8115 if (ctx->region_type != ORT_WORKSHARE
8116 && ctx->region_type != ORT_TASKGROUP
8117 && ctx->region_type != ORT_SIMD
8118 && ctx->region_type != ORT_ACC)
8119 return false;
8120 else if (ctx->outer_context)
8121 return omp_is_private (ctx->outer_context, decl, simd);
8122 return false;
8123 }
8124
8125 /* Return true if DECL is private within a parallel region
8126 that binds to the current construct's context or in the parallel
8127 region's REDUCTION clause.  */
8128
8129 static bool
8130 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8131 {
8132 splay_tree_node n;
8133
8134 do
8135 {
8136 ctx = ctx->outer_context;
8137 if (ctx == NULL)
8138 {
8139 if (is_global_var (decl))
8140 return false;
8141
8142 /* References might be private, but they might be shared too.
8143 When checking for copyprivate, assume they might be
8144 private; otherwise assume they might be shared.  */
8145 if (copyprivate)
8146 return true;
8147
8148 if (omp_privatize_by_reference (decl))
8149 return false;
8150
8151 /* Treat C++ privatized non-static data members outside
8152 of the privatization the same. */
8153 if (omp_member_access_dummy_var (decl))
8154 return false;
8155
8156 return true;
8157 }
8158
8159 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8160
8161 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8162 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8163 {
8164 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8165 || n == NULL
8166 || (n->value & GOVD_MAP) == 0)
8167 continue;
8168 return false;
8169 }
8170
8171 if (n != NULL)
8172 {
8173 if ((n->value & GOVD_LOCAL) != 0
8174 && omp_member_access_dummy_var (decl))
8175 return false;
8176 return (n->value & GOVD_SHARED) == 0;
8177 }
8178
8179 if (ctx->region_type == ORT_WORKSHARE
8180 || ctx->region_type == ORT_TASKGROUP
8181 || ctx->region_type == ORT_SIMD
8182 || ctx->region_type == ORT_ACC)
8183 continue;
8184
8185 break;
8186 }
8187 while (1);
8188 return false;
8189 }
8190
8191 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8192
8193 static tree
8194 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8195 {
8196 tree t = *tp;
8197
8198 /* If this is the DECL_EXPR for the DECL we are looking for, return it.  */
8199 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8200 return t;
8201
8202 if (IS_TYPE_OR_DECL_P (t))
8203 *walk_subtrees = 0;
8204 return NULL_TREE;
8205 }
8206
8207
8208 /* Gimplify the affinity clause but effectively ignore it.
8209 Generate:
8210 var = begin;
8211 if ((step > 0) ? var <= end : var > end)
8212 locator_var_expr;  */
8213
8214 static void
8215 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8216 {
8217 tree last_iter = NULL_TREE;
8218 tree last_bind = NULL_TREE;
8219 tree label = NULL_TREE;
8220 tree *last_body = NULL;
8221 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8222 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8223 {
8224 tree t = OMP_CLAUSE_DECL (c);
8225 if (TREE_CODE (t) == TREE_LIST
8226 && TREE_PURPOSE (t)
8227 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8228 {
8229 if (TREE_VALUE (t) == null_pointer_node)
8230 continue;
8231 if (TREE_PURPOSE (t) != last_iter)
8232 {
8233 if (last_bind)
8234 {
8235 append_to_statement_list (label, last_body);
8236 gimplify_and_add (last_bind, pre_p);
8237 last_bind = NULL_TREE;
8238 }
8239 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8240 {
8241 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8242 is_gimple_val, fb_rvalue) == GS_ERROR
8243 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8244 is_gimple_val, fb_rvalue) == GS_ERROR
8245 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8246 is_gimple_val, fb_rvalue) == GS_ERROR
8247 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8248 is_gimple_val, fb_rvalue)
8249 == GS_ERROR))
8250 return;
8251 }
8252 last_iter = TREE_PURPOSE (t);
8253 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8254 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8255 NULL, block);
8256 last_body = &BIND_EXPR_BODY (last_bind);
8257 tree cond = NULL_TREE;
8258 location_t loc = OMP_CLAUSE_LOCATION (c);
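/* Assign begin to each iterator variable and accumulate the guards (step > 0 ? var <= end : var > end) of all iterators into a single condition.  */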
8259 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8260 {
8261 tree var = TREE_VEC_ELT (it, 0);
8262 tree begin = TREE_VEC_ELT (it, 1);
8263 tree end = TREE_VEC_ELT (it, 2);
8264 tree step = TREE_VEC_ELT (it, 3);
8265 loc = DECL_SOURCE_LOCATION (var);
8266 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8267 var, begin);
8268 append_to_statement_list_force (tem, last_body);
8269
8270 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8271 step, build_zero_cst (TREE_TYPE (step)));
8272 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8273 var, end);
8274 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8275 var, end);
8276 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8277 cond1, cond2, cond3);
8278 if (cond)
8279 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8280 boolean_type_node, cond, cond1);
8281 else
8282 cond = cond1;
8283 }
8284 tree cont_label = create_artificial_label (loc);
8285 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8286 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8287 void_node,
8288 build_and_jump (&cont_label));
8289 append_to_statement_list_force (tem, last_body);
8290 }
8291 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8292 {
8293 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8294 last_body);
8295 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8296 }
8297 if (error_operand_p (TREE_VALUE (t)))
8298 return;
8299 append_to_statement_list_force (TREE_VALUE (t), last_body);
8300 TREE_VALUE (t) = null_pointer_node;
8301 }
8302 else
8303 {
8304 if (last_bind)
8305 {
8306 append_to_statement_list (label, last_body);
8307 gimplify_and_add (last_bind, pre_p);
8308 last_bind = NULL_TREE;
8309 }
8310 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8311 {
8312 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8313 NULL, is_gimple_val, fb_rvalue);
8314 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8315 }
8316 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8317 return;
8318 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8319 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8320 return;
8321 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8322 }
8323 }
8324 if (last_bind)
8325 {
8326 append_to_statement_list (label, last_body);
8327 gimplify_and_add (last_bind, pre_p);
8328 }
8329 return;
8330 }
8331
8332 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8333 lower all the depend clauses by populating the corresponding depend
8334 array.  Returns 0 if there are no such depend clauses, 2 if all
8335 depend clauses should be removed, and 1 otherwise.  */
8336
8337 static int
8338 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8339 {
8340 tree c;
8341 gimple *g;
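/* Per-kind clause counts, indexed 0: out/inout, 1: mutexinoutset, 2: in, 3: depobj, 4: inoutset (see the switch below).  */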
8342 size_t n[5] = { 0, 0, 0, 0, 0 };
8343 bool unused[5];
8344 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8345 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8346 size_t i, j;
8347 location_t first_loc = UNKNOWN_LOCATION;
8348
8349 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8351 {
8352 switch (OMP_CLAUSE_DEPEND_KIND (c))
8353 {
8354 case OMP_CLAUSE_DEPEND_IN:
8355 i = 2;
8356 break;
8357 case OMP_CLAUSE_DEPEND_OUT:
8358 case OMP_CLAUSE_DEPEND_INOUT:
8359 i = 0;
8360 break;
8361 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8362 i = 1;
8363 break;
8364 case OMP_CLAUSE_DEPEND_DEPOBJ:
8365 i = 3;
8366 break;
8367 case OMP_CLAUSE_DEPEND_INOUTSET:
8368 i = 4;
8369 break;
8370 default:
8371 gcc_unreachable ();
8372 }
8373 tree t = OMP_CLAUSE_DECL (c);
8374 if (first_loc == UNKNOWN_LOCATION)
8375 first_loc = OMP_CLAUSE_LOCATION (c);
8376 if (TREE_CODE (t) == TREE_LIST
8377 && TREE_PURPOSE (t)
8378 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8379 {
8380 if (TREE_PURPOSE (t) != last_iter)
8381 {
8382 tree tcnt = size_one_node;
8383 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8384 {
8385 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8386 is_gimple_val, fb_rvalue) == GS_ERROR
8387 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8388 is_gimple_val, fb_rvalue) == GS_ERROR
8389 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8390 is_gimple_val, fb_rvalue) == GS_ERROR
8391 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8392 is_gimple_val, fb_rvalue)
8393 == GS_ERROR))
8394 return 2;
8395 tree var = TREE_VEC_ELT (it, 0);
8396 tree begin = TREE_VEC_ELT (it, 1);
8397 tree end = TREE_VEC_ELT (it, 2);
8398 tree step = TREE_VEC_ELT (it, 3);
8399 tree orig_step = TREE_VEC_ELT (it, 4);
8400 tree type = TREE_TYPE (var);
8401 tree stype = TREE_TYPE (step);
8402 location_t loc = DECL_SOURCE_LOCATION (var);
8403 tree endmbegin;
8404 /* Compute count for this iterator as
8405 orig_step > 0
8406 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8407 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8408 and compute product of those for the entire depend
8409 clause. */
8410 if (POINTER_TYPE_P (type))
8411 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8412 stype, end, begin);
8413 else
8414 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8415 end, begin);
8416 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8417 step,
8418 build_int_cst (stype, 1));
8419 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8420 build_int_cst (stype, 1));
8421 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8422 unshare_expr (endmbegin),
8423 stepm1);
8424 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8425 pos, step);
8426 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8427 endmbegin, stepp1);
8428 if (TYPE_UNSIGNED (stype))
8429 {
8430 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8431 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8432 }
8433 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8434 neg, step);
8435 step = NULL_TREE;
8436 tree cond = fold_build2_loc (loc, LT_EXPR,
8437 boolean_type_node,
8438 begin, end);
8439 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8440 build_int_cst (stype, 0));
8441 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8442 end, begin);
8443 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8444 build_int_cst (stype, 0));
8445 tree osteptype = TREE_TYPE (orig_step);
8446 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8447 orig_step,
8448 build_int_cst (osteptype, 0));
8449 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8450 cond, pos, neg);
8451 cnt = fold_convert_loc (loc, sizetype, cnt);
8452 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8453 fb_rvalue) == GS_ERROR)
8454 return 2;
8455 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8456 }
8457 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8458 fb_rvalue) == GS_ERROR)
8459 return 2;
8460 last_iter = TREE_PURPOSE (t);
8461 last_count = tcnt;
8462 }
8463 if (counts[i] == NULL_TREE)
8464 counts[i] = last_count;
8465 else
8466 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8467 PLUS_EXPR, counts[i], last_count);
8468 }
8469 else
8470 n[i]++;
8471 }
8472 for (i = 0; i < 5; i++)
8473 if (counts[i])
8474 break;
8475 if (i == 5)
8476 return 0;
8477
8478 tree total = size_zero_node;
8479 for (i = 0; i < 5; i++)
8480 {
8481 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8482 if (counts[i] == NULL_TREE)
8483 counts[i] = size_zero_node;
8484 if (n[i])
8485 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8486 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8487 fb_rvalue) == GS_ERROR)
8488 return 2;
8489 total = size_binop (PLUS_EXPR, total, counts[i]);
8490 }
8491
8492 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8493 == GS_ERROR)
8494 return 2;
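/* The legacy format suffices if only out/inout and in dependences appear; its header is two pointers (total count, out/inout count).  The newer format's header is five: a zero marker, the total count, and the counts of the first three kinds.  */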
8495 bool is_old = unused[1] && unused[3] && unused[4];
8496 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8497 size_int (is_old ? 1 : 4));
8498 if (!unused[4])
8499 totalpx = size_binop (PLUS_EXPR, totalpx,
8500 size_binop (MULT_EXPR, counts[4], size_int (2)));
8501 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8502 tree array = create_tmp_var_raw (type);
8503 TREE_ADDRESSABLE (array) = 1;
8504 if (!poly_int_tree_p (totalpx))
8505 {
8506 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8507 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8508 if (gimplify_omp_ctxp)
8509 {
8510 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8511 while (ctx
8512 && (ctx->region_type == ORT_WORKSHARE
8513 || ctx->region_type == ORT_TASKGROUP
8514 || ctx->region_type == ORT_SIMD
8515 || ctx->region_type == ORT_ACC))
8516 ctx = ctx->outer_context;
8517 if (ctx)
8518 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8519 }
8520 gimplify_vla_decl (array, pre_p);
8521 }
8522 else
8523 gimple_add_tmp_var (array);
8524 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8525 NULL_TREE);
8526 tree tem;
8527 if (!is_old)
8528 {
8529 tem = build2 (MODIFY_EXPR, void_type_node, r,
8530 build_int_cst (ptr_type_node, 0));
8531 gimplify_and_add (tem, pre_p);
8532 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8533 NULL_TREE);
8534 }
8535 tem = build2 (MODIFY_EXPR, void_type_node, r,
8536 fold_convert (ptr_type_node, total));
8537 gimplify_and_add (tem, pre_p);
8538 for (i = 1; i < (is_old ? 2 : 4); i++)
8539 {
8540 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8541 NULL_TREE, NULL_TREE);
8542 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8543 gimplify_and_add (tem, pre_p);
8544 }
8545
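/* Running write indexes into the array, one per kind; cnts[5], when inoutset entries exist, indexes the extra two-slot records appended after the regular entries.  */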
8546 tree cnts[6];
8547 for (j = 5; j; j--)
8548 if (!unused[j - 1])
8549 break;
8550 for (i = 0; i < 5; i++)
8551 {
8552 if (i && (i >= j || unused[i - 1]))
8553 {
8554 cnts[i] = cnts[i - 1];
8555 continue;
8556 }
8557 cnts[i] = create_tmp_var (sizetype);
8558 if (i == 0)
8559 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8560 else
8561 {
8562 tree t;
8563 if (is_old)
8564 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8565 else
8566 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8567 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8568 == GS_ERROR)
8569 return 2;
8570 g = gimple_build_assign (cnts[i], t);
8571 }
8572 gimple_seq_add_stmt (pre_p, g);
8573 }
8574 if (unused[4])
8575 cnts[5] = NULL_TREE;
8576 else
8577 {
8578 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8579 cnts[5] = create_tmp_var (sizetype);
8580 g = gimple_build_assign (cnts[5], t);
8581 gimple_seq_add_stmt (pre_p, g);
8582 }
8583
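/* Second pass: walk the clauses again, storing the address of each depend object into the array slot for its kind, and emitting iterator loops for the clauses that use iterators.  */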
8584 last_iter = NULL_TREE;
8585 tree last_bind = NULL_TREE;
8586 tree *last_body = NULL;
8587 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8589 {
8590 switch (OMP_CLAUSE_DEPEND_KIND (c))
8591 {
8592 case OMP_CLAUSE_DEPEND_IN:
8593 i = 2;
8594 break;
8595 case OMP_CLAUSE_DEPEND_OUT:
8596 case OMP_CLAUSE_DEPEND_INOUT:
8597 i = 0;
8598 break;
8599 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8600 i = 1;
8601 break;
8602 case OMP_CLAUSE_DEPEND_DEPOBJ:
8603 i = 3;
8604 break;
8605 case OMP_CLAUSE_DEPEND_INOUTSET:
8606 i = 4;
8607 break;
8608 default:
8609 gcc_unreachable ();
8610 }
8611 tree t = OMP_CLAUSE_DECL (c);
8612 if (TREE_CODE (t) == TREE_LIST
8613 && TREE_PURPOSE (t)
8614 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8615 {
8616 if (TREE_PURPOSE (t) != last_iter)
8617 {
8618 if (last_bind)
8619 gimplify_and_add (last_bind, pre_p);
8620 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8621 last_bind = build3 (BIND_EXPR, void_type_node,
8622 BLOCK_VARS (block), NULL, block);
8623 TREE_SIDE_EFFECTS (last_bind) = 1;
8624 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8625 tree *p = &BIND_EXPR_BODY (last_bind);
8626 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8627 {
8628 tree var = TREE_VEC_ELT (it, 0);
8629 tree begin = TREE_VEC_ELT (it, 1);
8630 tree end = TREE_VEC_ELT (it, 2);
8631 tree step = TREE_VEC_ELT (it, 3);
8632 tree orig_step = TREE_VEC_ELT (it, 4);
8633 tree type = TREE_TYPE (var);
8634 location_t loc = DECL_SOURCE_LOCATION (var);
8635 /* Emit:
8636 var = begin;
8637 goto cond_label;
8638 beg_label:
8639 ...
8640 var = var + step;
8641 cond_label:
8642 if (orig_step > 0) {
8643 if (var < end) goto beg_label;
8644 } else {
8645 if (var > end) goto beg_label;
8646 }
8647 for each iterator, with inner iterators added to
8648 the ... above. */
8649 tree beg_label = create_artificial_label (loc);
8650 tree cond_label = NULL_TREE;
8651 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8652 var, begin);
8653 append_to_statement_list_force (tem, p);
8654 tem = build_and_jump (&cond_label);
8655 append_to_statement_list_force (tem, p);
8656 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8657 append_to_statement_list (tem, p);
8658 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8659 NULL_TREE, NULL_TREE);
8660 TREE_SIDE_EFFECTS (bind) = 1;
8661 SET_EXPR_LOCATION (bind, loc);
8662 append_to_statement_list_force (bind, p);
8663 if (POINTER_TYPE_P (type))
8664 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8665 var, fold_convert_loc (loc, sizetype,
8666 step));
8667 else
8668 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8669 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8670 var, tem);
8671 append_to_statement_list_force (tem, p);
8672 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8673 append_to_statement_list (tem, p);
8674 tree cond = fold_build2_loc (loc, LT_EXPR,
8675 boolean_type_node,
8676 var, end);
8677 tree pos
8678 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8679 cond, build_and_jump (&beg_label),
8680 void_node);
8681 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8682 var, end);
8683 tree neg
8684 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8685 cond, build_and_jump (&beg_label),
8686 void_node);
8687 tree osteptype = TREE_TYPE (orig_step);
8688 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8689 orig_step,
8690 build_int_cst (osteptype, 0));
8691 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8692 cond, pos, neg);
8693 append_to_statement_list_force (tem, p);
8694 p = &BIND_EXPR_BODY (bind);
8695 }
8696 last_body = p;
8697 }
8698 last_iter = TREE_PURPOSE (t);
8699 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8700 {
8701 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8702 0), last_body);
8703 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8704 }
8705 if (error_operand_p (TREE_VALUE (t)))
8706 return 2;
8707 if (TREE_VALUE (t) != null_pointer_node)
8708 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8709 if (i == 4)
8710 {
8711 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8712 NULL_TREE, NULL_TREE);
8713 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8714 NULL_TREE, NULL_TREE);
8715 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8716 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8717 void_type_node, r, r2);
8718 append_to_statement_list_force (tem, last_body);
8719 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8720 void_type_node, cnts[i],
8721 size_binop (PLUS_EXPR, cnts[i],
8722 size_int (1)));
8723 append_to_statement_list_force (tem, last_body);
8724 i = 5;
8725 }
8726 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8727 NULL_TREE, NULL_TREE);
8728 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8729 void_type_node, r, TREE_VALUE (t));
8730 append_to_statement_list_force (tem, last_body);
8731 if (i == 5)
8732 {
8733 r = build4 (ARRAY_REF, ptr_type_node, array,
8734 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8735 NULL_TREE, NULL_TREE);
8736 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8737 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8738 void_type_node, r, tem);
8739 append_to_statement_list_force (tem, last_body);
8740 }
8741 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8742 void_type_node, cnts[i],
8743 size_binop (PLUS_EXPR, cnts[i],
8744 size_int (1 + (i == 5))));
8745 append_to_statement_list_force (tem, last_body);
8746 TREE_VALUE (t) = null_pointer_node;
8747 }
8748 else
8749 {
8750 if (last_bind)
8751 {
8752 gimplify_and_add (last_bind, pre_p);
8753 last_bind = NULL_TREE;
8754 }
8755 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8756 {
8757 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8758 NULL, is_gimple_val, fb_rvalue);
8759 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8760 }
8761 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8762 return 2;
8763 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8764 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8765 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8766 is_gimple_val, fb_rvalue) == GS_ERROR)
8767 return 2;
8768 if (i == 4)
8769 {
8770 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8771 NULL_TREE, NULL_TREE);
8772 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8773 NULL_TREE, NULL_TREE);
8774 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8775 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8776 gimplify_and_add (tem, pre_p);
8777 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8778 cnts[i],
8779 size_int (1)));
8780 gimple_seq_add_stmt (pre_p, g);
8781 i = 5;
8782 }
8783 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8784 NULL_TREE, NULL_TREE);
8785 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8786 gimplify_and_add (tem, pre_p);
8787 if (i == 5)
8788 {
8789 r = build4 (ARRAY_REF, ptr_type_node, array,
8790 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8791 NULL_TREE, NULL_TREE);
8792 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8793 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8795 gimplify_and_add (tem, pre_p);
8796 }
8797 g = gimple_build_assign (cnts[i],
8798 size_binop (PLUS_EXPR, cnts[i],
8799 size_int (1 + (i == 5))));
8800 gimple_seq_add_stmt (pre_p, g);
8801 }
8802 }
8803 if (last_bind)
8804 gimplify_and_add (last_bind, pre_p);
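/* Sanity check: verify that the counts accumulated while filling the
   array match the precomputed totals, and trap at run time if they do
   not.  */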
8805 tree cond = boolean_false_node;
8806 if (is_old)
8807 {
8808 if (!unused[0])
8809 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8810 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8811 size_int (2)));
8812 if (!unused[2])
8813 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8814 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8815 cnts[2],
8816 size_binop_loc (first_loc, PLUS_EXPR,
8817 totalpx,
8818 size_int (1))));
8819 }
8820 else
8821 {
8822 tree prev = size_int (5);
8823 for (i = 0; i < 5; i++)
8824 {
8825 if (unused[i])
8826 continue;
8827 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8828 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8829 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8830 cnts[i], unshare_expr (prev)));
8831 }
8832 }
8833 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8834 build_call_expr_loc (first_loc,
8835 builtin_decl_explicit (BUILT_IN_TRAP),
8836 0), void_node);
8837 gimplify_and_add (tem, pre_p);
8838 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8839 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8840 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8841 OMP_CLAUSE_CHAIN (c) = *list_p;
8842 *list_p = c;
8843 return 1;
8844 }
8845
8846 /* For a set of mappings describing an array section pointed to by a struct
8847 (or derived type, etc.) component, create an "alloc" or "release" node to
8848 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8849 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8850 be created that is inserted into the list of mapping nodes attached to the
8851 directive being processed -- not part of the sorted list of nodes after
8852 GOMP_MAP_STRUCT.
8853
8854 CODE is the code of the directive being processed. GRP_START and GRP_END
8855 are the first and last of two or three nodes representing this array section
8856 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8857 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8858 filled with the additional node described above, if needed.
8859
8860 This function does not add the new nodes to any lists itself. It is the
8861 responsibility of the caller to do that. */
8862
8863 static tree
8864 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8865 tree *extra_node)
8866 {
8867 enum gomp_map_kind mkind
8868 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8869 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8870
8871 gcc_assert (grp_start != grp_end);
8872
8873 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8874 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8875 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8876 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8877 tree grp_mid = NULL_TREE;
8878 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8879 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8880
8881 if (grp_mid
8882 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8883 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8884 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8885 else
8886 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8887
8888 if (grp_mid
8889 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8890 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8891 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8892 {
8893 tree c3
8894 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8895 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8896 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8897 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8898 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8899
8900 *extra_node = c3;
8901 }
8902 else
8903 *extra_node = NULL_TREE;
8904
8905 return c2;
8906 }
8907
8908 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8909    and set *BITPOSP to the bit position of the access and *POFFSETP to its
8910    offset in bytes, with the whole-byte part of the bit position folded in.
8911    Return the containing object.  */
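/* E.g. for an access to a member at byte offset 8 (bit position 64) within
   its containing object, the containing object is returned with *POFFSETP
   set to 8 and *BITPOSP set to 64.  */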
8914
8915 static tree
8916 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8917 poly_offset_int *poffsetp)
8918 {
8919 tree offset;
8920 poly_int64 bitsize, bitpos;
8921 machine_mode mode;
8922 int unsignedp, reversep, volatilep = 0;
8923 poly_offset_int poffset;
8924
8925 STRIP_NOPS (base);
8926
8927 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8928 &unsignedp, &reversep, &volatilep);
8929
8930 STRIP_NOPS (base);
8931
8932 if (offset && poly_int_tree_p (offset))
8933 {
8934 poffset = wi::to_poly_offset (offset);
8935 offset = NULL_TREE;
8936 }
8937 else
8938 poffset = 0;
8939
8940 if (maybe_ne (bitpos, 0))
8941 poffset += bits_to_bytes_round_down (bitpos);
8942
8943 *bitposp = bitpos;
8944 *poffsetp = poffset;
8945
8946 return base;
8947 }
8948
8949 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8950 started processing the group yet. The TEMPORARY mark is used when we first
8951 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8952 when we have processed all the group's children (i.e. all the base pointers
8953 referred to by the group's mapping nodes, recursively). */
8954
8955 enum omp_tsort_mark {
8956 UNVISITED,
8957 TEMPORARY,
8958 PERMANENT
8959 };
8960
8961 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8962 clause. */
8963
8964 struct omp_mapping_group {
8965 tree *grp_start;
8966 tree grp_end;
8967 omp_tsort_mark mark;
8968 /* If we've removed the group but need to reindex, mark the group as
8969 deleted. */
8970 bool deleted;
8971 struct omp_mapping_group *sibling;
8972 struct omp_mapping_group *next;
8973 };
8974
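/* Dump mapping group GRP to stderr, temporarily terminating the clause
   chain at the group's last node so that only the group's own nodes are
   printed.  */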
8975 DEBUG_FUNCTION void
8976 debug_mapping_group (omp_mapping_group *grp)
8977 {
8978 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
8979 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
8980 debug_generic_expr (*grp->grp_start);
8981 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
8982 }
8983
8984 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
8985 isn't one. */
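/* For example, for an access "ptr->arr[i]" the base pointer is "ptr",
   whereas a direct access such as "s.arr[i]" has no base pointer and
   NULL_TREE is returned.  */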
8986
8987 static tree
8988 omp_get_base_pointer (tree expr)
8989 {
8990 while (TREE_CODE (expr) == ARRAY_REF
8991 || TREE_CODE (expr) == COMPONENT_REF)
8992 expr = TREE_OPERAND (expr, 0);
8993
8994 if (TREE_CODE (expr) == INDIRECT_REF
8995 || (TREE_CODE (expr) == MEM_REF
8996 && integer_zerop (TREE_OPERAND (expr, 1))))
8997 {
8998 expr = TREE_OPERAND (expr, 0);
8999 while (TREE_CODE (expr) == COMPOUND_EXPR)
9000 expr = TREE_OPERAND (expr, 1);
9001 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9002 expr = TREE_OPERAND (expr, 0);
9003 if (TREE_CODE (expr) == SAVE_EXPR)
9004 expr = TREE_OPERAND (expr, 0);
9005 STRIP_NOPS (expr);
9006 return expr;
9007 }
9008
9009 return NULL_TREE;
9010 }
9011
9012 /* Remove COMPONENT_REFS and indirections from EXPR. */
9013
9014 static tree
9015 omp_strip_components_and_deref (tree expr)
9016 {
9017 while (TREE_CODE (expr) == COMPONENT_REF
9018 || TREE_CODE (expr) == INDIRECT_REF
9019 || (TREE_CODE (expr) == MEM_REF
9020 && integer_zerop (TREE_OPERAND (expr, 1)))
9021 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9022 || TREE_CODE (expr) == COMPOUND_EXPR)
9023 if (TREE_CODE (expr) == COMPOUND_EXPR)
9024 expr = TREE_OPERAND (expr, 1);
9025 else
9026 expr = TREE_OPERAND (expr, 0);
9027
9028 STRIP_NOPS (expr);
9029
9030 return expr;
9031 }
9032
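/* Remove any outer indirections (INDIRECT_REF, or MEM_REF with zero offset)
   from EXPR, without stripping COMPONENT_REFs or conversions.  */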
9033 static tree
9034 omp_strip_indirections (tree expr)
9035 {
9036 while (TREE_CODE (expr) == INDIRECT_REF
9037 || (TREE_CODE (expr) == MEM_REF
9038 && integer_zerop (TREE_OPERAND (expr, 1))))
9039 expr = TREE_OPERAND (expr, 0);
9040
9041 return expr;
9042 }
9043
9044 /* An attach or detach operation depends directly on the address being
9045 attached/detached. Return that address, or none if there are no
9046 attachments/detachments. */
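/* For instance, for a group consisting of a GOMP_MAP_TO node followed by a
   GOMP_MAP_ATTACH_DETACH node, the attachment is the decl of the latter
   node; a single-node GOMP_MAP_TO group has no attachment, so NULL_TREE is
   returned.  */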
9047
9048 static tree
9049 omp_get_attachment (omp_mapping_group *grp)
9050 {
9051 tree node = *grp->grp_start;
9052
9053 switch (OMP_CLAUSE_MAP_KIND (node))
9054 {
9055 case GOMP_MAP_TO:
9056 case GOMP_MAP_FROM:
9057 case GOMP_MAP_TOFROM:
9058 case GOMP_MAP_ALWAYS_FROM:
9059 case GOMP_MAP_ALWAYS_TO:
9060 case GOMP_MAP_ALWAYS_TOFROM:
9061 case GOMP_MAP_FORCE_FROM:
9062 case GOMP_MAP_FORCE_TO:
9063 case GOMP_MAP_FORCE_TOFROM:
9064 case GOMP_MAP_FORCE_PRESENT:
9065 case GOMP_MAP_ALLOC:
9066 case GOMP_MAP_RELEASE:
9067 case GOMP_MAP_DELETE:
9068 case GOMP_MAP_FORCE_ALLOC:
9069 if (node == grp->grp_end)
9070 return NULL_TREE;
9071
9072 node = OMP_CLAUSE_CHAIN (node);
9073 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9074 {
9075 gcc_assert (node != grp->grp_end);
9076 node = OMP_CLAUSE_CHAIN (node);
9077 }
9078 if (node)
9079 switch (OMP_CLAUSE_MAP_KIND (node))
9080 {
9081 case GOMP_MAP_POINTER:
9082 case GOMP_MAP_ALWAYS_POINTER:
9083 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9084 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9085 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9086 return NULL_TREE;
9087
9088 case GOMP_MAP_ATTACH_DETACH:
9089 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9090 return OMP_CLAUSE_DECL (node);
9091
9092 default:
9093 internal_error ("unexpected mapping node");
9094 }
9095 return error_mark_node;
9096
9097 case GOMP_MAP_TO_PSET:
9098 gcc_assert (node != grp->grp_end);
9099 node = OMP_CLAUSE_CHAIN (node);
9100 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9101 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9102 return OMP_CLAUSE_DECL (node);
9103 else
9104 internal_error ("unexpected mapping node");
9105 return error_mark_node;
9106
9107 case GOMP_MAP_ATTACH:
9108 case GOMP_MAP_DETACH:
9109 node = OMP_CLAUSE_CHAIN (node);
9110 if (!node || *grp->grp_start == grp->grp_end)
9111 return OMP_CLAUSE_DECL (*grp->grp_start);
9112 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9113 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9114 return OMP_CLAUSE_DECL (*grp->grp_start);
9115 else
9116 internal_error ("unexpected mapping node");
9117 return error_mark_node;
9118
9119 case GOMP_MAP_STRUCT:
9120 case GOMP_MAP_FORCE_DEVICEPTR:
9121 case GOMP_MAP_DEVICE_RESIDENT:
9122 case GOMP_MAP_LINK:
9123 case GOMP_MAP_IF_PRESENT:
9124 case GOMP_MAP_FIRSTPRIVATE:
9125 case GOMP_MAP_FIRSTPRIVATE_INT:
9126 case GOMP_MAP_USE_DEVICE_PTR:
9127 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9128 return NULL_TREE;
9129
9130 default:
9131 internal_error ("unexpected mapping node");
9132 }
9133
9134 return error_mark_node;
9135 }
9136
9137 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9138 mappings, return the chain pointer to the end of that group in the list. */
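/* E.g. for a GOMP_MAP_TO node followed by its GOMP_MAP_ATTACH_DETACH node,
   the returned chain pointer is the one whose target is the ATTACH_DETACH
   node, i.e. the last node belonging to the group.  */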
9139
9140 static tree *
9141 omp_group_last (tree *start_p)
9142 {
9143 tree c = *start_p, nc, *grp_last_p = start_p;
9144
9145 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9146
9147 nc = OMP_CLAUSE_CHAIN (c);
9148
9149 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9150 return grp_last_p;
9151
9152 switch (OMP_CLAUSE_MAP_KIND (c))
9153 {
9154 default:
9155 while (nc
9156 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9157 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9158 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9159 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9160 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9161 || (OMP_CLAUSE_MAP_KIND (nc)
9162 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9163 || (OMP_CLAUSE_MAP_KIND (nc)
9164 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9165 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9166 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9167 {
9168 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9169 c = nc;
9170 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9171 if (nc2
9172 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9173 && (OMP_CLAUSE_MAP_KIND (nc)
9174 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9175 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9176 {
9177 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9178 c = nc2;
9179 nc2 = OMP_CLAUSE_CHAIN (nc2);
9180 }
9181 nc = nc2;
9182 }
9183 break;
9184
9185 case GOMP_MAP_ATTACH:
9186 case GOMP_MAP_DETACH:
9187 /* This is a weird artifact of how directives are parsed: bare attach or
9188 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9189 FIRSTPRIVATE_REFERENCE node. FIXME. */
9190 if (nc
9191 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9192 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9193 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9194 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9195 break;
9196
9197 case GOMP_MAP_TO_PSET:
9198 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9199 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9200 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9201 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9202 break;
9203
9204 case GOMP_MAP_STRUCT:
9205 {
9206 unsigned HOST_WIDE_INT num_mappings
9207 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9208 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9209 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9210 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9211 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9212 for (unsigned i = 0; i < num_mappings; i++)
9213 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9214 }
9215 break;
9216 }
9217
9218 return grp_last_p;
9219 }
9220
9221 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9222 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9223 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9224 if we have at least one such group, else return NULL. */
9225
9226 static void
9227 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9228 tree gather_sentinel)
9229 {
9230 for (tree *cp = list_p;
9231 *cp && *cp != gather_sentinel;
9232 cp = &OMP_CLAUSE_CHAIN (*cp))
9233 {
9234 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9235 continue;
9236
9237 tree *grp_last_p = omp_group_last (cp);
9238 omp_mapping_group grp;
9239
9240 grp.grp_start = cp;
9241 grp.grp_end = *grp_last_p;
9242 grp.mark = UNVISITED;
9243 grp.sibling = NULL;
9244 grp.deleted = false;
9245 grp.next = NULL;
9246 groups->safe_push (grp);
9247
9248 cp = grp_last_p;
9249 }
9250 }
9251
9252 static vec<omp_mapping_group> *
9253 omp_gather_mapping_groups (tree *list_p)
9254 {
9255 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9256
9257 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9258
9259 if (groups->length () > 0)
9260 return groups;
9261 else
9262 {
9263 delete groups;
9264 return NULL;
9265 }
9266 }
9267
9268 /* A pointer mapping group GRP may define a block of memory starting at some
9269 base address, and maybe also define a firstprivate pointer or firstprivate
9270 reference that points to that block. The return value is a node containing
9271 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9272 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9273 return the number of consecutive chained nodes in CHAINED. */
9274
9275 static tree
9276 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9277 tree *firstprivate)
9278 {
9279 tree node = *grp->grp_start;
9280
9281 *firstprivate = NULL_TREE;
9282 *chained = 1;
9283
9284 switch (OMP_CLAUSE_MAP_KIND (node))
9285 {
9286 case GOMP_MAP_TO:
9287 case GOMP_MAP_FROM:
9288 case GOMP_MAP_TOFROM:
9289 case GOMP_MAP_ALWAYS_FROM:
9290 case GOMP_MAP_ALWAYS_TO:
9291 case GOMP_MAP_ALWAYS_TOFROM:
9292 case GOMP_MAP_FORCE_FROM:
9293 case GOMP_MAP_FORCE_TO:
9294 case GOMP_MAP_FORCE_TOFROM:
9295 case GOMP_MAP_FORCE_PRESENT:
9296 case GOMP_MAP_ALLOC:
9297 case GOMP_MAP_RELEASE:
9298 case GOMP_MAP_DELETE:
9299 case GOMP_MAP_FORCE_ALLOC:
9300 case GOMP_MAP_IF_PRESENT:
9301 if (node == grp->grp_end)
9302 return node;
9303
9304 node = OMP_CLAUSE_CHAIN (node);
9305 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9306 {
9307 if (node == grp->grp_end)
9308 return *grp->grp_start;
9309 node = OMP_CLAUSE_CHAIN (node);
9310 }
9311 if (node)
9312 switch (OMP_CLAUSE_MAP_KIND (node))
9313 {
9314 case GOMP_MAP_POINTER:
9315 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9316 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9317 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9318 *firstprivate = OMP_CLAUSE_DECL (node);
9319 return *grp->grp_start;
9320
9321 case GOMP_MAP_ALWAYS_POINTER:
9322 case GOMP_MAP_ATTACH_DETACH:
9323 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9324 return *grp->grp_start;
9325
9326 default:
9327 internal_error ("unexpected mapping node");
9328 }
9329 else
9330 internal_error ("unexpected mapping node");
9331 return error_mark_node;
9332
9333 case GOMP_MAP_TO_PSET:
9334 gcc_assert (node != grp->grp_end);
9335 node = OMP_CLAUSE_CHAIN (node);
9336 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9337 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9338 return NULL_TREE;
9339 else
9340 internal_error ("unexpected mapping node");
9341 return error_mark_node;
9342
9343 case GOMP_MAP_ATTACH:
9344 case GOMP_MAP_DETACH:
9345 node = OMP_CLAUSE_CHAIN (node);
9346 if (!node || *grp->grp_start == grp->grp_end)
9347 return NULL_TREE;
9348 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9349 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9350 {
9351 /* We're mapping the base pointer itself in a bare attach or detach
9352 node. This is a side effect of how parsing works, and the mapping
9353 will be removed anyway (at least for enter/exit data directives).
9354 We should ignore the mapping here. FIXME. */
9355 return NULL_TREE;
9356 }
9357 else
9358 internal_error ("unexpected mapping node");
9359 return error_mark_node;
9360
9361 case GOMP_MAP_STRUCT:
9362 {
9363 unsigned HOST_WIDE_INT num_mappings
9364 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9365 node = OMP_CLAUSE_CHAIN (node);
9366 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9367 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9368 {
9369 *firstprivate = OMP_CLAUSE_DECL (node);
9370 node = OMP_CLAUSE_CHAIN (node);
9371 }
9372 *chained = num_mappings;
9373 return node;
9374 }
9375
9376 case GOMP_MAP_FORCE_DEVICEPTR:
9377 case GOMP_MAP_DEVICE_RESIDENT:
9378 case GOMP_MAP_LINK:
9379 case GOMP_MAP_FIRSTPRIVATE:
9380 case GOMP_MAP_FIRSTPRIVATE_INT:
9381 case GOMP_MAP_USE_DEVICE_PTR:
9382 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9383 return NULL_TREE;
9384
9385 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9386 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9387 case GOMP_MAP_POINTER:
9388 case GOMP_MAP_ALWAYS_POINTER:
9389 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9390 /* These shouldn't appear by themselves. */
9391 if (!seen_error ())
9392 internal_error ("unexpected pointer mapping node");
9393 return error_mark_node;
9394
9395 default:
9396 gcc_unreachable ();
9397 }
9398
9399 return error_mark_node;
9400 }
9401
9402 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9403 nodes by tree_operand_hash. */
9404
9405 static void
9406 omp_index_mapping_groups_1 (hash_map<tree_operand_hash,
9407 omp_mapping_group *> *grpmap,
9408 vec<omp_mapping_group> *groups,
9409 tree reindex_sentinel)
9410 {
9411 omp_mapping_group *grp;
9412 unsigned int i;
9413 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9414
9415 FOR_EACH_VEC_ELT (*groups, i, grp)
9416 {
9417 if (reindexing && *grp->grp_start == reindex_sentinel)
9418 above_hwm = true;
9419
9420 if (reindexing && !above_hwm)
9421 continue;
9422
9423 tree fpp;
9424 unsigned int chained;
9425 tree node = omp_group_base (grp, &chained, &fpp);
9426
9427 if (node == error_mark_node || (!node && !fpp))
9428 continue;
9429
9430 for (unsigned j = 0;
9431 node && j < chained;
9432 node = OMP_CLAUSE_CHAIN (node), j++)
9433 {
9434 tree decl = OMP_CLAUSE_DECL (node);
9435
9436 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9437 meaning node-hash lookups don't work. This is a workaround for
9438 that, but ideally we should just create the INDIRECT_REF at
9439 source instead. FIXME. */
9440 if (TREE_CODE (decl) == MEM_REF
9441 && integer_zerop (TREE_OPERAND (decl, 1)))
9442 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9443
9444 omp_mapping_group **prev = grpmap->get (decl);
9445
9446 if (prev && *prev == grp)
9447 /* Empty. */;
9448 else if (prev)
9449 {
9450 /* Mapping the same thing twice is normally diagnosed as an error,
9451 but can happen under some circumstances, e.g. in pr99928-16.c,
9452 the directive:
9453
9454 #pragma omp target simd reduction(+:a[:3]) \
9455 map(always, tofrom: a[:6])
9456 ...
9457
9458 will result in two "a[0]" mappings (of different sizes). */
9459
9460 grp->sibling = (*prev)->sibling;
9461 (*prev)->sibling = grp;
9462 }
9463 else
9464 grpmap->put (decl, grp);
9465 }
9466
9467 if (!fpp)
9468 continue;
9469
9470 omp_mapping_group **prev = grpmap->get (fpp);
9471 if (prev && *prev != grp)
9472 {
9473 grp->sibling = (*prev)->sibling;
9474 (*prev)->sibling = grp;
9475 }
9476 else
9477 grpmap->put (fpp, grp);
9478 }
9479 }
9480
9481 static hash_map<tree_operand_hash, omp_mapping_group *> *
9482 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9483 {
9484 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
9485 = new hash_map<tree_operand_hash, omp_mapping_group *>;
9486
9487 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9488
9489 return grpmap;
9490 }
9491
9492 /* Rebuild group map from partially-processed clause list (during
9493 omp_build_struct_sibling_lists). We have already processed nodes up until
9494 a high-water mark (HWM). This is a bit tricky because the list is being
9495 reordered as it is scanned, but we know:
9496
9497 1. The list after HWM has not been touched yet, so we can reindex it safely.
9498
9499 2. The list before and including HWM has been altered, but remains
9500 well-formed throughout the sibling-list building operation.
9501
9502 So we can do the reindex operation in two parts, on the processed and
9503 then the unprocessed halves of the list. */
9504
9505 static hash_map<tree_operand_hash, omp_mapping_group *> *
9506 omp_reindex_mapping_groups (tree *list_p,
9507 vec<omp_mapping_group> *groups,
9508 vec<omp_mapping_group> *processed_groups,
9509 tree sentinel)
9510 {
9511 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
9512 = new hash_map<tree_operand_hash, omp_mapping_group *>;
9513
9514 processed_groups->truncate (0);
9515
9516 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9517 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9518 if (sentinel)
9519 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9520
9521 return grpmap;
9522 }
9523
9524 /* Find the immediately-containing struct for a component ref (etc.)
9525 expression EXPR. */
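/* E.g. for "s.a.b[2]" this returns "s.a"; an expression that is not an
   ARRAY_REF or COMPONENT_REF is returned unchanged.  */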
9526
9527 static tree
9528 omp_containing_struct (tree expr)
9529 {
9530 tree expr0 = expr;
9531
9532 STRIP_NOPS (expr);
9533
9534 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9535 component ref. */
9536 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9537 return expr0;
9538
9539 while (TREE_CODE (expr) == ARRAY_REF)
9540 expr = TREE_OPERAND (expr, 0);
9541
9542 if (TREE_CODE (expr) == COMPONENT_REF)
9543 expr = TREE_OPERAND (expr, 0);
9544
9545 return expr;
9546 }
9547
9548 /* Return TRUE if DECL describes a component that is part of a whole structure
9549 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9550 that maps that structure, if present. */
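/* E.g. if DECL is "s.a.b" and GRPMAP contains a group mapping the whole of
   "s", return TRUE and set *MAPPED_BY_GROUP to that group.  */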
9551
9552 static bool
9553 omp_mapped_by_containing_struct (hash_map<tree_operand_hash,
9554 omp_mapping_group *> *grpmap,
9555 tree decl,
9556 omp_mapping_group **mapped_by_group)
9557 {
9558 tree wsdecl = NULL_TREE;
9559
9560 *mapped_by_group = NULL;
9561
9562 while (true)
9563 {
9564 wsdecl = omp_containing_struct (decl);
9565 if (wsdecl == decl)
9566 break;
9567 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9568 if (!wholestruct
9569 && TREE_CODE (wsdecl) == MEM_REF
9570 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9571 {
9572 tree deref = TREE_OPERAND (wsdecl, 0);
9573 deref = build_fold_indirect_ref (deref);
9574 wholestruct = grpmap->get (deref);
9575 }
9576 if (wholestruct)
9577 {
9578 *mapped_by_group = *wholestruct;
9579 return true;
9580 }
9581 decl = wsdecl;
9582 }
9583
9584 return false;
9585 }
9586
9587 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9588 FALSE on error. */
9589
9590 static bool
9591 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9592 vec<omp_mapping_group> *groups,
9593 hash_map<tree_operand_hash, omp_mapping_group *>
9594 *grpmap,
9595 omp_mapping_group *grp)
9596 {
9597 if (grp->mark == PERMANENT)
9598 return true;
9599 if (grp->mark == TEMPORARY)
9600 {
9601 fprintf (stderr, "when processing group:\n");
9602 debug_mapping_group (grp);
9603 internal_error ("base pointer cycle detected");
9604 return false;
9605 }
9606 grp->mark = TEMPORARY;
9607
9608 tree attaches_to = omp_get_attachment (grp);
9609
9610 if (attaches_to)
9611 {
9612 omp_mapping_group **basep = grpmap->get (attaches_to);
9613
9614 if (basep && *basep != grp)
9615 {
9616 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9617 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9618 return false;
9619 }
9620 }
9621
9622 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9623
9624 while (decl)
9625 {
9626 tree base = omp_get_base_pointer (decl);
9627
9628 if (!base)
9629 break;
9630
9631 omp_mapping_group **innerp = grpmap->get (base);
9632 omp_mapping_group *wholestruct;
9633
9634 /* We should treat whole-structure mappings as if all (pointer, in this
9635 case) members are mapped as individual list items. Check if we have
9636 such a whole-structure mapping, if we don't have an explicit reference
9637 to the pointer member itself. */
9638 if (!innerp
9639 && TREE_CODE (base) == COMPONENT_REF
9640 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9641 innerp = &wholestruct;
9642
9643 if (innerp && *innerp != grp)
9644 {
9645 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9646 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9647 return false;
9648 break;
9649 }
9650
9651 decl = base;
9652 }
9653
9654 grp->mark = PERMANENT;
9655
9656 /* Emit grp to output list. */
9657
9658 **outlist = grp;
9659 *outlist = &grp->next;
9660
9661 return true;
9662 }
9663
9664 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9665 before mappings that use those pointers. This is an implementation of the
9666 depth-first search algorithm, described e.g. at:
9667
9668 https://en.wikipedia.org/wiki/Topological_sorting
9669 */
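/* For example, if one group maps an array section "p[0:n]" and another maps
   the pointer "p" itself, the group mapping "p" is emitted first, because
   "p" is the base pointer of that section.  */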
9670
9671 static omp_mapping_group *
9672 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9673 hash_map<tree_operand_hash, omp_mapping_group *>
9674 *grpmap)
9675 {
9676 omp_mapping_group *grp, *outlist = NULL, **cursor;
9677 unsigned int i;
9678
9679 cursor = &outlist;
9680
9681 FOR_EACH_VEC_ELT (*groups, i, grp)
9682 {
9683 if (grp->mark != PERMANENT)
9684 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9685 return NULL;
9686 }
9687
9688 return outlist;
9689 }
9690
9691 /* Split INLIST into two parts, moving groups corresponding to
9692 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9693 The former list is then appended to the latter. Each sub-list retains the
9694 order of the original list.
9695 Note that ATTACH nodes are later moved to the end of the list in
9696 gimplify_adjust_omp_clauses, for target regions. */
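/* E.g. an input list of groups

     {to: a} {release: b} {from: c}

   is rearranged into

     {to: a} {from: c} {release: b}

   with the relative order within each sub-list preserved.  */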
9697
9698 static omp_mapping_group *
9699 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9700 {
9701 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9702 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9703
9704 for (omp_mapping_group *w = inlist; w;)
9705 {
9706 tree c = *w->grp_start;
9707 omp_mapping_group *next = w->next;
9708
9709 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9710
9711 switch (OMP_CLAUSE_MAP_KIND (c))
9712 {
9713 case GOMP_MAP_ALLOC:
9714 case GOMP_MAP_RELEASE:
9715 case GOMP_MAP_DELETE:
9716 *ard_tail = w;
9717 w->next = NULL;
9718 ard_tail = &w->next;
9719 break;
9720
9721 default:
9722 *tf_tail = w;
9723 w->next = NULL;
9724 tf_tail = &w->next;
9725 }
9726
9727 w = next;
9728 }
9729
9730 /* Now splice the lists together... */
9731 *tf_tail = ard_groups;
9732
9733 return tf_groups;
9734 }
9735
9736 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9737 those groups based on the output list of omp_tsort_mapping_groups --
9738 singly-linked, threaded through each element's NEXT pointer starting at
9739 HEAD. Each list element appears exactly once in that linked list.
9740
9741 Each element of GROUPS may correspond to one or several mapping nodes.
9742 Node groups are kept together, and in the reordered list, the positions of
9743 the original groups are reused for the positions of the reordered list.
9744 Hence if we have e.g.
9745
9746 {to ptr ptr} firstprivate {tofrom ptr} ...
9747 ^ ^ ^
9748 first group non-"map" second group
9749
9750 and say the second group contains a base pointer for the first so must be
9751 moved before it, the resulting list will contain:
9752
9753 {tofrom ptr} firstprivate {to ptr ptr} ...
9754 ^ prev. second group ^ prev. first group
9755 */
9756
9757 static tree *
9758 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9759 omp_mapping_group *head,
9760 tree *list_p)
9761 {
9762 omp_mapping_group *grp;
9763 unsigned int i;
9764 unsigned numgroups = groups->length ();
9765 auto_vec<tree> old_heads (numgroups);
9766 auto_vec<tree *> old_headps (numgroups);
9767 auto_vec<tree> new_heads (numgroups);
9768 auto_vec<tree> old_succs (numgroups);
9769 bool map_at_start = (list_p == (*groups)[0].grp_start);
9770
9771 tree *new_grp_tail = NULL;
9772
9773 /* Stash the start & end nodes of each mapping group before we start
9774 modifying the list. */
9775 FOR_EACH_VEC_ELT (*groups, i, grp)
9776 {
9777 old_headps.quick_push (grp->grp_start);
9778 old_heads.quick_push (*grp->grp_start);
9779 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9780 }
9781
9782 /* And similarly, the heads of the groups in the order we want to rearrange
9783 the list to. */
9784 for (omp_mapping_group *w = head; w; w = w->next)
9785 new_heads.quick_push (*w->grp_start);
9786
9787 FOR_EACH_VEC_ELT (*groups, i, grp)
9788 {
9789 gcc_assert (head);
9790
9791 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9792 {
9793 /* a {b c d} {e f g} h i j (original)
9794 -->
9795 a {k l m} {e f g} h i j (inserted new group on last iter)
9796 -->
9797 a {k l m} {n o p} h i j (this time, chain last group to new one)
9798 ^new_grp_tail
9799 */
9800 *new_grp_tail = new_heads[i];
9801 }
9802 else if (new_grp_tail)
9803 {
9804 /* a {b c d} e {f g h} i j k (original)
9805 -->
9806 a {l m n} e {f g h} i j k (gap after last iter's group)
9807 -->
9808 a {l m n} e {o p q} h i j (chain last group to old successor)
9809 ^new_grp_tail
9810 */
9811 *new_grp_tail = old_succs[i - 1];
9812 *old_headps[i] = new_heads[i];
9813 }
9814 else
9815 {
9816 /* The first inserted group -- point to new group, and leave end
9817 open.
9818 a {b c d} e f
9819 -->
9820 a {g h i...
9821 */
9822 *grp->grp_start = new_heads[i];
9823 }
9824
9825 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9826
9827 head = head->next;
9828 }
9829
9830 if (new_grp_tail)
9831 *new_grp_tail = old_succs[numgroups - 1];
9832
9833 gcc_assert (!head);
9834
9835 return map_at_start ? (*groups)[0].grp_start : list_p;
9836 }
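/* A minimal sketch of how the group helpers above chain together, assuming
   a caller that owns a clause list LIST_P to be reordered (the real driver
   is the clause-scanning code later in this file):

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     if (groups)
       {
         hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
           = omp_index_mapping_groups (groups);
         omp_mapping_group *sorted
           = omp_tsort_mapping_groups (groups, grpmap);
         sorted = omp_segregate_mapping_groups (sorted);
         if (sorted)
           list_p = omp_reorder_mapping_groups (groups, sorted, list_p);
         delete grpmap;
         delete groups;
       }  */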
9837
9838 /* DECL is supposed to have lastprivate semantics in the outer contexts
9839 of combined/composite constructs, starting with OCTX.
9840 Add needed lastprivate, shared or map clause if no data sharing or
9841 mapping clause are present. IMPLICIT_P is true if it is an implicit
9842 clause (IV on simd), in which case the lastprivate will not be
9843 copied to some constructs. */
9844
9845 static void
9846 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9847 tree decl, bool implicit_p)
9848 {
9849 struct gimplify_omp_ctx *orig_octx = octx;
9850 for (; octx; octx = octx->outer_context)
9851 {
9852 if ((octx->region_type == ORT_COMBINED_PARALLEL
9853 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9854 && splay_tree_lookup (octx->variables,
9855 (splay_tree_key) decl) == NULL)
9856 {
9857 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9858 continue;
9859 }
9860 if ((octx->region_type & ORT_TASK) != 0
9861 && octx->combined_loop
9862 && splay_tree_lookup (octx->variables,
9863 (splay_tree_key) decl) == NULL)
9864 {
9865 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9866 continue;
9867 }
9868 if (implicit_p
9869 && octx->region_type == ORT_WORKSHARE
9870 && octx->combined_loop
9871 && splay_tree_lookup (octx->variables,
9872 (splay_tree_key) decl) == NULL
9873 && octx->outer_context
9874 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9875 && splay_tree_lookup (octx->outer_context->variables,
9876 (splay_tree_key) decl) == NULL)
9877 {
9878 octx = octx->outer_context;
9879 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9880 continue;
9881 }
9882 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9883 && octx->combined_loop
9884 && splay_tree_lookup (octx->variables,
9885 (splay_tree_key) decl) == NULL
9886 && !omp_check_private (octx, decl, false))
9887 {
9888 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9889 continue;
9890 }
9891 if (octx->region_type == ORT_COMBINED_TARGET)
9892 {
9893 splay_tree_node n = splay_tree_lookup (octx->variables,
9894 (splay_tree_key) decl);
9895 if (n == NULL)
9896 {
9897 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9898 octx = octx->outer_context;
9899 }
9900 else if (!implicit_p
9901 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9902 {
9903 n->value &= ~(GOVD_FIRSTPRIVATE
9904 | GOVD_FIRSTPRIVATE_IMPLICIT
9905 | GOVD_EXPLICIT);
9906 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9907 octx = octx->outer_context;
9908 }
9909 }
9910 break;
9911 }
9912 if (octx && (implicit_p || octx != orig_octx))
9913 omp_notice_variable (octx, decl, true);
9914 }
9915
9916 /* If we have mappings INNER and OUTER, where INNER is a component access and
9917 OUTER is a mapping of the whole containing struct, check that the mappings
9918 are compatible. We'll be deleting the inner mapping, so we need to make
9919 sure the outer mapping does (at least) the same transfers to/from the device
9920 as the inner mapping. */
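/* For instance, an outer "map(tofrom: s)" is compatible with an inner
   "map(to: s.ptr[0:n])", but an outer "map(to: s)" combined with an inner
   "map(from: s.ptr[0:n])" is diagnosed, since deleting the inner mapping
   would lose the copy back from the device.  */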
9921
9922 bool
9923 omp_check_mapping_compatibility (location_t loc,
9924 omp_mapping_group *outer,
9925 omp_mapping_group *inner)
9926 {
9927 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9928
9929 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9930 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9931
9932 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9933 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9934
9935 if (outer_kind == inner_kind)
9936 return true;
9937
9938 switch (outer_kind)
9939 {
9940 case GOMP_MAP_ALWAYS_TO:
9941 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9942 || inner_kind == GOMP_MAP_ALLOC
9943 || inner_kind == GOMP_MAP_TO)
9944 return true;
9945 break;
9946
9947 case GOMP_MAP_ALWAYS_FROM:
9948 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9949 || inner_kind == GOMP_MAP_ALLOC
9950 || inner_kind == GOMP_MAP_FROM)
9951 return true;
9952 break;
9953
9954 case GOMP_MAP_TO:
9955 case GOMP_MAP_FROM:
9956 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9957 || inner_kind == GOMP_MAP_ALLOC)
9958 return true;
9959 break;
9960
9961 case GOMP_MAP_ALWAYS_TOFROM:
9962 case GOMP_MAP_TOFROM:
9963 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9964 || inner_kind == GOMP_MAP_ALLOC
9965 || inner_kind == GOMP_MAP_TO
9966 || inner_kind == GOMP_MAP_FROM
9967 || inner_kind == GOMP_MAP_TOFROM)
9968 return true;
9969 break;
9970
9971 default:
9972 ;
9973 }
9974
9975 error_at (loc, "data movement for component %qE is not compatible with "
9976 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
9977 OMP_CLAUSE_DECL (first_outer));
9978
9979 return false;
9980 }
9981
9982 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
9983 clause dependencies we handle for now are struct element mappings and
9984 whole-struct mappings on the same directive, and duplicate clause
9985 detection. */
9986
9987 void
9988 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
9989 hash_map<tree_operand_hash,
9990 omp_mapping_group *> *grpmap)
9991 {
9992 int i;
9993 omp_mapping_group *grp;
9994 hash_set<tree_operand_hash> *seen_components = NULL;
9995 hash_set<tree_operand_hash> *shown_error = NULL;
9996
9997 FOR_EACH_VEC_ELT (*groups, i, grp)
9998 {
9999 tree grp_end = grp->grp_end;
10000 tree decl = OMP_CLAUSE_DECL (grp_end);
10001
10002 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10003
10004 if (DECL_P (grp_end))
10005 continue;
10006
10007 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10008 while (TREE_CODE (c) == ARRAY_REF)
10009 c = TREE_OPERAND (c, 0);
10010 if (TREE_CODE (c) != COMPONENT_REF)
10011 continue;
10012 if (!seen_components)
10013 seen_components = new hash_set<tree_operand_hash> ();
10014 if (!shown_error)
10015 shown_error = new hash_set<tree_operand_hash> ();
10016 if (seen_components->contains (c)
10017 && !shown_error->contains (c))
10018 {
10019 error_at (OMP_CLAUSE_LOCATION (grp_end),
10020 "%qE appears more than once in map clauses",
10021 OMP_CLAUSE_DECL (grp_end));
10022 shown_error->add (c);
10023 }
10024 else
10025 seen_components->add (c);
10026
10027 omp_mapping_group *struct_group;
10028 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10029 && *grp->grp_start == grp_end)
10030 {
10031 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10032 struct_group, grp);
10033 /* Remove the whole of this mapping -- redundant. */
10034 grp->deleted = true;
10035 }
10036 }
10037
10038 if (seen_components)
10039 delete seen_components;
10040 if (shown_error)
10041 delete shown_error;
10042 }
10043
10044 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10045 is linked to the previous node pointed to by INSERT_AT. */
10046
10047 static tree *
10048 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10049 {
10050 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10051 *insert_at = newnode;
10052 return &OMP_CLAUSE_CHAIN (newnode);
10053 }
10054
10055 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10056 pointed to by chain MOVE_AFTER instead. */
10057
10058 static void
10059 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10060 {
10061 gcc_assert (node == *old_pos);
10062 *old_pos = OMP_CLAUSE_CHAIN (node);
10063 OMP_CLAUSE_CHAIN (node) = *move_after;
10064 *move_after = node;
10065 }
10066
10067 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10068 through LAST_NODE to a position after the MOVE_AFTER chain. Similar to the
10069 function below, but no new nodes are prepended before splicing into place.
10070 Return the position we should continue scanning the list at, or NULL to
10071 stay where we were. */
10072
10073 static tree *
10074 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10075 tree *move_after)
10076 {
10077 if (first_ptr == move_after)
10078 return NULL;
10079
10080 tree tmp = *first_ptr;
10081 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10082 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10083 *move_after = tmp;
10084
10085 return first_ptr;
10086 }
10087
10088 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10089 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10090 pointer MOVE_AFTER.
10091
10092 The latter list was previously part of the OMP clause list, and the former
10093 (prepended) part is comprised of new nodes.
10094
10095 We start with a list of nodes starting with a struct mapping node. We
10096 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10097 node's chain is LAST_NEW_TAIL come directly after MOVE_AFTER, followed by
10098 the group of mapping nodes we are currently processing (from the chain
10099 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10100 we should continue processing from, or NULL to stay where we were.
10101
10102 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10103 different) is worked through below. Here we are processing LAST_NODE, and
10104 FIRST_PTR points at the preceding mapping clause:
10105
10106 #. mapping node chain
10107 ---------------------------------------------------
10108 A. struct_node [->B]
10109 B. comp_1 [->C]
10110 C. comp_2 [->D (move_after)]
10111 D. map_to_3 [->E]
10112 E. attach_3 [->F (first_ptr)]
10113 F. map_to_4 [->G (continue_at)]
10114 G. attach_4 (last_node) [->H]
10115 H. ...
10116
10117 *last_new_tail = *first_ptr;
10118
10119 I. new_node (first_new) [->F (last_new_tail)]
10120
10121 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10122
10123 #. mapping node chain
10124 ----------------------------------------------------
10125 A. struct_node [->B]
10126 B. comp_1 [->C]
10127 C. comp_2 [->D (move_after)]
10128 D. map_to_3 [->E]
10129 E. attach_3 [->H (first_ptr)]
10130 F. map_to_4 [->G (continue_at)]
10131 G. attach_4 (last_node) [->H]
10132 H. ...
10133
10134 I. new_node (first_new) [->F (last_new_tail)]
10135
10136 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10137
10138 #. mapping node chain
10139 ---------------------------------------------------
10140 A. struct_node [->B]
10141 B. comp_1 [->C]
10142 C. comp_2 [->D (move_after)]
10143 D. map_to_3 [->E]
10144 E. attach_3 [->H (continue_at)]
10145 F. map_to_4 [->G]
10146 G. attach_4 (last_node) [->D]
10147 H. ...
10148
10149 I. new_node (first_new) [->F (last_new_tail)]
10150
10151 *move_after = first_new;
10152
10153 #. mapping node chain
10154 ---------------------------------------------------
10155 A. struct_node [->B]
10156 B. comp_1 [->C]
10157 C. comp_2 [->I (move_after)]
10158 D. map_to_3 [->E]
10159 E. attach_3 [->H (continue_at)]
10160 F. map_to_4 [->G]
10161 G. attach_4 (last_node) [->D]
10162 H. ...
10163 I. new_node (first_new) [->F (last_new_tail)]
10164
10165 or, in order:
10166
10167 #. mapping node chain
10168 ---------------------------------------------------
10169 A. struct_node [->B]
10170 B. comp_1 [->C]
10171 C. comp_2 [->I (move_after)]
10172 I. new_node (first_new) [->F (last_new_tail)]
10173 F. map_to_4 [->G]
10174 G. attach_4 (last_node) [->D]
10175 D. map_to_3 [->E]
10176 E. attach_3 [->H (continue_at)]
10177 H. ...
10178 */
10179
10180 static tree *
10181 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10182 tree *first_ptr, tree last_node,
10183 tree *move_after)
10184 {
10185 tree *continue_at = NULL;
10186 *last_new_tail = *first_ptr;
10187 if (first_ptr == move_after)
10188 *move_after = first_new;
10189 else
10190 {
10191 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10192 continue_at = first_ptr;
10193 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10194 *move_after = first_new;
10195 }
10196 return continue_at;
10197 }
10198
10199 /* Mapping struct members causes an additional set of nodes to be created,
10200 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10201 number of members being mapped, in order of ascending position (address or
10202 bitwise).
10203
10204 We scan through the list of mapping clauses, calling this function for each
10205 struct member mapping we find, and build up the list of mappings after the
10206 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10207 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10208 moved into place in the sorted list.
10209
10210 struct {
10211 int *a;
10212 int *b;
10213 int c;
10214 int *d;
10215 };
10216
10217 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10218 struct.d[0:n])
10219
10220 GOMP_MAP_STRUCT (4)
10221 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10222 GOMP_MAP_ALLOC (struct.a)
10223 GOMP_MAP_ALLOC (struct.b)
10224 GOMP_MAP_TO (struct.c)
10225 GOMP_MAP_ALLOC (struct.d)
10226 ...
10227
10228 In the case where we are mapping references to pointers, or in Fortran if
10229 we are mapping an array with a descriptor, additional nodes may be created
10230 after the struct node list also.
10231
10232 The return value is either a pointer to the next node to process (if the
10233 list has been rearranged), or NULL to continue with the next node in the
10234 original list. */
10235
10236 static tree *
10237 omp_accumulate_sibling_list (enum omp_region_type region_type,
10238 enum tree_code code,
10239 hash_map<tree_operand_hash, tree>
10240 *&struct_map_to_clause, tree *grp_start_p,
10241 tree grp_end, tree *inner)
10242 {
10243 poly_offset_int coffset;
10244 poly_int64 cbitpos;
10245 tree ocd = OMP_CLAUSE_DECL (grp_end);
10246 bool openmp = !(region_type & ORT_ACC);
10247 tree *continue_at = NULL;
10248
10249 while (TREE_CODE (ocd) == ARRAY_REF)
10250 ocd = TREE_OPERAND (ocd, 0);
10251
10252 if (TREE_CODE (ocd) == INDIRECT_REF)
10253 ocd = TREE_OPERAND (ocd, 0);
10254
10255 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10256
10257 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10258 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10259 == GOMP_MAP_ATTACH_DETACH)
10260 || (OMP_CLAUSE_MAP_KIND (grp_end)
10261 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10262 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10263 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10264
10265 /* FIXME: If we're not mapping the base pointer in some other clause on this
10266 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10267 early-exit. */
10268 if (openmp && attach_detach)
10269 return NULL;
10270
10271 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10272 {
10273 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10274 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10275
10276 OMP_CLAUSE_SET_MAP_KIND (l, k);
10277
10278 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10279
10280 OMP_CLAUSE_SIZE (l)
10281 = (!attach ? size_int (1)
10282 : (DECL_P (OMP_CLAUSE_DECL (l))
10283 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10284 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10285 if (struct_map_to_clause == NULL)
10286 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10287 struct_map_to_clause->put (base, l);
10288
10289 if (ptr || attach_detach)
10290 {
10291 tree extra_node;
10292 tree alloc_node
10293 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10294 &extra_node);
10295 OMP_CLAUSE_CHAIN (l) = alloc_node;
10296
10297 tree *insert_node_pos = grp_start_p;
10298
10299 if (extra_node)
10300 {
10301 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10302 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10303 }
10304 else
10305 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10306
10307 *insert_node_pos = l;
10308 }
10309 else
10310 {
10311 gcc_assert (*grp_start_p == grp_end);
10312 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10313 }
10314
10315 tree noind = omp_strip_indirections (base);
10316
10317 if (!openmp
10318 && (region_type & ORT_TARGET)
10319 && TREE_CODE (noind) == COMPONENT_REF)
10320 {
10321 /* The base for this component access is a struct component access
10322 itself. Insert a node to be processed on the next iteration of
10323 our caller's loop, which will subsequently be turned into a new,
10324 inner GOMP_MAP_STRUCT mapping.
10325
10326 We need to do this, else the non-DECL_P base won't be
10327 rewritten correctly in the offloaded region. */
10328 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10329 OMP_CLAUSE_MAP);
10330 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10331 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10332 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10333 *inner = c2;
10334 return NULL;
10335 }
10336
10337 tree sdecl = omp_strip_components_and_deref (base);
10338
10339 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10340 {
10341 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10342 OMP_CLAUSE_MAP);
10343 bool base_ref
10344 = (TREE_CODE (base) == INDIRECT_REF
10345 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10346 == REFERENCE_TYPE)
10347 || ((TREE_CODE (TREE_OPERAND (base, 0))
10348 == INDIRECT_REF)
10349 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10350 (TREE_OPERAND (base, 0), 0)))
10351 == REFERENCE_TYPE))));
10352 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10353 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10354 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10355 OMP_CLAUSE_DECL (c2) = sdecl;
10356 tree baddr = build_fold_addr_expr (base);
10357 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10358 ptrdiff_type_node, baddr);
10359 /* This isn't going to be good enough when we add support for more
10360 complicated lvalue expressions. FIXME. */
10361 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10362 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10363 sdecl = build_simple_mem_ref (sdecl);
10364 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10365 ptrdiff_type_node, sdecl);
10366 OMP_CLAUSE_SIZE (c2)
10367 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10368 ptrdiff_type_node, baddr, decladdr);
10369 /* Insert after struct node. */
10370 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10371 OMP_CLAUSE_CHAIN (l) = c2;
10372 }
10373
10374 return NULL;
10375 }
10376 else if (struct_map_to_clause)
10377 {
10378 tree *osc = struct_map_to_clause->get (base);
10379 tree *sc = NULL, *scp = NULL;
10380 sc = &OMP_CLAUSE_CHAIN (*osc);
10381 /* The struct mapping might be immediately followed by a
10382 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10383 indirect access or a reference, or both. (This added node is removed
10384 in omp-low.c after it has been processed there.) */
10385 if (*sc != grp_end
10386 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10387 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10388 sc = &OMP_CLAUSE_CHAIN (*sc);
10389 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10390 if ((ptr || attach_detach) && sc == grp_start_p)
10391 break;
10392 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10393 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10394 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10395 break;
10396 else
10397 {
10398 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10399 poly_offset_int offset;
10400 poly_int64 bitpos;
10401
10402 if (TREE_CODE (sc_decl) == ARRAY_REF)
10403 {
10404 while (TREE_CODE (sc_decl) == ARRAY_REF)
10405 sc_decl = TREE_OPERAND (sc_decl, 0);
10406 if (TREE_CODE (sc_decl) != COMPONENT_REF
10407 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10408 break;
10409 }
10410 else if (TREE_CODE (sc_decl) == INDIRECT_REF
10411 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10412 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10413 == REFERENCE_TYPE))
10414 sc_decl = TREE_OPERAND (sc_decl, 0);
10415
10416 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10417 if (!base2 || !operand_equal_p (base2, base, 0))
10418 break;
10419 if (scp)
10420 continue;
10421 if (maybe_lt (coffset, offset)
10422 || (known_eq (coffset, offset)
10423 && maybe_lt (cbitpos, bitpos)))
10424 {
10425 if (ptr || attach_detach)
10426 scp = sc;
10427 else
10428 break;
10429 }
10430 }
10431
10432 if (!attach)
10433 OMP_CLAUSE_SIZE (*osc)
10434 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10435 if (ptr || attach_detach)
10436 {
10437 tree cl = NULL_TREE, extra_node;
10438 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10439 grp_end, &extra_node);
10440 tree *tail_chain = NULL;
10441
10442 /* Here, we have:
10443
10444 grp_end : the last (or only) node in this group.
10445 grp_start_p : pointer to the first node in a pointer mapping group
10446 up to and including GRP_END.
10447 sc : pointer to the chain for the end of the struct component
10448 list.
10449 scp : pointer to the chain for the sorted position at which we
10450 should insert in the middle of the struct component list
10451 (else NULL to insert at end).
10452 alloc_node : the "alloc" node for the structure (pointer-type)
10453 component. We insert at SCP (if present), else SC
10454 (the end of the struct component list).
10455 extra_node : a newly-synthesized node for an additional indirect
10456 pointer mapping or a Fortran pointer set, if needed.
10457 cl : first node to prepend before grp_start_p.
10458 tail_chain : pointer to chain of last prepended node.
10459
10460 The general idea is we move the nodes for this struct mapping
10461 together: the alloc node goes into the sorted list directly after
10462 the struct mapping, and any extra nodes (together with the nodes
10463 mapping arrays pointed to by struct components) get moved after
10464 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10465 the end of the struct component mapping list. It's important that
10466 the alloc_node comes first in that case because it's part of the
10467 sorted component mapping list (but subsequent nodes are not!). */
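/* As a hypothetical illustration (names made up, not from this change),
   for a C mapping like

     #pragma omp target enter data map(to: s.ptr[0:n])

   the synthesized GOMP_MAP_STRUCT node for "s" is followed directly by
   the "alloc" node for the pointer component "s.ptr" (keeping the
   sorted component list intact), while the node mapping the pointed-to
   array section and its attach/detach node are moved after that list,
   as described above.  */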
10468
10469 if (scp)
10470 omp_siblist_insert_node_after (alloc_node, scp);
10471
10472 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10473 already inserted it) and the extra_node (if it is present). The
10474 list can be empty if we added alloc_node above and there is no
10475 extra node. */
10476 if (scp && extra_node)
10477 {
10478 cl = extra_node;
10479 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10480 }
10481 else if (extra_node)
10482 {
10483 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10484 cl = alloc_node;
10485 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10486 }
10487 else if (!scp)
10488 {
10489 cl = alloc_node;
10490 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10491 }
10492
10493 continue_at
10494 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10495 grp_start_p, grp_end,
10496 sc)
10497 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10498 }
10499 else if (*sc != grp_end)
10500 {
10501 gcc_assert (*grp_start_p == grp_end);
10502
10503 /* We are moving the current node back to a previous struct node:
10504 the node that used to point to the current node will now point to
10505 the next node. */
10506 continue_at = grp_start_p;
10507 /* In the non-pointer case, the mapping clause itself is moved into
10508 the correct position in the struct component list, which in this
10509 case is just SC. */
10510 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10511 }
10512 }
10513 return continue_at;
10514 }
10515
10516 /* Scan through GROUPS, and create sorted structure sibling lists without
10517 gimplifying. */
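/* For example (a hypothetical case, assuming C input), the two
   component mappings in

     #pragma omp target map(to: s.a) map(from: s.b)

   are gathered under one synthesized GOMP_MAP_STRUCT node for "s",
   with the component nodes kept sorted by their offset within the
   struct.  */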
10518
10519 static bool
10520 omp_build_struct_sibling_lists (enum tree_code code,
10521 enum omp_region_type region_type,
10522 vec<omp_mapping_group> *groups,
10523 hash_map<tree_operand_hash, omp_mapping_group *>
10524 **grpmap,
10525 tree *list_p)
10526 {
10527 unsigned i;
10528 omp_mapping_group *grp;
10529 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10530 bool success = true;
10531 tree *new_next = NULL;
10532 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10533 auto_vec<omp_mapping_group> pre_hwm_groups;
10534
10535 FOR_EACH_VEC_ELT (*groups, i, grp)
10536 {
10537 tree c = grp->grp_end;
10538 tree decl = OMP_CLAUSE_DECL (c);
10539 tree grp_end = grp->grp_end;
10540 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10541
10542 if (new_next)
10543 grp->grp_start = new_next;
10544
10545 new_next = NULL;
10546
10547 tree *grp_start_p = grp->grp_start;
10548
10549 if (DECL_P (decl))
10550 continue;
10551
10552 /* Skip groups we marked for deletion in
10553 oacc_resolve_clause_dependencies. */
10554 if (grp->deleted)
10555 continue;
10556
10557 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10558 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10559 {
10560 /* Don't process as a struct an array descriptor that isn't inside
10561 a derived type (the GOMP_MAP_POINTER following will have the form
10562 "var.data"; such mappings are handled specially). */
10563 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10564 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10565 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10566 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10567 continue;
10568 }
10569
10570 tree d = decl;
10571 if (TREE_CODE (d) == ARRAY_REF)
10572 {
10573 while (TREE_CODE (d) == ARRAY_REF)
10574 d = TREE_OPERAND (d, 0);
10575 if (TREE_CODE (d) == COMPONENT_REF
10576 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10577 decl = d;
10578 }
10579 if (d == decl
10580 && TREE_CODE (decl) == INDIRECT_REF
10581 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10582 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10583 == REFERENCE_TYPE)
10584 && (OMP_CLAUSE_MAP_KIND (c)
10585 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10586 decl = TREE_OPERAND (decl, 0);
10587
10588 STRIP_NOPS (decl);
10589
10590 if (TREE_CODE (decl) != COMPONENT_REF)
10591 continue;
10592
10593 /* If we're mapping the whole struct in another node, skip adding this
10594 node to a sibling list. */
10595 omp_mapping_group *wholestruct;
10596 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10597 &wholestruct))
10598 {
10599 if (!(region_type & ORT_ACC)
10600 && *grp_start_p == grp_end)
10601 /* Remove the whole of this mapping -- redundant. */
10602 grp->deleted = true;
10603
10604 continue;
10605 }
10606
10607 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10608 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10609 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10610 && code != OACC_UPDATE
10611 && code != OMP_TARGET_UPDATE)
10612 {
10613 if (error_operand_p (decl))
10614 {
10615 success = false;
10616 goto error_out;
10617 }
10618
10619 tree stype = TREE_TYPE (decl);
10620 if (TREE_CODE (stype) == REFERENCE_TYPE)
10621 stype = TREE_TYPE (stype);
10622 if (TYPE_SIZE_UNIT (stype) == NULL
10623 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10624 {
10625 error_at (OMP_CLAUSE_LOCATION (c),
10626 "mapping field %qE of variable length "
10627 "structure", OMP_CLAUSE_DECL (c));
10628 success = false;
10629 goto error_out;
10630 }
10631
10632 tree inner = NULL_TREE;
10633
10634 new_next
10635 = omp_accumulate_sibling_list (region_type, code,
10636 struct_map_to_clause, grp_start_p,
10637 grp_end, &inner);
10638
10639 if (inner)
10640 {
10641 if (new_next && *new_next == NULL_TREE)
10642 *new_next = inner;
10643 else
10644 *tail = inner;
10645
10646 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10647 omp_mapping_group newgrp;
10648 newgrp.grp_start = new_next ? new_next : tail;
10649 newgrp.grp_end = inner;
10650 newgrp.mark = UNVISITED;
10651 newgrp.sibling = NULL;
10652 newgrp.deleted = false;
10653 newgrp.next = NULL;
10654 groups->safe_push (newgrp);
10655
10656 /* !!! Growing GROUPS might invalidate the pointers in the group
10657 map. Rebuild it here. This is a bit inefficient, but
10658 shouldn't happen very often. */
10659 delete (*grpmap);
10660 *grpmap
10661 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10662 sentinel);
10663
10664 tail = &OMP_CLAUSE_CHAIN (inner);
10665 }
10666 }
10667 }
10668
10669 /* Delete groups marked for deletion above. At this point the order of the
10670 groups may no longer correspond to the order of the underlying list,
10671 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10672 deleted nodes... */
10673
10674 FOR_EACH_VEC_ELT (*groups, i, grp)
10675 if (grp->deleted)
10676 for (tree d = *grp->grp_start;
10677 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10678 d = OMP_CLAUSE_CHAIN (d))
10679 OMP_CLAUSE_DECL (d) = NULL_TREE;
10680
10681 /* ...then sweep through the list removing the now-empty nodes. */
10682
10683 tail = list_p;
10684 while (*tail)
10685 {
10686 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10687 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10688 *tail = OMP_CLAUSE_CHAIN (*tail);
10689 else
10690 tail = &OMP_CLAUSE_CHAIN (*tail);
10691 }
10692
10693 error_out:
10694 if (struct_map_to_clause)
10695 delete struct_map_to_clause;
10696
10697 return success;
10698 }
10699
10700 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10701 omp context and, where needed, into enclosing omp contexts. */
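/* Sketch of the effect on a hypothetical directive:

     #pragma omp parallel firstprivate (x) shared (y)

   records x -> GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and
   y -> GOVD_SHARED | GOVD_EXPLICIT in the new context, and gimplifies
   any non-trivial clause operands (sizes, steps, num_threads
   expressions, ...) into PRE_P.  */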
10702
10703 static void
10704 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10705 enum omp_region_type region_type,
10706 enum tree_code code)
10707 {
10708 struct gimplify_omp_ctx *ctx, *outer_ctx;
10709 tree c;
10710 tree *prev_list_p = NULL, *orig_list_p = list_p;
10711 int handled_depend_iterators = -1;
10712 int nowait = -1;
10713
10714 ctx = new_omp_context (region_type);
10715 ctx->code = code;
10716 outer_ctx = ctx->outer_context;
10717 if (code == OMP_TARGET)
10718 {
10719 if (!lang_GNU_Fortran ())
10720 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10721 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10722 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10723 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10724 }
10725 if (!lang_GNU_Fortran ())
10726 switch (code)
10727 {
10728 case OMP_TARGET:
10729 case OMP_TARGET_DATA:
10730 case OMP_TARGET_ENTER_DATA:
10731 case OMP_TARGET_EXIT_DATA:
10732 case OACC_DECLARE:
10733 case OACC_HOST_DATA:
10734 case OACC_PARALLEL:
10735 case OACC_KERNELS:
10736 ctx->target_firstprivatize_array_bases = true;
10737 default:
10738 break;
10739 }
10740
10741 if (code == OMP_TARGET
10742 || code == OMP_TARGET_DATA
10743 || code == OMP_TARGET_ENTER_DATA
10744 || code == OMP_TARGET_EXIT_DATA)
10745 {
10746 vec<omp_mapping_group> *groups;
10747 groups = omp_gather_mapping_groups (list_p);
10748 if (groups)
10749 {
10750 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10751 grpmap = omp_index_mapping_groups (groups);
10752
10753 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10754 list_p);
10755
10756 omp_mapping_group *outlist = NULL;
10757
10758 /* Topological sorting may fail if we have duplicate nodes, which
10759 we should have detected and shown an error for already. Skip
10760 sorting in that case. */
10761 if (seen_error ())
10762 goto failure;
10763
10764 delete grpmap;
10765 delete groups;
10766
10767 /* Rebuild, now that we have struct sibling lists. */
10768 groups = omp_gather_mapping_groups (list_p);
10769 grpmap = omp_index_mapping_groups (groups);
10770
10771 outlist = omp_tsort_mapping_groups (groups, grpmap);
10772 outlist = omp_segregate_mapping_groups (outlist);
10773 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10774
10775 failure:
10776 delete grpmap;
10777 delete groups;
10778 }
10779 }
10780 else if (region_type & ORT_ACC)
10781 {
10782 vec<omp_mapping_group> *groups;
10783 groups = omp_gather_mapping_groups (list_p);
10784 if (groups)
10785 {
10786 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10787 grpmap = omp_index_mapping_groups (groups);
10788
10789 oacc_resolve_clause_dependencies (groups, grpmap);
10790 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10791 list_p);
10792
10793 delete groups;
10794 delete grpmap;
10795 }
10796 }
10797
10798 while ((c = *list_p) != NULL)
10799 {
10800 bool remove = false;
10801 bool notice_outer = true;
10802 const char *check_non_private = NULL;
10803 unsigned int flags;
10804 tree decl;
10805
10806 switch (OMP_CLAUSE_CODE (c))
10807 {
10808 case OMP_CLAUSE_PRIVATE:
10809 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10810 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10811 {
10812 flags |= GOVD_PRIVATE_OUTER_REF;
10813 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10814 }
10815 else
10816 notice_outer = false;
10817 goto do_add;
10818 case OMP_CLAUSE_SHARED:
10819 flags = GOVD_SHARED | GOVD_EXPLICIT;
10820 goto do_add;
10821 case OMP_CLAUSE_FIRSTPRIVATE:
10822 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10823 check_non_private = "firstprivate";
10824 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10825 {
10826 gcc_assert (code == OMP_TARGET);
10827 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10828 }
10829 goto do_add;
10830 case OMP_CLAUSE_LASTPRIVATE:
10831 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10832 switch (code)
10833 {
10834 case OMP_DISTRIBUTE:
10835 error_at (OMP_CLAUSE_LOCATION (c),
10836 "conditional %<lastprivate%> clause on "
10837 "%qs construct", "distribute");
10838 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10839 break;
10840 case OMP_TASKLOOP:
10841 error_at (OMP_CLAUSE_LOCATION (c),
10842 "conditional %<lastprivate%> clause on "
10843 "%qs construct", "taskloop");
10844 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10845 break;
10846 default:
10847 break;
10848 }
10849 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10850 if (code != OMP_LOOP)
10851 check_non_private = "lastprivate";
10852 decl = OMP_CLAUSE_DECL (c);
10853 if (error_operand_p (decl))
10854 goto do_add;
10855 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10856 && !lang_hooks.decls.omp_scalar_p (decl, true))
10857 {
10858 error_at (OMP_CLAUSE_LOCATION (c),
10859 "non-scalar variable %qD in conditional "
10860 "%<lastprivate%> clause", decl);
10861 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10862 }
10863 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10864 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10865 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10866 false);
10867 goto do_add;
10868 case OMP_CLAUSE_REDUCTION:
10869 if (OMP_CLAUSE_REDUCTION_TASK (c))
10870 {
10871 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10872 {
10873 if (nowait == -1)
10874 nowait = omp_find_clause (*list_p,
10875 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10876 if (nowait
10877 && (outer_ctx == NULL
10878 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10879 {
10880 error_at (OMP_CLAUSE_LOCATION (c),
10881 "%<task%> reduction modifier on a construct "
10882 "with a %<nowait%> clause");
10883 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10884 }
10885 }
10886 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10887 {
10888 error_at (OMP_CLAUSE_LOCATION (c),
10889 "invalid %<task%> reduction modifier on construct "
10890 "other than %<parallel%>, %qs, %<sections%> or "
10891 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10892 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10893 }
10894 }
10895 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10896 switch (code)
10897 {
10898 case OMP_SECTIONS:
10899 error_at (OMP_CLAUSE_LOCATION (c),
10900 "%<inscan%> %<reduction%> clause on "
10901 "%qs construct", "sections");
10902 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10903 break;
10904 case OMP_PARALLEL:
10905 error_at (OMP_CLAUSE_LOCATION (c),
10906 "%<inscan%> %<reduction%> clause on "
10907 "%qs construct", "parallel");
10908 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10909 break;
10910 case OMP_TEAMS:
10911 error_at (OMP_CLAUSE_LOCATION (c),
10912 "%<inscan%> %<reduction%> clause on "
10913 "%qs construct", "teams");
10914 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10915 break;
10916 case OMP_TASKLOOP:
10917 error_at (OMP_CLAUSE_LOCATION (c),
10918 "%<inscan%> %<reduction%> clause on "
10919 "%qs construct", "taskloop");
10920 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10921 break;
10922 case OMP_SCOPE:
10923 error_at (OMP_CLAUSE_LOCATION (c),
10924 "%<inscan%> %<reduction%> clause on "
10925 "%qs construct", "scope");
10926 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10927 break;
10928 default:
10929 break;
10930 }
10931 /* FALLTHRU */
10932 case OMP_CLAUSE_IN_REDUCTION:
10933 case OMP_CLAUSE_TASK_REDUCTION:
10934 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
10935 /* OpenACC permits reductions on private variables. */
10936 if (!(region_type & ORT_ACC)
10937 /* taskgroup is actually not a worksharing region. */
10938 && code != OMP_TASKGROUP)
10939 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
10940 decl = OMP_CLAUSE_DECL (c);
10941 if (TREE_CODE (decl) == MEM_REF)
10942 {
10943 tree type = TREE_TYPE (decl);
10944 bool saved_into_ssa = gimplify_ctxp->into_ssa;
10945 gimplify_ctxp->into_ssa = false;
10946 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
10947 NULL, is_gimple_val, fb_rvalue, false)
10948 == GS_ERROR)
10949 {
10950 gimplify_ctxp->into_ssa = saved_into_ssa;
10951 remove = true;
10952 break;
10953 }
10954 gimplify_ctxp->into_ssa = saved_into_ssa;
10955 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10956 if (DECL_P (v))
10957 {
10958 omp_firstprivatize_variable (ctx, v);
10959 omp_notice_variable (ctx, v, true);
10960 }
10961 decl = TREE_OPERAND (decl, 0);
10962 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10963 {
10964 gimplify_ctxp->into_ssa = false;
10965 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
10966 NULL, is_gimple_val, fb_rvalue, false)
10967 == GS_ERROR)
10968 {
10969 gimplify_ctxp->into_ssa = saved_into_ssa;
10970 remove = true;
10971 break;
10972 }
10973 gimplify_ctxp->into_ssa = saved_into_ssa;
10974 v = TREE_OPERAND (decl, 1);
10975 if (DECL_P (v))
10976 {
10977 omp_firstprivatize_variable (ctx, v);
10978 omp_notice_variable (ctx, v, true);
10979 }
10980 decl = TREE_OPERAND (decl, 0);
10981 }
10982 if (TREE_CODE (decl) == ADDR_EXPR
10983 || TREE_CODE (decl) == INDIRECT_REF)
10984 decl = TREE_OPERAND (decl, 0);
10985 }
10986 goto do_add_decl;
10987 case OMP_CLAUSE_LINEAR:
10988 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
10989 is_gimple_val, fb_rvalue) == GS_ERROR)
10990 {
10991 remove = true;
10992 break;
10993 }
10994 else
10995 {
10996 if (code == OMP_SIMD
10997 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10998 {
10999 struct gimplify_omp_ctx *octx = outer_ctx;
11000 if (octx
11001 && octx->region_type == ORT_WORKSHARE
11002 && octx->combined_loop
11003 && !octx->distribute)
11004 {
11005 if (octx->outer_context
11006 && (octx->outer_context->region_type
11007 == ORT_COMBINED_PARALLEL))
11008 octx = octx->outer_context->outer_context;
11009 else
11010 octx = octx->outer_context;
11011 }
11012 if (octx
11013 && octx->region_type == ORT_WORKSHARE
11014 && octx->combined_loop
11015 && octx->distribute)
11016 {
11017 error_at (OMP_CLAUSE_LOCATION (c),
11018 "%<linear%> clause for variable other than "
11019 "loop iterator specified on construct "
11020 "combined with %<distribute%>");
11021 remove = true;
11022 break;
11023 }
11024 }
11025 /* For combined #pragma omp parallel for simd, need to put
11026 lastprivate and perhaps firstprivate too on the
11027 parallel. Similarly for #pragma omp for simd. */
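/* E.g. (hypothetical), for

     #pragma omp parallel for simd linear (l)

   l must also be treated as lastprivate (and firstprivate, when
   copy-in is needed) on the enclosing parallel; the loop below walks
   the outer contexts adding the appropriate GOVD_* flags.  */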
11028 struct gimplify_omp_ctx *octx = outer_ctx;
11029 bool taskloop_seen = false;
11030 decl = NULL_TREE;
11031 do
11032 {
11033 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11034 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11035 break;
11036 decl = OMP_CLAUSE_DECL (c);
11037 if (error_operand_p (decl))
11038 {
11039 decl = NULL_TREE;
11040 break;
11041 }
11042 flags = GOVD_SEEN;
11043 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11044 flags |= GOVD_FIRSTPRIVATE;
11045 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11046 flags |= GOVD_LASTPRIVATE;
11047 if (octx
11048 && octx->region_type == ORT_WORKSHARE
11049 && octx->combined_loop)
11050 {
11051 if (octx->outer_context
11052 && (octx->outer_context->region_type
11053 == ORT_COMBINED_PARALLEL))
11054 octx = octx->outer_context;
11055 else if (omp_check_private (octx, decl, false))
11056 break;
11057 }
11058 else if (octx
11059 && (octx->region_type & ORT_TASK) != 0
11060 && octx->combined_loop)
11061 taskloop_seen = true;
11062 else if (octx
11063 && octx->region_type == ORT_COMBINED_PARALLEL
11064 && ((ctx->region_type == ORT_WORKSHARE
11065 && octx == outer_ctx)
11066 || taskloop_seen))
11067 flags = GOVD_SEEN | GOVD_SHARED;
11068 else if (octx
11069 && ((octx->region_type & ORT_COMBINED_TEAMS)
11070 == ORT_COMBINED_TEAMS))
11071 flags = GOVD_SEEN | GOVD_SHARED;
11072 else if (octx
11073 && octx->region_type == ORT_COMBINED_TARGET)
11074 {
11075 if (flags & GOVD_LASTPRIVATE)
11076 flags = GOVD_SEEN | GOVD_MAP;
11077 }
11078 else
11079 break;
11080 splay_tree_node on
11081 = splay_tree_lookup (octx->variables,
11082 (splay_tree_key) decl);
11083 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11084 {
11085 octx = NULL;
11086 break;
11087 }
11088 omp_add_variable (octx, decl, flags);
11089 if (octx->outer_context == NULL)
11090 break;
11091 octx = octx->outer_context;
11092 }
11093 while (1);
11094 if (octx
11095 && decl
11096 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11097 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11098 omp_notice_variable (octx, decl, true);
11099 }
11100 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11101 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11102 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11103 {
11104 notice_outer = false;
11105 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11106 }
11107 goto do_add;
11108
11109 case OMP_CLAUSE_MAP:
11110 decl = OMP_CLAUSE_DECL (c);
11111 if (error_operand_p (decl))
11112 remove = true;
11113 switch (code)
11114 {
11115 case OMP_TARGET:
11116 break;
11117 case OACC_DATA:
11118 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11119 break;
11120 /* FALLTHRU */
11121 case OMP_TARGET_DATA:
11122 case OMP_TARGET_ENTER_DATA:
11123 case OMP_TARGET_EXIT_DATA:
11124 case OACC_ENTER_DATA:
11125 case OACC_EXIT_DATA:
11126 case OACC_HOST_DATA:
11127 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11128 || (OMP_CLAUSE_MAP_KIND (c)
11129 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11130 /* For target {,enter ,exit }data only the array slice is
11131 mapped, but not the pointer to it. */
11132 remove = true;
11133 break;
11134 default:
11135 break;
11136 }
11137 /* For Fortran, not only the pointer to the data is mapped but also
11138 the address of the pointer, the array descriptor etc.; for
11139 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11140 does not make sense. Likewise, for 'update' only transferring the
11141 data itself is needed as the rest has been handled in previous
11142 directives. However, for 'exit data', the array descriptor needs
11143 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11144
11145 NOTE: Generally, it is not safe to perform "enter data" operations
11146 on arrays where the data *or the descriptor* may go out of scope
11147 before a corresponding "exit data" operation -- and such a
11148 descriptor may be synthesized temporarily, e.g. to pass an
11149 explicit-shape array to a function expecting an assumed-shape
11150 argument. Performing "enter data" inside the called function
11151 would thus be problematic. */
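/* Hypothetical Fortran example: for an allocatable array A,

     !$omp target exit data map(delete: A)

   the descriptor's GOMP_MAP_TO_PSET node is rewritten below to
   GOMP_MAP_DELETE (or to GOMP_MAP_RELEASE for non-'delete' map kinds
   on 'exit data').  */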
11152 if (code == OMP_TARGET_EXIT_DATA
11153 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
11154 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
11155 == GOMP_MAP_DELETE
11156 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
11157 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
11158 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11159 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
11160 remove = true;
11161
11162 if (remove)
11163 break;
11164 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11165 {
11166 struct gimplify_omp_ctx *octx;
11167 for (octx = outer_ctx; octx; octx = octx->outer_context)
11168 {
11169 if (octx->region_type != ORT_ACC_HOST_DATA)
11170 break;
11171 splay_tree_node n2
11172 = splay_tree_lookup (octx->variables,
11173 (splay_tree_key) decl);
11174 if (n2)
11175 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11176 "declared in enclosing %<host_data%> region",
11177 DECL_NAME (decl));
11178 }
11179 }
11180 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11181 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11182 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11183 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11184 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11185 {
11186 remove = true;
11187 break;
11188 }
11189 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11190 || (OMP_CLAUSE_MAP_KIND (c)
11191 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11192 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11193 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11194 {
11195 OMP_CLAUSE_SIZE (c)
11196 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11197 false);
11198 if ((region_type & ORT_TARGET) != 0)
11199 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11200 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11201 }
11202
11203 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11204 {
11205 tree base = omp_strip_components_and_deref (decl);
11206 if (DECL_P (base))
11207 {
11208 decl = base;
11209 splay_tree_node n
11210 = splay_tree_lookup (ctx->variables,
11211 (splay_tree_key) decl);
11212 if (seen_error ()
11213 && n
11214 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11215 {
11216 remove = true;
11217 break;
11218 }
11219 flags = GOVD_MAP | GOVD_EXPLICIT;
11220
11221 goto do_add_decl;
11222 }
11223 }
11224
11225 if (TREE_CODE (decl) == TARGET_EXPR)
11226 {
11227 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11228 is_gimple_lvalue, fb_lvalue)
11229 == GS_ERROR)
11230 remove = true;
11231 }
11232 else if (!DECL_P (decl))
11233 {
11234 tree d = decl, *pd;
11235 if (TREE_CODE (d) == ARRAY_REF)
11236 {
11237 while (TREE_CODE (d) == ARRAY_REF)
11238 d = TREE_OPERAND (d, 0);
11239 if (TREE_CODE (d) == COMPONENT_REF
11240 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11241 decl = d;
11242 }
11243 pd = &OMP_CLAUSE_DECL (c);
11244 if (d == decl
11245 && TREE_CODE (decl) == INDIRECT_REF
11246 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11247 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11248 == REFERENCE_TYPE)
11249 && (OMP_CLAUSE_MAP_KIND (c)
11250 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11251 {
11252 pd = &TREE_OPERAND (decl, 0);
11253 decl = TREE_OPERAND (decl, 0);
11254 }
11255 /* An "attach/detach" operation on an update directive should
11256 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11257 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11258 depends on the previous mapping. */
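/* Hypothetical example:

     #pragma acc update device(s->arr[0:n])

   where an attach/detach node for "s->arr" is rewritten below to
   GOMP_MAP_ALWAYS_POINTER, so the update refreshes the device pointer
   according to the existing mapping rather than attaching or
   detaching.  */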
11259 if (code == OACC_UPDATE
11260 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11261 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11262
11263 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11264 {
11265 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11266 == ARRAY_TYPE)
11267 remove = true;
11268 else
11269 {
11270 gomp_map_kind k = ((code == OACC_EXIT_DATA
11271 || code == OMP_TARGET_EXIT_DATA)
11272 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11273 OMP_CLAUSE_SET_MAP_KIND (c, k);
11274 }
11275 }
11276
11277 tree cref = decl;
11278
11279 while (TREE_CODE (cref) == ARRAY_REF)
11280 cref = TREE_OPERAND (cref, 0);
11281
11282 if (TREE_CODE (cref) == INDIRECT_REF)
11283 cref = TREE_OPERAND (cref, 0);
11284
11285 if (TREE_CODE (cref) == COMPONENT_REF)
11286 {
11287 tree base = cref;
11288 while (base && !DECL_P (base))
11289 {
11290 tree innerbase = omp_get_base_pointer (base);
11291 if (!innerbase)
11292 break;
11293 base = innerbase;
11294 }
11295 if (base
11296 && DECL_P (base)
11297 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11298 && POINTER_TYPE_P (TREE_TYPE (base)))
11299 {
11300 splay_tree_node n
11301 = splay_tree_lookup (ctx->variables,
11302 (splay_tree_key) base);
11303 n->value |= GOVD_SEEN;
11304 }
11305 }
11306
11307 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11308 {
11309 /* Don't gimplify *pd fully at this point, as the base
11310 will need to be adjusted during omp lowering. */
11311 auto_vec<tree, 10> expr_stack;
11312 tree *p = pd;
11313 while (handled_component_p (*p)
11314 || TREE_CODE (*p) == INDIRECT_REF
11315 || TREE_CODE (*p) == ADDR_EXPR
11316 || TREE_CODE (*p) == MEM_REF
11317 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11318 {
11319 expr_stack.safe_push (*p);
11320 p = &TREE_OPERAND (*p, 0);
11321 }
11322 for (int i = expr_stack.length () - 1; i >= 0; i--)
11323 {
11324 tree t = expr_stack[i];
11325 if (TREE_CODE (t) == ARRAY_REF
11326 || TREE_CODE (t) == ARRAY_RANGE_REF)
11327 {
11328 if (TREE_OPERAND (t, 2) == NULL_TREE)
11329 {
11330 tree low = unshare_expr (array_ref_low_bound (t));
11331 if (!is_gimple_min_invariant (low))
11332 {
11333 TREE_OPERAND (t, 2) = low;
11334 if (gimplify_expr (&TREE_OPERAND (t, 2),
11335 pre_p, NULL,
11336 is_gimple_reg,
11337 fb_rvalue) == GS_ERROR)
11338 remove = true;
11339 }
11340 }
11341 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11342 NULL, is_gimple_reg,
11343 fb_rvalue) == GS_ERROR)
11344 remove = true;
11345 if (TREE_OPERAND (t, 3) == NULL_TREE)
11346 {
11347 tree elmt_size = array_ref_element_size (t);
11348 if (!is_gimple_min_invariant (elmt_size))
11349 {
11350 elmt_size = unshare_expr (elmt_size);
11351 tree elmt_type
11352 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11353 0)));
11354 tree factor
11355 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11356 elmt_size
11357 = size_binop (EXACT_DIV_EXPR, elmt_size,
11358 factor);
11359 TREE_OPERAND (t, 3) = elmt_size;
11360 if (gimplify_expr (&TREE_OPERAND (t, 3),
11361 pre_p, NULL,
11362 is_gimple_reg,
11363 fb_rvalue) == GS_ERROR)
11364 remove = true;
11365 }
11366 }
11367 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11368 NULL, is_gimple_reg,
11369 fb_rvalue) == GS_ERROR)
11370 remove = true;
11371 }
11372 else if (TREE_CODE (t) == COMPONENT_REF)
11373 {
11374 if (TREE_OPERAND (t, 2) == NULL_TREE)
11375 {
11376 tree offset = component_ref_field_offset (t);
11377 if (!is_gimple_min_invariant (offset))
11378 {
11379 offset = unshare_expr (offset);
11380 tree field = TREE_OPERAND (t, 1);
11381 tree factor
11382 = size_int (DECL_OFFSET_ALIGN (field)
11383 / BITS_PER_UNIT);
11384 offset = size_binop (EXACT_DIV_EXPR, offset,
11385 factor);
11386 TREE_OPERAND (t, 2) = offset;
11387 if (gimplify_expr (&TREE_OPERAND (t, 2),
11388 pre_p, NULL,
11389 is_gimple_reg,
11390 fb_rvalue) == GS_ERROR)
11391 remove = true;
11392 }
11393 }
11394 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11395 NULL, is_gimple_reg,
11396 fb_rvalue) == GS_ERROR)
11397 remove = true;
11398 }
11399 }
11400 for (; expr_stack.length () > 0; )
11401 {
11402 tree t = expr_stack.pop ();
11403
11404 if (TREE_CODE (t) == ARRAY_REF
11405 || TREE_CODE (t) == ARRAY_RANGE_REF)
11406 {
11407 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11408 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11409 NULL, is_gimple_val,
11410 fb_rvalue) == GS_ERROR)
11411 remove = true;
11412 }
11413 }
11414 }
11415 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11416 fb_lvalue) == GS_ERROR)
11417 {
11418 remove = true;
11419 break;
11420 }
11421
11422 if (!remove
11423 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
11424 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
11425 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11426 && OMP_CLAUSE_CHAIN (c)
11427 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
11428 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11429 == GOMP_MAP_ALWAYS_POINTER)
11430 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11431 == GOMP_MAP_ATTACH_DETACH)
11432 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11433 == GOMP_MAP_TO_PSET)))
11434 prev_list_p = list_p;
11435
11436 break;
11437 }
11438 flags = GOVD_MAP | GOVD_EXPLICIT;
11439 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11440 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11441 flags |= GOVD_MAP_ALWAYS_TO;
11442
11443 if ((code == OMP_TARGET
11444 || code == OMP_TARGET_DATA
11445 || code == OMP_TARGET_ENTER_DATA
11446 || code == OMP_TARGET_EXIT_DATA)
11447 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11448 {
11449 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11450 octx = octx->outer_context)
11451 {
11452 splay_tree_node n
11453 = splay_tree_lookup (octx->variables,
11454 (splay_tree_key) OMP_CLAUSE_DECL (c));
11455 /* If this is contained in an outer OpenMP region as a
11456 firstprivate value, remove the attach/detach. */
11457 if (n && (n->value & GOVD_FIRSTPRIVATE))
11458 {
11459 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11460 goto do_add;
11461 }
11462 }
11463
11464 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11465 ? GOMP_MAP_DETACH
11466 : GOMP_MAP_ATTACH);
11467 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11468 }
11469
11470 goto do_add;
11471
11472 case OMP_CLAUSE_AFFINITY:
11473 gimplify_omp_affinity (list_p, pre_p);
11474 remove = true;
11475 break;
11476 case OMP_CLAUSE_DOACROSS:
11477 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11478 {
11479 tree deps = OMP_CLAUSE_DECL (c);
11480 while (deps && TREE_CODE (deps) == TREE_LIST)
11481 {
11482 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11483 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11484 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11485 pre_p, NULL, is_gimple_val, fb_rvalue);
11486 deps = TREE_CHAIN (deps);
11487 }
11488 }
11489 else
11490 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11491 == OMP_CLAUSE_DOACROSS_SOURCE);
11492 break;
11493 case OMP_CLAUSE_DEPEND:
11494 if (handled_depend_iterators == -1)
11495 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11496 if (handled_depend_iterators)
11497 {
11498 if (handled_depend_iterators == 2)
11499 remove = true;
11500 break;
11501 }
11502 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11503 {
11504 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11505 NULL, is_gimple_val, fb_rvalue);
11506 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11507 }
11508 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11509 {
11510 remove = true;
11511 break;
11512 }
11513 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11514 {
11515 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11516 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11517 is_gimple_val, fb_rvalue) == GS_ERROR)
11518 {
11519 remove = true;
11520 break;
11521 }
11522 }
11523 if (code == OMP_TASK)
11524 ctx->has_depend = true;
11525 break;
11526
11527 case OMP_CLAUSE_TO:
11528 case OMP_CLAUSE_FROM:
11529 case OMP_CLAUSE__CACHE_:
11530 decl = OMP_CLAUSE_DECL (c);
11531 if (error_operand_p (decl))
11532 {
11533 remove = true;
11534 break;
11535 }
11536 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11537 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11538 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11539 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11540 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11541 {
11542 remove = true;
11543 break;
11544 }
11545 if (!DECL_P (decl))
11546 {
11547 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11548 NULL, is_gimple_lvalue, fb_lvalue)
11549 == GS_ERROR)
11550 {
11551 remove = true;
11552 break;
11553 }
11554 break;
11555 }
11556 goto do_notice;
11557
11558 case OMP_CLAUSE_USE_DEVICE_PTR:
11559 case OMP_CLAUSE_USE_DEVICE_ADDR:
11560 flags = GOVD_EXPLICIT;
11561 goto do_add;
11562
11563 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11564 decl = OMP_CLAUSE_DECL (c);
11565 while (TREE_CODE (decl) == INDIRECT_REF
11566 || TREE_CODE (decl) == ARRAY_REF)
11567 decl = TREE_OPERAND (decl, 0);
11568 flags = GOVD_EXPLICIT;
11569 goto do_add_decl;
11570
11571 case OMP_CLAUSE_IS_DEVICE_PTR:
11572 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11573 goto do_add;
11574
11575 do_add:
11576 decl = OMP_CLAUSE_DECL (c);
11577 do_add_decl:
11578 if (error_operand_p (decl))
11579 {
11580 remove = true;
11581 break;
11582 }
11583 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11584 {
11585 tree t = omp_member_access_dummy_var (decl);
11586 if (t)
11587 {
11588 tree v = DECL_VALUE_EXPR (decl);
11589 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11590 if (outer_ctx)
11591 omp_notice_variable (outer_ctx, t, true);
11592 }
11593 }
11594 if (code == OACC_DATA
11595 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11596 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11597 flags |= GOVD_MAP_0LEN_ARRAY;
11598 omp_add_variable (ctx, decl, flags);
11599 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11600 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11601 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11602 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11603 {
11604 struct gimplify_omp_ctx *pctx
11605 = code == OMP_TARGET ? outer_ctx : ctx;
11606 if (pctx)
11607 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11608 GOVD_LOCAL | GOVD_SEEN);
11609 if (pctx
11610 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11611 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11612 find_decl_expr,
11613 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11614 NULL) == NULL_TREE)
11615 omp_add_variable (pctx,
11616 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11617 GOVD_LOCAL | GOVD_SEEN);
11618 gimplify_omp_ctxp = pctx;
11619 push_gimplify_context ();
11620
11621 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11622 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11623
11624 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11625 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11626 pop_gimplify_context
11627 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11628 push_gimplify_context ();
11629 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11630 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11631 pop_gimplify_context
11632 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11633 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11634 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11635
11636 gimplify_omp_ctxp = outer_ctx;
11637 }
11638 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11639 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11640 {
11641 gimplify_omp_ctxp = ctx;
11642 push_gimplify_context ();
11643 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11644 {
11645 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11646 NULL, NULL);
11647 TREE_SIDE_EFFECTS (bind) = 1;
11648 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11649 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11650 }
11651 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11652 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11653 pop_gimplify_context
11654 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11655 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11656
11657 gimplify_omp_ctxp = outer_ctx;
11658 }
11659 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11660 && OMP_CLAUSE_LINEAR_STMT (c))
11661 {
11662 gimplify_omp_ctxp = ctx;
11663 push_gimplify_context ();
11664 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11665 {
11666 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11667 NULL, NULL);
11668 TREE_SIDE_EFFECTS (bind) = 1;
11669 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11670 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11671 }
11672 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11673 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11674 pop_gimplify_context
11675 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11676 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11677
11678 gimplify_omp_ctxp = outer_ctx;
11679 }
11680 if (notice_outer)
11681 goto do_notice;
11682 break;
11683
11684 case OMP_CLAUSE_COPYIN:
11685 case OMP_CLAUSE_COPYPRIVATE:
11686 decl = OMP_CLAUSE_DECL (c);
11687 if (error_operand_p (decl))
11688 {
11689 remove = true;
11690 break;
11691 }
11692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11693 && !remove
11694 && !omp_check_private (ctx, decl, true))
11695 {
11696 remove = true;
11697 if (is_global_var (decl))
11698 {
11699 if (DECL_THREAD_LOCAL_P (decl))
11700 remove = false;
11701 else if (DECL_HAS_VALUE_EXPR_P (decl))
11702 {
11703 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11704
11705 if (value
11706 && DECL_P (value)
11707 && DECL_THREAD_LOCAL_P (value))
11708 remove = false;
11709 }
11710 }
11711 if (remove)
11712 error_at (OMP_CLAUSE_LOCATION (c),
11713 "copyprivate variable %qE is not threadprivate"
11714 " or private in outer context", DECL_NAME (decl));
11715 }
11716 do_notice:
11717 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11718 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11719 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11720 && outer_ctx
11721 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11722 || (region_type == ORT_WORKSHARE
11723 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11724 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11725 || code == OMP_LOOP)))
11726 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11727 || (code == OMP_LOOP
11728 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11729 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11730 == ORT_COMBINED_TEAMS))))
11731 {
11732 splay_tree_node on
11733 = splay_tree_lookup (outer_ctx->variables,
11734 (splay_tree_key)decl);
11735 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11736 {
11737 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11738 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11739 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11740 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11741 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11742 == POINTER_TYPE))))
11743 omp_firstprivatize_variable (outer_ctx, decl);
11744 else
11745 {
11746 omp_add_variable (outer_ctx, decl,
11747 GOVD_SEEN | GOVD_SHARED);
11748 if (outer_ctx->outer_context)
11749 omp_notice_variable (outer_ctx->outer_context, decl,
11750 true);
11751 }
11752 }
11753 }
11754 if (outer_ctx)
11755 omp_notice_variable (outer_ctx, decl, true);
11756 if (check_non_private
11757 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11758 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11759 || decl == OMP_CLAUSE_DECL (c)
11760 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11761 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11762 == ADDR_EXPR
11763 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11764 == POINTER_PLUS_EXPR
11765 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11766 (OMP_CLAUSE_DECL (c), 0), 0))
11767 == ADDR_EXPR)))))
11768 && omp_check_private (ctx, decl, false))
11769 {
11770 error ("%s variable %qE is private in outer context",
11771 check_non_private, DECL_NAME (decl));
11772 remove = true;
11773 }
11774 break;
11775
11776 case OMP_CLAUSE_DETACH:
11777 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11778 goto do_add;
11779
11780 case OMP_CLAUSE_IF:
11781 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11782 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11783 {
11784 const char *p[2];
11785 for (int i = 0; i < 2; i++)
11786 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11787 {
11788 case VOID_CST: p[i] = "cancel"; break;
11789 case OMP_PARALLEL: p[i] = "parallel"; break;
11790 case OMP_SIMD: p[i] = "simd"; break;
11791 case OMP_TASK: p[i] = "task"; break;
11792 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11793 case OMP_TARGET_DATA: p[i] = "target data"; break;
11794 case OMP_TARGET: p[i] = "target"; break;
11795 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11796 case OMP_TARGET_ENTER_DATA:
11797 p[i] = "target enter data"; break;
11798 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11799 default: gcc_unreachable ();
11800 }
11801 error_at (OMP_CLAUSE_LOCATION (c),
11802 "expected %qs %<if%> clause modifier rather than %qs",
11803 p[0], p[1]);
11804 remove = true;
11805 }
11806 /* Fall through. */
11807
11808 case OMP_CLAUSE_FINAL:
11809 OMP_CLAUSE_OPERAND (c, 0)
11810 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11811 /* Fall through. */
11812
11813 case OMP_CLAUSE_NUM_TEAMS:
11814 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11815 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11816 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11817 {
11818 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11819 {
11820 remove = true;
11821 break;
11822 }
11823 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11824 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11825 pre_p, NULL, true);
11826 }
11827 /* Fall through. */
11828
11829 case OMP_CLAUSE_SCHEDULE:
11830 case OMP_CLAUSE_NUM_THREADS:
11831 case OMP_CLAUSE_THREAD_LIMIT:
11832 case OMP_CLAUSE_DIST_SCHEDULE:
11833 case OMP_CLAUSE_DEVICE:
11834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11835 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11836 {
11837 if (code != OMP_TARGET)
11838 {
11839 error_at (OMP_CLAUSE_LOCATION (c),
11840 "%<device%> clause with %<ancestor%> is only "
11841 "allowed on %<target%> construct");
11842 remove = true;
11843 break;
11844 }
11845
11846 tree clauses = *orig_list_p;
11847 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11848 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11849 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11850 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11851 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11852 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11853 )
11854 {
11855 error_at (OMP_CLAUSE_LOCATION (c),
11856 "with %<ancestor%>, only the %<device%>, "
11857 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11858 "and %<map%> clauses may appear on the "
11859 "construct");
11860 remove = true;
11861 break;
11862 }
11863 }
11864 /* Fall through. */
11865
11866 case OMP_CLAUSE_PRIORITY:
11867 case OMP_CLAUSE_GRAINSIZE:
11868 case OMP_CLAUSE_NUM_TASKS:
11869 case OMP_CLAUSE_FILTER:
11870 case OMP_CLAUSE_HINT:
11871 case OMP_CLAUSE_ASYNC:
11872 case OMP_CLAUSE_WAIT:
11873 case OMP_CLAUSE_NUM_GANGS:
11874 case OMP_CLAUSE_NUM_WORKERS:
11875 case OMP_CLAUSE_VECTOR_LENGTH:
11876 case OMP_CLAUSE_WORKER:
11877 case OMP_CLAUSE_VECTOR:
11878 if (OMP_CLAUSE_OPERAND (c, 0)
11879 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11880 {
11881 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11882 {
11883 remove = true;
11884 break;
11885 }
11886 /* All these clauses care about the value, not a particular decl,
11887 so try to force it into an SSA_NAME or a fresh temporary. */
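/* Hypothetical example: num_threads (foo ()) is gimplified so that
     D.1234 = foo ();
   lands in PRE_P and the clause operand becomes the temporary D.1234
   (an SSA_NAME where possible); D.1234 is a made-up name.  */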
11888 OMP_CLAUSE_OPERAND (c, 0)
11889 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11890 pre_p, NULL, true);
11891 }
11892 break;
11893
11894 case OMP_CLAUSE_GANG:
11895 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11896 is_gimple_val, fb_rvalue) == GS_ERROR)
11897 remove = true;
11898 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11899 is_gimple_val, fb_rvalue) == GS_ERROR)
11900 remove = true;
11901 break;
11902
11903 case OMP_CLAUSE_NOWAIT:
11904 nowait = 1;
11905 break;
11906
11907 case OMP_CLAUSE_ORDERED:
11908 case OMP_CLAUSE_UNTIED:
11909 case OMP_CLAUSE_COLLAPSE:
11910 case OMP_CLAUSE_TILE:
11911 case OMP_CLAUSE_AUTO:
11912 case OMP_CLAUSE_SEQ:
11913 case OMP_CLAUSE_INDEPENDENT:
11914 case OMP_CLAUSE_MERGEABLE:
11915 case OMP_CLAUSE_PROC_BIND:
11916 case OMP_CLAUSE_SAFELEN:
11917 case OMP_CLAUSE_SIMDLEN:
11918 case OMP_CLAUSE_NOGROUP:
11919 case OMP_CLAUSE_THREADS:
11920 case OMP_CLAUSE_SIMD:
11921 case OMP_CLAUSE_BIND:
11922 case OMP_CLAUSE_IF_PRESENT:
11923 case OMP_CLAUSE_FINALIZE:
11924 break;
11925
11926 case OMP_CLAUSE_ORDER:
11927 ctx->order_concurrent = true;
11928 break;
11929
11930 case OMP_CLAUSE_DEFAULTMAP:
11931 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11932 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11933 {
11934 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11935 gdmkmin = GDMK_SCALAR;
11936 gdmkmax = GDMK_POINTER;
11937 break;
11938 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
11939 gdmkmin = GDMK_SCALAR;
11940 gdmkmax = GDMK_SCALAR_TARGET;
11941 break;
11942 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
11943 gdmkmin = gdmkmax = GDMK_AGGREGATE;
11944 break;
11945 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
11946 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
11947 break;
11948 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
11949 gdmkmin = gdmkmax = GDMK_POINTER;
11950 break;
11951 default:
11952 gcc_unreachable ();
11953 }
11954 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
11955 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
11956 {
11957 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
11958 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
11959 break;
11960 case OMP_CLAUSE_DEFAULTMAP_TO:
11961 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
11962 break;
11963 case OMP_CLAUSE_DEFAULTMAP_FROM:
11964 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
11965 break;
11966 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
11967 ctx->defaultmap[gdmk] = GOVD_MAP;
11968 break;
11969 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
11970 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11971 break;
11972 case OMP_CLAUSE_DEFAULTMAP_NONE:
11973 ctx->defaultmap[gdmk] = 0;
11974 break;
11975 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
11976 switch (gdmk)
11977 {
11978 case GDMK_SCALAR:
11979 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11980 break;
11981 case GDMK_SCALAR_TARGET:
11982 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
11983 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11984 break;
11985 case GDMK_AGGREGATE:
11986 case GDMK_ALLOCATABLE:
11987 ctx->defaultmap[gdmk] = GOVD_MAP;
11988 break;
11989 case GDMK_POINTER:
11990 ctx->defaultmap[gdmk] = GOVD_MAP;
11991 if (!lang_GNU_Fortran ())
11992 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
11993 break;
11994 default:
11995 gcc_unreachable ();
11996 }
11997 break;
11998 default:
11999 gcc_unreachable ();
12000 }
12001 break;
12002
12003 case OMP_CLAUSE_ALIGNED:
12004 decl = OMP_CLAUSE_DECL (c);
12005 if (error_operand_p (decl))
12006 {
12007 remove = true;
12008 break;
12009 }
12010 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12011 is_gimple_val, fb_rvalue) == GS_ERROR)
12012 {
12013 remove = true;
12014 break;
12015 }
12016 if (!is_global_var (decl)
12017 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12018 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12019 break;
12020
12021 case OMP_CLAUSE_NONTEMPORAL:
12022 decl = OMP_CLAUSE_DECL (c);
12023 if (error_operand_p (decl))
12024 {
12025 remove = true;
12026 break;
12027 }
12028 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12029 break;
12030
12031 case OMP_CLAUSE_ALLOCATE:
12032 decl = OMP_CLAUSE_DECL (c);
12033 if (error_operand_p (decl))
12034 {
12035 remove = true;
12036 break;
12037 }
12038 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12039 is_gimple_val, fb_rvalue) == GS_ERROR)
12040 {
12041 remove = true;
12042 break;
12043 }
12044 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12045 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12046 == INTEGER_CST))
12047 ;
12048 else if (code == OMP_TASKLOOP
12049 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12050 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12051 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12052 pre_p, NULL, false);
12053 break;
12054
12055 case OMP_CLAUSE_DEFAULT:
12056 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12057 break;
12058
12059 case OMP_CLAUSE_INCLUSIVE:
12060 case OMP_CLAUSE_EXCLUSIVE:
12061 decl = OMP_CLAUSE_DECL (c);
12062 {
12063 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12064 (splay_tree_key) decl);
12065 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12066 {
12067 error_at (OMP_CLAUSE_LOCATION (c),
12068 "%qD specified in %qs clause but not in %<inscan%> "
12069 "%<reduction%> clause on the containing construct",
12070 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12071 remove = true;
12072 }
12073 else
12074 {
12075 n->value |= GOVD_REDUCTION_INSCAN;
12076 if (outer_ctx->region_type == ORT_SIMD
12077 && outer_ctx->outer_context
12078 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12079 {
12080 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12081 (splay_tree_key) decl);
12082 if (n && (n->value & GOVD_REDUCTION) != 0)
12083 n->value |= GOVD_REDUCTION_INSCAN;
12084 }
12085 }
12086 }
12087 break;
12088
12089 case OMP_CLAUSE_NOHOST:
12090 default:
12091 gcc_unreachable ();
12092 }
12093
12094 if (code == OACC_DATA
12095 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12096 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12097 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12098 remove = true;
12099 if (remove)
12100 *list_p = OMP_CLAUSE_CHAIN (c);
12101 else
12102 list_p = &OMP_CLAUSE_CHAIN (c);
12103 }
12104
12105 ctx->clauses = *orig_list_p;
12106 gimplify_omp_ctxp = ctx;
12107 }
12108
12109 /* Return true if DECL is a candidate for shared to firstprivate
12110 optimization. We only consider non-addressable scalars that are
12111 not too big and are not references. */
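/* Hypothetical examples: a non-addressable "int x" qualifies, while
   "int a[100]" (not a register type), a variable whose address is
   taken, or a reference "int &r" does not.  */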
12112
12113 static bool
12114 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12115 {
12116 if (TREE_ADDRESSABLE (decl))
12117 return false;
12118 tree type = TREE_TYPE (decl);
12119 if (!is_gimple_reg_type (type)
12120 || TREE_CODE (type) == REFERENCE_TYPE
12121 || TREE_ADDRESSABLE (type))
12122 return false;
12123 /* Don't optimize too large decls, as each thread/task will have
12124 its own. */
12125 HOST_WIDE_INT len = int_size_in_bytes (type);
12126 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12127 return false;
12128 if (omp_privatize_by_reference (decl))
12129 return false;
12130 return true;
12131 }
12132
12133 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12134 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
12135 mark it as GOVD_WRITTEN in outer contexts. */
12136
12137 static void
12138 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12139 {
12140 for (; ctx; ctx = ctx->outer_context)
12141 {
12142 splay_tree_node n = splay_tree_lookup (ctx->variables,
12143 (splay_tree_key) decl);
12144 if (n == NULL)
12145 continue;
12146 else if (n->value & GOVD_SHARED)
12147 {
12148 n->value |= GOVD_WRITTEN;
12149 return;
12150 }
12151 else if (n->value & GOVD_DATA_SHARE_CLASS)
12152 return;
12153 }
12154 }
12155
12156 /* Helper callback for walk_gimple_seq to discover possible stores
12157 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12158 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12159 context. */
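/* Hypothetical example: for a store to "s.f[i]", the loop below peels
   the ARRAY_REF and COMPONENT_REF to reach the base decl "s" (and
   similarly peels a MEM_REF of an ADDR_EXPR back to its decl) before
   applying the optimizability check.  */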
12160
12161 static tree
12162 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12163 {
12164 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12165
12166 *walk_subtrees = 0;
12167 if (!wi->is_lhs)
12168 return NULL_TREE;
12169
12170 tree op = *tp;
12171 do
12172 {
12173 if (handled_component_p (op))
12174 op = TREE_OPERAND (op, 0);
12175 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12176 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12177 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12178 else
12179 break;
12180 }
12181 while (1);
12182 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12183 return NULL_TREE;
12184
12185 omp_mark_stores (gimplify_omp_ctxp, op);
12186 return NULL_TREE;
12187 }
12188
12189 /* Helper callback for walk_gimple_seq to discover possible stores
12190 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12191 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12192 context. */
12193
12194 static tree
12195 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12196 bool *handled_ops_p,
12197 struct walk_stmt_info *wi)
12198 {
12199 gimple *stmt = gsi_stmt (*gsi_p);
12200 switch (gimple_code (stmt))
12201 {
12202 /* Don't recurse on OpenMP constructs for which
12203 gimplify_adjust_omp_clauses already handled the bodies,
12204 except handle gimple_omp_for_pre_body. */
12205 case GIMPLE_OMP_FOR:
12206 *handled_ops_p = true;
12207 if (gimple_omp_for_pre_body (stmt))
12208 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12209 omp_find_stores_stmt, omp_find_stores_op, wi);
12210 break;
12211 case GIMPLE_OMP_PARALLEL:
12212 case GIMPLE_OMP_TASK:
12213 case GIMPLE_OMP_SECTIONS:
12214 case GIMPLE_OMP_SINGLE:
12215 case GIMPLE_OMP_SCOPE:
12216 case GIMPLE_OMP_TARGET:
12217 case GIMPLE_OMP_TEAMS:
12218 case GIMPLE_OMP_CRITICAL:
12219 *handled_ops_p = true;
12220 break;
12221 default:
12222 break;
12223 }
12224 return NULL_TREE;
12225 }
12226
12227 struct gimplify_adjust_omp_clauses_data
12228 {
12229 tree *list_p;
12230 gimple_seq *pre_p;
12231 };
12232
12233 /* Splay-tree callback: synthesize an implicit clause for each
12234 variable recorded in the context that was actually used there. */
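/* Hypothetical example: a function-local variable referenced inside
   "#pragma omp parallel" with no explicit clause is recorded as
   GOVD_SHARED | GOVD_SEEN during scanning; this callback then emits an
   implicit OMP_CLAUSE_SHARED for it, marked OMP_CLAUSE_SHARED_READONLY
   if no store to it was seen.  */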
12235
12236 static int
12237 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12238 {
12239 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12240 gimple_seq *pre_p
12241 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12242 tree decl = (tree) n->key;
12243 unsigned flags = n->value;
12244 enum omp_clause_code code;
12245 tree clause;
12246 bool private_debug;
12247
12248 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12249 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12250 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12251 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12252 return 0;
12253 if ((flags & GOVD_SEEN) == 0)
12254 return 0;
12255 if (flags & GOVD_DEBUG_PRIVATE)
12256 {
12257 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12258 private_debug = true;
12259 }
12260 else if (flags & GOVD_MAP)
12261 private_debug = false;
12262 else
12263 private_debug
12264 = lang_hooks.decls.omp_private_debug_clause (decl,
12265 !!(flags & GOVD_SHARED));
12266 if (private_debug)
12267 code = OMP_CLAUSE_PRIVATE;
12268 else if (flags & GOVD_MAP)
12269 {
12270 code = OMP_CLAUSE_MAP;
12271 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12272 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12273 {
12274 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12275 return 0;
12276 }
12277 if (VAR_P (decl)
12278 && DECL_IN_CONSTANT_POOL (decl)
12279 && !lookup_attribute ("omp declare target",
12280 DECL_ATTRIBUTES (decl)))
12281 {
12282 tree id = get_identifier ("omp declare target");
12283 DECL_ATTRIBUTES (decl)
12284 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12285 varpool_node *node = varpool_node::get (decl);
12286 if (node)
12287 {
12288 node->offloadable = 1;
12289 if (ENABLE_OFFLOADING)
12290 g->have_offload = true;
12291 }
12292 }
12293 }
12294 else if (flags & GOVD_SHARED)
12295 {
12296 if (is_global_var (decl))
12297 {
12298 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12299 while (ctx != NULL)
12300 {
12301 splay_tree_node on
12302 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12303 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12304 | GOVD_PRIVATE | GOVD_REDUCTION
12305 | GOVD_LINEAR | GOVD_MAP)) != 0)
12306 break;
12307 ctx = ctx->outer_context;
12308 }
12309 if (ctx == NULL)
12310 return 0;
12311 }
12312 code = OMP_CLAUSE_SHARED;
12313 /* Don't optimize shared into firstprivate for read-only vars
12314 on tasks with a depend clause; we shouldn't try to copy them
12315 until the dependencies are satisfied. */
12316 if (gimplify_omp_ctxp->has_depend)
12317 flags |= GOVD_WRITTEN;
12318 }
12319 else if (flags & GOVD_PRIVATE)
12320 code = OMP_CLAUSE_PRIVATE;
12321 else if (flags & GOVD_FIRSTPRIVATE)
12322 {
12323 code = OMP_CLAUSE_FIRSTPRIVATE;
12324 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12325 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12326 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12327 {
12328 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12329 "%<target%> construct", decl);
12330 return 0;
12331 }
12332 }
12333 else if (flags & GOVD_LASTPRIVATE)
12334 code = OMP_CLAUSE_LASTPRIVATE;
12335 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12336 return 0;
12337 else if (flags & GOVD_CONDTEMP)
12338 {
12339 code = OMP_CLAUSE__CONDTEMP_;
12340 gimple_add_tmp_var (decl);
12341 }
12342 else
12343 gcc_unreachable ();
12344
12345 if (((flags & GOVD_LASTPRIVATE)
12346 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12347 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12348 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12349
12350 tree chain = *list_p;
12351 clause = build_omp_clause (input_location, code);
12352 OMP_CLAUSE_DECL (clause) = decl;
12353 OMP_CLAUSE_CHAIN (clause) = chain;
12354 if (private_debug)
12355 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12356 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12357 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12358 else if (code == OMP_CLAUSE_SHARED
12359 && (flags & GOVD_WRITTEN) == 0
12360 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12361 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12362 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12363 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12364 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12365 {
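/* Zero-length array section case: map the pointed-to object with a
   size of zero and the maybe-zero-length flag, and chain a
   GOMP_MAP_FIRSTPRIVATE_POINTER node after it so that the pointer
   itself is passed and remapped on the device. */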
12366 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12367 OMP_CLAUSE_DECL (nc) = decl;
12368 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12369 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12370 OMP_CLAUSE_DECL (clause)
12371 = build_simple_mem_ref_loc (input_location, decl);
12372 OMP_CLAUSE_DECL (clause)
12373 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12374 build_int_cst (build_pointer_type (char_type_node), 0));
12375 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12376 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12377 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12378 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12379 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12380 OMP_CLAUSE_CHAIN (nc) = chain;
12381 OMP_CLAUSE_CHAIN (clause) = nc;
12382 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12383 gimplify_omp_ctxp = ctx->outer_context;
12384 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12385 pre_p, NULL, is_gimple_val, fb_rvalue);
12386 gimplify_omp_ctxp = ctx;
12387 }
12388 else if (code == OMP_CLAUSE_MAP)
12389 {
12390 int kind;
12391 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12392 switch (flags & (GOVD_MAP_TO_ONLY
12393 | GOVD_MAP_FORCE
12394 | GOVD_MAP_FORCE_PRESENT
12395 | GOVD_MAP_ALLOC_ONLY
12396 | GOVD_MAP_FROM_ONLY))
12397 {
12398 case 0:
12399 kind = GOMP_MAP_TOFROM;
12400 break;
12401 case GOVD_MAP_FORCE:
12402 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12403 break;
12404 case GOVD_MAP_TO_ONLY:
12405 kind = GOMP_MAP_TO;
12406 break;
12407 case GOVD_MAP_FROM_ONLY:
12408 kind = GOMP_MAP_FROM;
12409 break;
12410 case GOVD_MAP_ALLOC_ONLY:
12411 kind = GOMP_MAP_ALLOC;
12412 break;
12413 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12414 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12415 break;
12416 case GOVD_MAP_FORCE_PRESENT:
12417 kind = GOMP_MAP_FORCE_PRESENT;
12418 break;
12419 default:
12420 gcc_unreachable ();
12421 }
12422 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12423 /* Setting of the implicit flag for the runtime is currently disabled for
12424 OpenACC. */
12425 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12426 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12427 if (DECL_SIZE (decl)
12428 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12429 {
12430 tree decl2 = DECL_VALUE_EXPR (decl);
12431 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12432 decl2 = TREE_OPERAND (decl2, 0);
12433 gcc_assert (DECL_P (decl2));
12434 tree mem = build_simple_mem_ref (decl2);
12435 OMP_CLAUSE_DECL (clause) = mem;
12436 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12437 if (gimplify_omp_ctxp->outer_context)
12438 {
12439 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12440 omp_notice_variable (ctx, decl2, true);
12441 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12442 }
12443 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12444 OMP_CLAUSE_MAP);
12445 OMP_CLAUSE_DECL (nc) = decl;
12446 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12447 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12448 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12449 else
12450 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12451 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12452 OMP_CLAUSE_CHAIN (clause) = nc;
12453 }
12454 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12455 && omp_privatize_by_reference (decl))
12456 {
12457 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12458 OMP_CLAUSE_SIZE (clause)
12459 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12460 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12461 gimplify_omp_ctxp = ctx->outer_context;
12462 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12463 pre_p, NULL, is_gimple_val, fb_rvalue);
12464 gimplify_omp_ctxp = ctx;
12465 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12466 OMP_CLAUSE_MAP);
12467 OMP_CLAUSE_DECL (nc) = decl;
12468 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12469 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12470 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12471 OMP_CLAUSE_CHAIN (clause) = nc;
12472 }
12473 else
12474 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12475 }
12476 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12477 {
12478 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12479 OMP_CLAUSE_DECL (nc) = decl;
12480 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12481 OMP_CLAUSE_CHAIN (nc) = chain;
12482 OMP_CLAUSE_CHAIN (clause) = nc;
12483 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12484 gimplify_omp_ctxp = ctx->outer_context;
12485 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12486 (ctx->region_type & ORT_ACC) != 0);
12487 gimplify_omp_ctxp = ctx;
12488 }
12489 *list_p = clause;
12490 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12491 gimplify_omp_ctxp = ctx->outer_context;
12492 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12493 in simd. Those are only added for the local vars inside of simd body
12494 and they don't need to be e.g. default constructible. */
12495 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12496 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12497 (ctx->region_type & ORT_ACC) != 0);
12498 if (gimplify_omp_ctxp)
12499 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12500 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12501 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12502 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12503 true);
12504 gimplify_omp_ctxp = ctx;
12505 return 0;
12506 }
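
/* As a rough illustration of what the callback above produces (not an
   exact dump): given

     int x = 0;
     #pragma omp parallel
     x++;

   X is recorded in the splay tree as GOVD_SHARED | GOVD_SEEN
   | GOVD_WRITTEN, so an implicit shared(x) clause is built; had X only
   been read, OMP_CLAUSE_SHARED_READONLY would be set on it instead,
   allowing later passes to treat the variable as firstprivate. */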
12507
12508 static void
12509 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12510 enum tree_code code)
12511 {
12512 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12513 tree *orig_list_p = list_p;
12514 tree c, decl;
12515 bool has_inscan_reductions = false;
12516
12517 if (body)
12518 {
12519 struct gimplify_omp_ctx *octx;
12520 for (octx = ctx; octx; octx = octx->outer_context)
12521 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12522 break;
12523 if (octx)
12524 {
12525 struct walk_stmt_info wi;
12526 memset (&wi, 0, sizeof (wi));
12527 walk_gimple_seq (body, omp_find_stores_stmt,
12528 omp_find_stores_op, &wi);
12529 }
12530 }
12531
12532 if (ctx->add_safelen1)
12533 {
12534 /* If there are VLAs in the body of the simd loop, prevent
12535 vectorization by adding an implicit safelen(1) clause. */
12536 gcc_assert (ctx->region_type == ORT_SIMD);
12537 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12538 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12539 OMP_CLAUSE_CHAIN (c) = *list_p;
12540 *list_p = c;
12541 list_p = &OMP_CLAUSE_CHAIN (c);
12542 }
12543
12544 if (ctx->region_type == ORT_WORKSHARE
12545 && ctx->outer_context
12546 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12547 {
12548 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12550 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12551 {
12552 decl = OMP_CLAUSE_DECL (c);
12553 splay_tree_node n
12554 = splay_tree_lookup (ctx->outer_context->variables,
12555 (splay_tree_key) decl);
12556 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12557 (splay_tree_key) decl));
12558 omp_add_variable (ctx, decl, n->value);
12559 tree c2 = copy_node (c);
12560 OMP_CLAUSE_CHAIN (c2) = *list_p;
12561 *list_p = c2;
12562 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12563 continue;
12564 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12565 OMP_CLAUSE_FIRSTPRIVATE);
12566 OMP_CLAUSE_DECL (c2) = decl;
12567 OMP_CLAUSE_CHAIN (c2) = *list_p;
12568 *list_p = c2;
12569 }
12570 }
12571
12572 tree attach_list = NULL_TREE;
12573 tree *attach_tail = &attach_list;
12574
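/* Walk the explicit clauses: prune those whose variables were never
   seen in the region, diagnose invalid combinations, and peel off
   "attach" map nodes into ATTACH_LIST for splicing at the end. */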
12575 while ((c = *list_p) != NULL)
12576 {
12577 splay_tree_node n;
12578 bool remove = false;
12579 bool move_attach = false;
12580
12581 switch (OMP_CLAUSE_CODE (c))
12582 {
12583 case OMP_CLAUSE_FIRSTPRIVATE:
12584 if ((ctx->region_type & ORT_TARGET)
12585 && (ctx->region_type & ORT_ACC) == 0
12586 && TYPE_ATOMIC (strip_array_types
12587 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12588 {
12589 error_at (OMP_CLAUSE_LOCATION (c),
12590 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12591 "%<target%> construct", OMP_CLAUSE_DECL (c));
12592 remove = true;
12593 break;
12594 }
12595 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12596 {
12597 decl = OMP_CLAUSE_DECL (c);
12598 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12599 if ((n->value & GOVD_MAP) != 0)
12600 {
12601 remove = true;
12602 break;
12603 }
12604 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12605 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12606 }
12607 /* FALLTHRU */
12608 case OMP_CLAUSE_PRIVATE:
12609 case OMP_CLAUSE_SHARED:
12610 case OMP_CLAUSE_LINEAR:
12611 decl = OMP_CLAUSE_DECL (c);
12612 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12613 remove = !(n->value & GOVD_SEEN);
12614 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12615 && code == OMP_PARALLEL
12616 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12617 remove = true;
12618 if (! remove)
12619 {
12620 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12621 if ((n->value & GOVD_DEBUG_PRIVATE)
12622 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12623 {
12624 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12625 || ((n->value & GOVD_DATA_SHARE_CLASS)
12626 == GOVD_SHARED));
12627 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12628 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12629 }
12630 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12631 && ctx->has_depend
12632 && DECL_P (decl))
12633 n->value |= GOVD_WRITTEN;
12634 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12635 && (n->value & GOVD_WRITTEN) == 0
12636 && DECL_P (decl)
12637 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12638 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12639 else if (DECL_P (decl)
12640 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12641 && (n->value & GOVD_WRITTEN) != 0)
12642 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12643 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12644 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12645 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12646 }
12647 else
12648 n->value &= ~GOVD_EXPLICIT;
12649 break;
12650
12651 case OMP_CLAUSE_LASTPRIVATE:
12652 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12653 accurately reflect the presence of a FIRSTPRIVATE clause. */
12654 decl = OMP_CLAUSE_DECL (c);
12655 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12656 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12657 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12658 if (code == OMP_DISTRIBUTE
12659 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12660 {
12661 remove = true;
12662 error_at (OMP_CLAUSE_LOCATION (c),
12663 "same variable used in %<firstprivate%> and "
12664 "%<lastprivate%> clauses on %<distribute%> "
12665 "construct");
12666 }
12667 if (!remove
12668 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12669 && DECL_P (decl)
12670 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12671 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12672 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12673 remove = true;
12674 break;
12675
12676 case OMP_CLAUSE_ALIGNED:
12677 decl = OMP_CLAUSE_DECL (c);
12678 if (!is_global_var (decl))
12679 {
12680 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12681 remove = n == NULL || !(n->value & GOVD_SEEN);
12682 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12683 {
12684 struct gimplify_omp_ctx *octx;
12685 if (n != NULL
12686 && (n->value & (GOVD_DATA_SHARE_CLASS
12687 & ~GOVD_FIRSTPRIVATE)))
12688 remove = true;
12689 else
12690 for (octx = ctx->outer_context; octx;
12691 octx = octx->outer_context)
12692 {
12693 n = splay_tree_lookup (octx->variables,
12694 (splay_tree_key) decl);
12695 if (n == NULL)
12696 continue;
12697 if (n->value & GOVD_LOCAL)
12698 break;
12699 /* We have to avoid assigning a shared variable
12700 to itself when trying to add
12701 __builtin_assume_aligned. */
12702 if (n->value & GOVD_SHARED)
12703 {
12704 remove = true;
12705 break;
12706 }
12707 }
12708 }
12709 }
12710 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12711 {
12712 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12713 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12714 remove = true;
12715 }
12716 break;
12717
12718 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12719 decl = OMP_CLAUSE_DECL (c);
12720 while (TREE_CODE (decl) == INDIRECT_REF
12721 || TREE_CODE (decl) == ARRAY_REF)
12722 decl = TREE_OPERAND (decl, 0);
12723 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12724 remove = n == NULL || !(n->value & GOVD_SEEN);
12725 break;
12726
12727 case OMP_CLAUSE_IS_DEVICE_PTR:
12728 case OMP_CLAUSE_NONTEMPORAL:
12729 decl = OMP_CLAUSE_DECL (c);
12730 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12731 remove = n == NULL || !(n->value & GOVD_SEEN);
12732 break;
12733
12734 case OMP_CLAUSE_MAP:
12735 if (code == OMP_TARGET_EXIT_DATA
12736 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12737 {
12738 remove = true;
12739 break;
12740 }
12741 /* If we have a target region, we can push all the attaches to the
12742 end of the list (we may have standalone "attach" operations
12743 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12744 the attachment point AND the pointed-to block have been mapped).
12745 If we have something else, e.g. "enter data", we need to keep
12746 "attach" nodes together with the previous node they attach to so
12747 that separate "exit data" operations work properly (see
12748 libgomp/target.c). */
12749 if ((ctx->region_type & ORT_TARGET) != 0
12750 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12751 || (OMP_CLAUSE_MAP_KIND (c)
12752 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12753 move_attach = true;
12754 decl = OMP_CLAUSE_DECL (c);
12755 /* Data clauses associated with reductions must be
12756 compatible with present_or_copy. Warn and adjust the clause
12757 if that is not the case. */
12758 if (ctx->region_type == ORT_ACC_PARALLEL
12759 || ctx->region_type == ORT_ACC_SERIAL)
12760 {
12761 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12762 n = NULL;
12763
12764 if (DECL_P (t))
12765 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12766
12767 if (n && (n->value & GOVD_REDUCTION))
12768 {
12769 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12770
12771 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12772 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12773 && kind != GOMP_MAP_FORCE_PRESENT
12774 && kind != GOMP_MAP_POINTER)
12775 {
12776 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12777 "incompatible data clause with reduction "
12778 "on %qE; promoting to %<present_or_copy%>",
12779 DECL_NAME (t));
12780 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12781 }
12782 }
12783 }
12784 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12785 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12786 {
12787 remove = true;
12788 break;
12789 }
12790 if (!DECL_P (decl))
12791 {
12792 if ((ctx->region_type & ORT_TARGET) != 0
12793 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12794 {
12795 if (TREE_CODE (decl) == INDIRECT_REF
12796 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12797 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12798 == REFERENCE_TYPE))
12799 decl = TREE_OPERAND (decl, 0);
12800 if (TREE_CODE (decl) == COMPONENT_REF)
12801 {
12802 while (TREE_CODE (decl) == COMPONENT_REF)
12803 decl = TREE_OPERAND (decl, 0);
12804 if (DECL_P (decl))
12805 {
12806 n = splay_tree_lookup (ctx->variables,
12807 (splay_tree_key) decl);
12808 if (!(n->value & GOVD_SEEN))
12809 remove = true;
12810 }
12811 }
12812 }
12813 break;
12814 }
12815 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12816 if ((ctx->region_type & ORT_TARGET) != 0
12817 && !(n->value & GOVD_SEEN)
12818 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12819 && (!is_global_var (decl)
12820 || !lookup_attribute ("omp declare target link",
12821 DECL_ATTRIBUTES (decl))))
12822 {
12823 remove = true;
12824 /* For struct element mapping, if the struct is never referenced
12825 in the target block and none of the mappings has an always
12826 modifier, remove all the struct element mappings, which
12827 immediately follow the GOMP_MAP_STRUCT map clause. */
12828 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12829 {
12830 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12831 while (cnt--)
12832 OMP_CLAUSE_CHAIN (c)
12833 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12834 }
12835 }
12836 else if (DECL_SIZE (decl)
12837 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12838 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12839 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12840 && (OMP_CLAUSE_MAP_KIND (c)
12841 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12842 {
12843 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12844 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12845 INTEGER_CST. */
12846 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12847
12848 tree decl2 = DECL_VALUE_EXPR (decl);
12849 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12850 decl2 = TREE_OPERAND (decl2, 0);
12851 gcc_assert (DECL_P (decl2));
12852 tree mem = build_simple_mem_ref (decl2);
12853 OMP_CLAUSE_DECL (c) = mem;
12854 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12855 if (ctx->outer_context)
12856 {
12857 omp_notice_variable (ctx->outer_context, decl2, true);
12858 omp_notice_variable (ctx->outer_context,
12859 OMP_CLAUSE_SIZE (c), true);
12860 }
12861 if (((ctx->region_type & ORT_TARGET) != 0
12862 || !ctx->target_firstprivatize_array_bases)
12863 && ((n->value & GOVD_SEEN) == 0
12864 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12865 {
12866 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12867 OMP_CLAUSE_MAP);
12868 OMP_CLAUSE_DECL (nc) = decl;
12869 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12870 if (ctx->target_firstprivatize_array_bases)
12871 OMP_CLAUSE_SET_MAP_KIND (nc,
12872 GOMP_MAP_FIRSTPRIVATE_POINTER);
12873 else
12874 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12875 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12876 OMP_CLAUSE_CHAIN (c) = nc;
12877 c = nc;
12878 }
12879 }
12880 else
12881 {
12882 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12883 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12884 gcc_assert ((n->value & GOVD_SEEN) == 0
12885 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12886 == 0));
12887 }
12888 break;
12889
12890 case OMP_CLAUSE_TO:
12891 case OMP_CLAUSE_FROM:
12892 case OMP_CLAUSE__CACHE_:
12893 decl = OMP_CLAUSE_DECL (c);
12894 if (!DECL_P (decl))
12895 break;
12896 if (DECL_SIZE (decl)
12897 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12898 {
12899 tree decl2 = DECL_VALUE_EXPR (decl);
12900 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12901 decl2 = TREE_OPERAND (decl2, 0);
12902 gcc_assert (DECL_P (decl2));
12903 tree mem = build_simple_mem_ref (decl2);
12904 OMP_CLAUSE_DECL (c) = mem;
12905 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12906 if (ctx->outer_context)
12907 {
12908 omp_notice_variable (ctx->outer_context, decl2, true);
12909 omp_notice_variable (ctx->outer_context,
12910 OMP_CLAUSE_SIZE (c), true);
12911 }
12912 }
12913 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12914 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12915 break;
12916
12917 case OMP_CLAUSE_REDUCTION:
12918 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12919 {
12920 decl = OMP_CLAUSE_DECL (c);
12921 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12922 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
12923 {
12924 remove = true;
12925 error_at (OMP_CLAUSE_LOCATION (c),
12926 "%qD specified in %<inscan%> %<reduction%> clause "
12927 "but not in %<scan%> directive clause", decl);
12928 break;
12929 }
12930 has_inscan_reductions = true;
12931 }
12932 /* FALLTHRU */
12933 case OMP_CLAUSE_IN_REDUCTION:
12934 case OMP_CLAUSE_TASK_REDUCTION:
12935 decl = OMP_CLAUSE_DECL (c);
12936 /* OpenACC reductions need a present_or_copy data clause.
12937 Add one if necessary. Emit an error when the reduction is private. */
12938 if (ctx->region_type == ORT_ACC_PARALLEL
12939 || ctx->region_type == ORT_ACC_SERIAL)
12940 {
12941 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12942 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12943 {
12944 remove = true;
12945 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
12946 "reduction on %qE", DECL_NAME (decl));
12947 }
12948 else if ((n->value & GOVD_MAP) == 0)
12949 {
12950 tree next = OMP_CLAUSE_CHAIN (c);
12951 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
12952 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
12953 OMP_CLAUSE_DECL (nc) = decl;
12954 OMP_CLAUSE_CHAIN (c) = nc;
12955 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12956 (ctx->region_type
12957 & ORT_ACC) != 0);
12958 while (1)
12959 {
12960 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
12961 if (OMP_CLAUSE_CHAIN (nc) == NULL)
12962 break;
12963 nc = OMP_CLAUSE_CHAIN (nc);
12964 }
12965 OMP_CLAUSE_CHAIN (nc) = next;
12966 n->value |= GOVD_MAP;
12967 }
12968 }
12969 if (DECL_P (decl)
12970 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12971 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12972 break;
12973
12974 case OMP_CLAUSE_ALLOCATE:
12975 decl = OMP_CLAUSE_DECL (c);
12976 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12977 if (n != NULL && !(n->value & GOVD_SEEN))
12978 {
12979 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
12980 != 0
12981 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
12982 remove = true;
12983 }
12984 if (!remove
12985 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12986 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
12987 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
12988 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
12989 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
12990 {
12991 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12992 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
12993 if (n == NULL)
12994 {
12995 enum omp_clause_default_kind default_kind
12996 = ctx->default_kind;
12997 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
12998 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12999 true);
13000 ctx->default_kind = default_kind;
13001 }
13002 else
13003 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13004 true);
13005 }
13006 break;
13007
13008 case OMP_CLAUSE_COPYIN:
13009 case OMP_CLAUSE_COPYPRIVATE:
13010 case OMP_CLAUSE_IF:
13011 case OMP_CLAUSE_NUM_THREADS:
13012 case OMP_CLAUSE_NUM_TEAMS:
13013 case OMP_CLAUSE_THREAD_LIMIT:
13014 case OMP_CLAUSE_DIST_SCHEDULE:
13015 case OMP_CLAUSE_DEVICE:
13016 case OMP_CLAUSE_SCHEDULE:
13017 case OMP_CLAUSE_NOWAIT:
13018 case OMP_CLAUSE_ORDERED:
13019 case OMP_CLAUSE_DEFAULT:
13020 case OMP_CLAUSE_UNTIED:
13021 case OMP_CLAUSE_COLLAPSE:
13022 case OMP_CLAUSE_FINAL:
13023 case OMP_CLAUSE_MERGEABLE:
13024 case OMP_CLAUSE_PROC_BIND:
13025 case OMP_CLAUSE_SAFELEN:
13026 case OMP_CLAUSE_SIMDLEN:
13027 case OMP_CLAUSE_DEPEND:
13028 case OMP_CLAUSE_DOACROSS:
13029 case OMP_CLAUSE_PRIORITY:
13030 case OMP_CLAUSE_GRAINSIZE:
13031 case OMP_CLAUSE_NUM_TASKS:
13032 case OMP_CLAUSE_NOGROUP:
13033 case OMP_CLAUSE_THREADS:
13034 case OMP_CLAUSE_SIMD:
13035 case OMP_CLAUSE_FILTER:
13036 case OMP_CLAUSE_HINT:
13037 case OMP_CLAUSE_DEFAULTMAP:
13038 case OMP_CLAUSE_ORDER:
13039 case OMP_CLAUSE_BIND:
13040 case OMP_CLAUSE_DETACH:
13041 case OMP_CLAUSE_USE_DEVICE_PTR:
13042 case OMP_CLAUSE_USE_DEVICE_ADDR:
13043 case OMP_CLAUSE_ASYNC:
13044 case OMP_CLAUSE_WAIT:
13045 case OMP_CLAUSE_INDEPENDENT:
13046 case OMP_CLAUSE_NUM_GANGS:
13047 case OMP_CLAUSE_NUM_WORKERS:
13048 case OMP_CLAUSE_VECTOR_LENGTH:
13049 case OMP_CLAUSE_GANG:
13050 case OMP_CLAUSE_WORKER:
13051 case OMP_CLAUSE_VECTOR:
13052 case OMP_CLAUSE_AUTO:
13053 case OMP_CLAUSE_SEQ:
13054 case OMP_CLAUSE_TILE:
13055 case OMP_CLAUSE_IF_PRESENT:
13056 case OMP_CLAUSE_FINALIZE:
13057 case OMP_CLAUSE_INCLUSIVE:
13058 case OMP_CLAUSE_EXCLUSIVE:
13059 break;
13060
13061 case OMP_CLAUSE_NOHOST:
13062 default:
13063 gcc_unreachable ();
13064 }
13065
13066 if (remove)
13067 *list_p = OMP_CLAUSE_CHAIN (c);
13068 else if (move_attach)
13069 {
13070 /* Remove attach node from here, separate out into its own list. */
13071 *attach_tail = c;
13072 *list_p = OMP_CLAUSE_CHAIN (c);
13073 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13074 attach_tail = &OMP_CLAUSE_CHAIN (c);
13075 }
13076 else
13077 list_p = &OMP_CLAUSE_CHAIN (c);
13078 }
13079
13080 /* Splice attach nodes at the end of the list. */
13081 if (attach_list)
13082 {
13083 *list_p = attach_list;
13084 list_p = attach_tail;
13085 }
13086
13087 /* Add in any implicit data sharing. */
13088 struct gimplify_adjust_omp_clauses_data data;
13089 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13090 {
13091 /* OpenMP. Implicit clauses are added near the start of the clause
13092 list, immediately after any leading non-map clauses. */
13093 tree *implicit_add_list_p = orig_list_p;
13094 while (*implicit_add_list_p
13095 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13096 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13097 data.list_p = implicit_add_list_p;
13098 }
13099 else
13100 /* OpenACC. */
13101 data.list_p = list_p;
13102 data.pre_p = pre_p;
13103 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13104
13105 if (has_inscan_reductions)
13106 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13107 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13108 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13109 {
13110 error_at (OMP_CLAUSE_LOCATION (c),
13111 "%<inscan%> %<reduction%> clause used together with "
13112 "%<linear%> clause for a variable other than loop "
13113 "iterator");
13114 break;
13115 }
13116
13117 gimplify_omp_ctxp = ctx->outer_context;
13118 delete_omp_context (ctx);
13119 }
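
/* All of the OMP/OACC constructs handled below follow the same two-phase
   pattern: gimplify_scan_omp_clauses opens a gimplify_omp_ctx and records
   the explicit clauses, the body (if any) is gimplified within that
   context, and gimplify_adjust_omp_clauses above prunes unused clauses,
   adds the implicit ones and pops the context. */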
13120
13121 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13122 -1 if not yet known (simd is involved; it won't be known until vectorization)
13123 and 1 if they do. If SCORES is non-NULL, it should point to an array
13124 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
13125 of the CONSTRUCTS (position -1 if it will never match) followed by
13126 the number of constructs in the OpenMP context construct trait. If the
13127 score depends on whether it will be in a declare simd clone or not,
13128 the function returns 2 and there will be two sets of the scores, the first
13129 one for the case that it is not in a declare simd clone, the other
13130 for the case that it is in a declare simd clone. */
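
/* For example, when gimplifying code nested in a
   "#pragma omp target teams" region, the context construct trait is
   { target, teams }: a CONSTRUCTS selector naming exactly those
   constructs yields 1, one naming a construct not in the nest (say
   parallel) yields 0, and one involving simd may yield -1 because that
   part is only resolved during vectorization. */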
13131
13132 int
13133 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13134 int *scores)
13135 {
13136 int matched = 0, cnt = 0;
13137 bool simd_seen = false;
13138 bool target_seen = false;
13139 int declare_simd_cnt = -1;
13140 auto_vec<enum tree_code, 16> codes;
13141 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13142 {
13143 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13144 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13145 == ORT_TARGET && ctx->code == OMP_TARGET)
13146 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13147 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13148 || (ctx->region_type == ORT_SIMD
13149 && ctx->code == OMP_SIMD
13150 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13151 {
13152 ++cnt;
13153 if (scores)
13154 codes.safe_push (ctx->code);
13155 else if (matched < nconstructs && ctx->code == constructs[matched])
13156 {
13157 if (ctx->code == OMP_SIMD)
13158 {
13159 if (matched)
13160 return 0;
13161 simd_seen = true;
13162 }
13163 ++matched;
13164 }
13165 if (ctx->code == OMP_TARGET)
13166 {
13167 if (scores == NULL)
13168 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13169 target_seen = true;
13170 break;
13171 }
13172 }
13173 else if (ctx->region_type == ORT_WORKSHARE
13174 && ctx->code == OMP_LOOP
13175 && ctx->outer_context
13176 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13177 && ctx->outer_context->outer_context
13178 && ctx->outer_context->outer_context->code == OMP_LOOP
13179 && ctx->outer_context->outer_context->distribute)
13180 ctx = ctx->outer_context->outer_context;
13181 ctx = ctx->outer_context;
13182 }
13183 if (!target_seen
13184 && lookup_attribute ("omp declare simd",
13185 DECL_ATTRIBUTES (current_function_decl)))
13186 {
13187 /* Declare simd is a maybe case; it is supposed to be added only to the
13188 clones created by omp-simd-clone.cc, not to the base function. */
13189 declare_simd_cnt = cnt++;
13190 if (scores)
13191 codes.safe_push (OMP_SIMD);
13192 else if (cnt == 0
13193 && constructs[0] == OMP_SIMD)
13194 {
13195 gcc_assert (matched == 0);
13196 simd_seen = true;
13197 if (++matched == nconstructs)
13198 return -1;
13199 }
13200 }
13201 if (tree attr = lookup_attribute ("omp declare variant variant",
13202 DECL_ATTRIBUTES (current_function_decl)))
13203 {
13204 enum tree_code variant_constructs[5];
13205 int variant_nconstructs = 0;
13206 if (!target_seen)
13207 variant_nconstructs
13208 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13209 variant_constructs);
13210 for (int i = 0; i < variant_nconstructs; i++)
13211 {
13212 ++cnt;
13213 if (scores)
13214 codes.safe_push (variant_constructs[i]);
13215 else if (matched < nconstructs
13216 && variant_constructs[i] == constructs[matched])
13217 {
13218 if (variant_constructs[i] == OMP_SIMD)
13219 {
13220 if (matched)
13221 return 0;
13222 simd_seen = true;
13223 }
13224 ++matched;
13225 }
13226 }
13227 }
13228 if (!target_seen
13229 && lookup_attribute ("omp declare target block",
13230 DECL_ATTRIBUTES (current_function_decl)))
13231 {
13232 if (scores)
13233 codes.safe_push (OMP_TARGET);
13234 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13235 ++matched;
13236 }
13237 if (scores)
13238 {
13239 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13240 {
13241 int j = codes.length () - 1;
13242 for (int i = nconstructs - 1; i >= 0; i--)
13243 {
13244 while (j >= 0
13245 && (pass != 0 || declare_simd_cnt != j)
13246 && constructs[i] != codes[j])
13247 --j;
13248 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13249 *scores++ = j - 1;
13250 else
13251 *scores++ = j;
13252 }
13253 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13254 ? codes.length () - 1 : codes.length ());
13255 }
13256 return declare_simd_cnt == -1 ? 1 : 2;
13257 }
13258 if (matched == nconstructs)
13259 return simd_seen ? -1 : 1;
13260 return 0;
13261 }
13262
13263 /* Gimplify OACC_CACHE. */
13264
13265 static void
13266 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13267 {
13268 tree expr = *expr_p;
13269
13270 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13271 OACC_CACHE);
13272 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13273 OACC_CACHE);
13274
13275 /* TODO: Do something sensible with this information. */
13276
13277 *expr_p = NULL_TREE;
13278 }
13279
13280 /* Helper function of gimplify_oacc_declare. If required, translate the
13281 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind. The entry
13282 kind replaces the one in CLAUSE, while the exit kind is used in a new
13283 omp_clause and returned to the caller. */
13284
13285 static tree
13286 gimplify_oacc_declare_1 (tree clause)
13287 {
13288 HOST_WIDE_INT kind, new_op;
13289 bool ret = false;
13290 tree c = NULL;
13291
13292 kind = OMP_CLAUSE_MAP_KIND (clause);
13293
13294 switch (kind)
13295 {
13296 case GOMP_MAP_ALLOC:
13297 new_op = GOMP_MAP_RELEASE;
13298 ret = true;
13299 break;
13300
13301 case GOMP_MAP_FROM:
13302 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13303 new_op = GOMP_MAP_FROM;
13304 ret = true;
13305 break;
13306
13307 case GOMP_MAP_TOFROM:
13308 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13309 new_op = GOMP_MAP_FROM;
13310 ret = true;
13311 break;
13312
13313 case GOMP_MAP_DEVICE_RESIDENT:
13314 case GOMP_MAP_FORCE_DEVICEPTR:
13315 case GOMP_MAP_FORCE_PRESENT:
13316 case GOMP_MAP_LINK:
13317 case GOMP_MAP_POINTER:
13318 case GOMP_MAP_TO:
13319 break;
13320
13321 default:
13322 gcc_unreachable ();
13323 break;
13324 }
13325
13326 if (ret)
13327 {
13328 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13329 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13330 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13331 }
13332
13333 return c;
13334 }
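
/* So, e.g., a "declare copy(x)" clause arrives here as GOMP_MAP_TOFROM
   and is split into GOMP_MAP_TO at the point of declaration plus a
   returned GOMP_MAP_FROM clause to be replayed at function exit, while
   "create" (GOMP_MAP_ALLOC) gets a matching GOMP_MAP_RELEASE on exit. */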
13335
13336 /* Gimplify OACC_DECLARE. */
13337
13338 static void
13339 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13340 {
13341 tree expr = *expr_p;
13342 gomp_target *stmt;
13343 tree clauses, t, decl;
13344
13345 clauses = OACC_DECLARE_CLAUSES (expr);
13346
13347 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13348 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13349
13350 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13351 {
13352 decl = OMP_CLAUSE_DECL (t);
13353
13354 if (TREE_CODE (decl) == MEM_REF)
13355 decl = TREE_OPERAND (decl, 0);
13356
13357 if (VAR_P (decl) && !is_oacc_declared (decl))
13358 {
13359 tree attr = get_identifier ("oacc declare target");
13360 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13361 DECL_ATTRIBUTES (decl));
13362 }
13363
13364 if (VAR_P (decl)
13365 && !is_global_var (decl)
13366 && DECL_CONTEXT (decl) == current_function_decl)
13367 {
13368 tree c = gimplify_oacc_declare_1 (t);
13369 if (c)
13370 {
13371 if (oacc_declare_returns == NULL)
13372 oacc_declare_returns = new hash_map<tree, tree>;
13373
13374 oacc_declare_returns->put (decl, c);
13375 }
13376 }
13377
13378 if (gimplify_omp_ctxp)
13379 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13380 }
13381
13382 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13383 clauses);
13384
13385 gimplify_seq_add_stmt (pre_p, stmt);
13386
13387 *expr_p = NULL_TREE;
13388 }
13389
13390 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13391 gimplification of the body, as well as scanning the body for used
13392 variables. We need to do this scan now, because variable-sized
13393 decls will be decomposed during gimplification. */
13394
13395 static void
13396 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13397 {
13398 tree expr = *expr_p;
13399 gimple *g;
13400 gimple_seq body = NULL;
13401
13402 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13403 OMP_PARALLEL_COMBINED (expr)
13404 ? ORT_COMBINED_PARALLEL
13405 : ORT_PARALLEL, OMP_PARALLEL);
13406
13407 push_gimplify_context ();
13408
13409 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13410 if (gimple_code (g) == GIMPLE_BIND)
13411 pop_gimplify_context (g);
13412 else
13413 pop_gimplify_context (NULL);
13414
13415 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13416 OMP_PARALLEL);
13417
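/* The child function and data argument of the GIMPLE_OMP_PARALLEL are
   deliberately left NULL_TREE here; they are only created later, when
   omp-low.cc scans the region and outlines its body. */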
13418 g = gimple_build_omp_parallel (body,
13419 OMP_PARALLEL_CLAUSES (expr),
13420 NULL_TREE, NULL_TREE);
13421 if (OMP_PARALLEL_COMBINED (expr))
13422 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13423 gimplify_seq_add_stmt (pre_p, g);
13424 *expr_p = NULL_TREE;
13425 }
13426
13427 /* Gimplify the contents of an OMP_TASK statement. This involves
13428 gimplification of the body, as well as scanning the body for used
13429 variables. We need to do this scan now, because variable-sized
13430 decls will be decomposed during gimplification. */
13431
13432 static void
13433 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13434 {
13435 tree expr = *expr_p;
13436 gimple *g;
13437 gimple_seq body = NULL;
13438 bool nowait = false;
13439 bool has_depend = false;
13440
13441 if (OMP_TASK_BODY (expr) == NULL_TREE)
13442 {
13443 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13444 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13445 {
13446 has_depend = true;
13447 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13448 {
13449 error_at (OMP_CLAUSE_LOCATION (c),
13450 "%<mutexinoutset%> kind in %<depend%> clause on a "
13451 "%<taskwait%> construct");
13452 break;
13453 }
13454 }
13455 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13456 nowait = true;
13457 if (nowait && !has_depend)
13458 {
13459 error_at (EXPR_LOCATION (expr),
13460 "%<taskwait%> construct with %<nowait%> clause but no "
13461 "%<depend%> clauses");
13462 *expr_p = NULL_TREE;
13463 return;
13464 }
13465 }
13466
13467 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13468 omp_find_clause (OMP_TASK_CLAUSES (expr),
13469 OMP_CLAUSE_UNTIED)
13470 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13471
13472 if (OMP_TASK_BODY (expr))
13473 {
13474 push_gimplify_context ();
13475
13476 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13477 if (gimple_code (g) == GIMPLE_BIND)
13478 pop_gimplify_context (g);
13479 else
13480 pop_gimplify_context (NULL);
13481 }
13482
13483 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13484 OMP_TASK);
13485
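/* An OMP_TASK without a body represents a stand-alone
   "#pragma omp taskwait" with depend/nowait clauses; mark the
   GIMPLE_OMP_TASK so that later passes expand it as a taskwait
   instead of spawning a task. */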
13486 g = gimple_build_omp_task (body,
13487 OMP_TASK_CLAUSES (expr),
13488 NULL_TREE, NULL_TREE,
13489 NULL_TREE, NULL_TREE, NULL_TREE);
13490 if (OMP_TASK_BODY (expr) == NULL_TREE)
13491 gimple_omp_task_set_taskwait_p (g, true);
13492 gimplify_seq_add_stmt (pre_p, g);
13493 *expr_p = NULL_TREE;
13494 }
13495
13496 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13497 force it into a temporary initialized in PRE_P and add a firstprivate
13498 clause to ORIG_FOR_STMT. */
13499
13500 static void
13501 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13502 tree orig_for_stmt)
13503 {
13504 if (*tp == NULL || is_gimple_constant (*tp))
13505 return;
13506
13507 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13508 /* A reference-to-pointer conversion is considered useless,
13509 but it is significant for the firstprivate clause. Force
13510 the conversion here. */
13511 if (type
13512 && TREE_CODE (type) == POINTER_TYPE
13513 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13514 {
13515 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13516 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13517 gimplify_and_add (m, pre_p);
13518 *tp = v;
13519 }
13520
13521 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13522 OMP_CLAUSE_DECL (c) = *tp;
13523 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13524 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13525 }
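
/* For instance, with "#pragma omp taskloop" iterating while i < N * 2,
   the bound N * 2 is evaluated into a temporary ahead of the taskloop
   and a firstprivate clause for that temporary is added, so a deferred
   task does not re-evaluate the expression. */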
13526
13527 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
13528 null OMP_ORDERED_BODY inside OMP_FOR's body. */
13529
13530 static tree
13531 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13532 {
13533 switch (TREE_CODE (*tp))
13534 {
13535 case OMP_ORDERED:
13536 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13537 return *tp;
13538 break;
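/* Stop at nested simd/parallel/target regions: a stand-alone
   ordered construct inside those binds to a loop of that region,
   not to the OMP_FOR being checked. */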
13539 case OMP_SIMD:
13540 case OMP_PARALLEL:
13541 case OMP_TARGET:
13542 *walk_subtrees = 0;
13543 break;
13544 default:
13545 break;
13546 }
13547 return NULL_TREE;
13548 }
13549
13550 /* Gimplify the gross structure of an OMP_FOR statement. */
13551
13552 static enum gimplify_status
13553 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13554 {
13555 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13556 enum gimplify_status ret = GS_ALL_DONE;
13557 enum gimplify_status tret;
13558 gomp_for *gfor;
13559 gimple_seq for_body, for_pre_body;
13560 int i;
13561 bitmap has_decl_expr = NULL;
13562 enum omp_region_type ort = ORT_WORKSHARE;
13563 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13564
13565 orig_for_stmt = for_stmt = *expr_p;
13566
13567 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13568 != NULL_TREE);
13569 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13570 {
13571 tree *data[4] = { NULL, NULL, NULL, NULL };
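/* find_combined_omp_for records in DATA where the nested directives
   of the combined construct live; a non-NULL DATA[0] in particular
   flags statements sitting in between the directives, which have to
   be moved around inner_for_stmt below. */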
13572 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13573 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13574 find_combined_omp_for, data, NULL);
13575 if (inner_for_stmt == NULL_TREE)
13576 {
13577 gcc_assert (seen_error ());
13578 *expr_p = NULL_TREE;
13579 return GS_ERROR;
13580 }
13581 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13582 {
13583 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13584 &OMP_FOR_PRE_BODY (for_stmt));
13585 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13586 }
13587 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13588 {
13589 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13590 &OMP_FOR_PRE_BODY (for_stmt));
13591 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13592 }
13593
13594 if (data[0])
13595 {
13596 /* We have some statements or variable declarations in between
13597 the composite construct directives. Move them around the
13598 inner_for_stmt. */
13599 data[0] = expr_p;
13600 for (i = 0; i < 3; i++)
13601 if (data[i])
13602 {
13603 tree t = *data[i];
13604 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13605 data[i + 1] = data[i];
13606 *data[i] = OMP_BODY (t);
13607 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13608 NULL_TREE, make_node (BLOCK));
13609 OMP_BODY (t) = body;
13610 append_to_statement_list_force (inner_for_stmt,
13611 &BIND_EXPR_BODY (body));
13612 *data[3] = t;
13613 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13614 gcc_assert (*data[3] == inner_for_stmt);
13615 }
13616 return GS_OK;
13617 }
13618
13619 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13620 if (!loop_p
13621 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13622 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13623 i)) == TREE_LIST
13624 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13625 i)))
13626 {
13627 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13628 /* Class iterators aren't allowed on OMP_SIMD, so the only
13629 case we need to solve is distribute parallel for. They are
13630 allowed on the loop construct, but that is already handled
13631 in gimplify_omp_loop. */
13632 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13633 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13634 && data[1]);
13635 tree orig_decl = TREE_PURPOSE (orig);
13636 tree last = TREE_VALUE (orig);
13637 tree *pc;
13638 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13639 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13640 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13641 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13642 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13643 break;
13644 if (*pc == NULL_TREE)
13645 {
13646 tree *spc;
13647 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13648 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13649 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13650 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13651 break;
13652 if (*spc)
13653 {
13654 tree c = *spc;
13655 *spc = OMP_CLAUSE_CHAIN (c);
13656 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13657 *pc = c;
13658 }
13659 }
13660 if (*pc == NULL_TREE)
13661 ;
13662 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13663 {
13664 /* A private clause will appear only on inner_for_stmt.
13665 Change it into firstprivate, and add a private clause
13666 to for_stmt. */
13667 tree c = copy_node (*pc);
13668 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13669 OMP_FOR_CLAUSES (for_stmt) = c;
13670 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13671 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13672 }
13673 else
13674 {
13675 /* A lastprivate clause will appear on both inner_for_stmt
13676 and for_stmt. Add a firstprivate clause to
13677 inner_for_stmt. */
13678 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13679 OMP_CLAUSE_FIRSTPRIVATE);
13680 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13681 OMP_CLAUSE_CHAIN (c) = *pc;
13682 *pc = c;
13683 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13684 }
13685 tree c = build_omp_clause (UNKNOWN_LOCATION,
13686 OMP_CLAUSE_FIRSTPRIVATE);
13687 OMP_CLAUSE_DECL (c) = last;
13688 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13689 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13690 c = build_omp_clause (UNKNOWN_LOCATION,
13691 *pc ? OMP_CLAUSE_SHARED
13692 : OMP_CLAUSE_FIRSTPRIVATE);
13693 OMP_CLAUSE_DECL (c) = orig_decl;
13694 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13695 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13696 }
13697 /* Similarly, take care of C++ range-for temporaries; those should
13698 be firstprivate on the OMP_PARALLEL if there is one. */
13699 if (data[1])
13700 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13701 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13702 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13703 i)) == TREE_LIST
13704 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13705 i)))
13706 {
13707 tree orig
13708 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13709 tree v = TREE_CHAIN (orig);
13710 tree c = build_omp_clause (UNKNOWN_LOCATION,
13711 OMP_CLAUSE_FIRSTPRIVATE);
13712 /* First add firstprivate clause for the __for_end artificial
13713 decl. */
13714 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13715 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13716 == REFERENCE_TYPE)
13717 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13718 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13719 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13720 if (TREE_VEC_ELT (v, 0))
13721 {
13722 /* And now the same for __for_range artificial decl if it
13723 exists. */
13724 c = build_omp_clause (UNKNOWN_LOCATION,
13725 OMP_CLAUSE_FIRSTPRIVATE);
13726 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13727 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13728 == REFERENCE_TYPE)
13729 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13730 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13731 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13732 }
13733 }
13734 }
13735
13736 switch (TREE_CODE (for_stmt))
13737 {
13738 case OMP_FOR:
13739 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13740 {
13741 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13742 OMP_CLAUSE_SCHEDULE))
13743 error_at (EXPR_LOCATION (for_stmt),
13744 "%qs clause may not appear on non-rectangular %qs",
13745 "schedule", lang_GNU_Fortran () ? "do" : "for");
13746 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13747 error_at (EXPR_LOCATION (for_stmt),
13748 "%qs clause may not appear on non-rectangular %qs",
13749 "ordered", lang_GNU_Fortran () ? "do" : "for");
13750 }
13751 break;
13752 case OMP_DISTRIBUTE:
13753 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13754 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13755 OMP_CLAUSE_DIST_SCHEDULE))
13756 error_at (EXPR_LOCATION (for_stmt),
13757 "%qs clause may not appear on non-rectangular %qs",
13758 "dist_schedule", "distribute");
13759 break;
13760 case OACC_LOOP:
13761 ort = ORT_ACC;
13762 break;
13763 case OMP_TASKLOOP:
13764 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13765 {
13766 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13767 OMP_CLAUSE_GRAINSIZE))
13768 error_at (EXPR_LOCATION (for_stmt),
13769 "%qs clause may not appear on non-rectangular %qs",
13770 "grainsize", "taskloop");
13771 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13772 OMP_CLAUSE_NUM_TASKS))
13773 error_at (EXPR_LOCATION (for_stmt),
13774 "%qs clause may not appear on non-rectangular %qs",
13775 "num_tasks", "taskloop");
13776 }
13777 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13778 ort = ORT_UNTIED_TASKLOOP;
13779 else
13780 ort = ORT_TASKLOOP;
13781 break;
13782 case OMP_SIMD:
13783 ort = ORT_SIMD;
13784 break;
13785 default:
13786 gcc_unreachable ();
13787 }
13788
13789 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13790 clause for the IV. */
13791 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13792 {
13793 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13794 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13795 decl = TREE_OPERAND (t, 0);
13796 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13797 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13798 && OMP_CLAUSE_DECL (c) == decl)
13799 {
13800 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13801 break;
13802 }
13803 }
13804
13805 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13806 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13807 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13808 ? OMP_LOOP : TREE_CODE (for_stmt));
13809
13810 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13811 gimplify_omp_ctxp->distribute = true;
13812
13813 /* Handle OMP_FOR_INIT. */
13814 for_pre_body = NULL;
13815 if ((ort == ORT_SIMD
13816 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13817 && OMP_FOR_PRE_BODY (for_stmt))
13818 {
13819 has_decl_expr = BITMAP_ALLOC (NULL);
13820 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13821 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
13822 == VAR_DECL)
13823 {
13824 t = OMP_FOR_PRE_BODY (for_stmt);
13825 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13826 }
13827 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13828 {
13829 tree_stmt_iterator si;
13830 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13831 tsi_next (&si))
13832 {
13833 t = tsi_stmt (si);
13834 if (TREE_CODE (t) == DECL_EXPR
13835 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
13836 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13837 }
13838 }
13839 }
13840 if (OMP_FOR_PRE_BODY (for_stmt))
13841 {
13842 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13843 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13844 else
13845 {
13846 struct gimplify_omp_ctx ctx;
13847 memset (&ctx, 0, sizeof (ctx));
13848 ctx.region_type = ORT_NONE;
13849 gimplify_omp_ctxp = &ctx;
13850 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13851 gimplify_omp_ctxp = NULL;
13852 }
13853 }
13854 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13855
13856 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13857 for_stmt = inner_for_stmt;
13858
13859 /* For taskloop, we need to gimplify the start, end and step before
13860 the taskloop, outside of the taskloop omp context. */
13861 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13862 {
13863 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13864 {
13865 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13866 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13867 ? pre_p : &for_pre_body);
13868 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13869 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13870 {
13871 tree v = TREE_OPERAND (t, 1);
13872 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13873 for_pre_p, orig_for_stmt);
13874 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13875 for_pre_p, orig_for_stmt);
13876 }
13877 else
13878 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13879 orig_for_stmt);
13880
13881 /* Handle OMP_FOR_COND. */
13882 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13883 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13884 {
13885 tree v = TREE_OPERAND (t, 1);
13886 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13887 for_pre_p, orig_for_stmt);
13888 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13889 for_pre_p, orig_for_stmt);
13890 }
13891 else
13892 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13893 orig_for_stmt);
13894
13895 /* Handle OMP_FOR_INCR. */
13896 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13897 if (TREE_CODE (t) == MODIFY_EXPR)
13898 {
13899 decl = TREE_OPERAND (t, 0);
13900 t = TREE_OPERAND (t, 1);
13901 tree *tp = &TREE_OPERAND (t, 1);
13902 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13903 tp = &TREE_OPERAND (t, 0);
13904
13905 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13906 orig_for_stmt);
13907 }
13908 }
13909
13910 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13911 OMP_TASKLOOP);
13912 }
13913
13914 if (orig_for_stmt != for_stmt)
13915 gimplify_omp_ctxp->combined_loop = true;
13916
13917 for_body = NULL;
13918 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13919 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
13920 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13921 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
13922
13923 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
13924 bool is_doacross = false;
13925 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
13926 find_standalone_omp_ordered, NULL))
13927 {
13928 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
13929 is_doacross = true;
13930 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
13931 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
13932 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13933 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
13934 {
13935 error_at (OMP_CLAUSE_LOCATION (*pc),
13936 "%<linear%> clause may not be specified together "
13937 "with %<ordered%> clause if stand-alone %<ordered%> "
13938 "construct is nested in it");
13939 *pc = OMP_CLAUSE_CHAIN (*pc);
13940 }
13941 else
13942 pc = &OMP_CLAUSE_CHAIN (*pc);
13943 }
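/* The ORDERED_DOACROSS flag set above and the IV pairs pushed into
   loop_iter_var below are what later allow gimplify_omp_ordered to
   rewrite the stand-alone depend(sink:)/depend(source) constructs in
   terms of the collected iteration variables. */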
13944 int collapse = 1, tile = 0;
13945 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
13946 if (c)
13947 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
13948 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
13949 if (c)
13950 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
13951 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
13952 hash_set<tree> *allocate_uids = NULL;
13953 if (c)
13954 {
13955 allocate_uids = new hash_set<tree>;
13956 for (; c; c = OMP_CLAUSE_CHAIN (c))
13957 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
13958 allocate_uids->add (OMP_CLAUSE_DECL (c));
13959 }
13960 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13961 {
13962 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13963 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13964 decl = TREE_OPERAND (t, 0);
13965 gcc_assert (DECL_P (decl));
13966 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
13967 || POINTER_TYPE_P (TREE_TYPE (decl)));
13968 if (is_doacross)
13969 {
13970 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
13971 {
13972 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13973 if (TREE_CODE (orig_decl) == TREE_LIST)
13974 {
13975 orig_decl = TREE_PURPOSE (orig_decl);
13976 if (!orig_decl)
13977 orig_decl = decl;
13978 }
13979 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
13980 }
13981 else
13982 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13983 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13984 }
13985
13986 if (for_stmt == orig_for_stmt)
13987 {
13988 tree orig_decl = decl;
13989 if (OMP_FOR_ORIG_DECLS (for_stmt))
13990 {
13991 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13992 if (TREE_CODE (orig_decl) == TREE_LIST)
13993 {
13994 orig_decl = TREE_PURPOSE (orig_decl);
13995 if (!orig_decl)
13996 orig_decl = decl;
13997 }
13998 }
13999 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14000 error_at (EXPR_LOCATION (for_stmt),
14001 "threadprivate iteration variable %qD", orig_decl);
14002 }
14003
14004 /* Make sure the iteration variable is private. */
14005 tree c = NULL_TREE;
14006 tree c2 = NULL_TREE;
14007 if (orig_for_stmt != for_stmt)
14008 {
14009 /* Preserve this information until we gimplify the inner simd. */
14010 if (has_decl_expr
14011 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14012 TREE_PRIVATE (t) = 1;
14013 }
14014 else if (ort == ORT_SIMD)
14015 {
14016 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14017 (splay_tree_key) decl);
14018 omp_is_private (gimplify_omp_ctxp, decl,
14019 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14020 != 1));
14021 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14022 {
14023 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14024 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14025 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14026 OMP_CLAUSE_LASTPRIVATE);
14027 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14028 OMP_CLAUSE_LASTPRIVATE))
14029 if (OMP_CLAUSE_DECL (c3) == decl)
14030 {
14031 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14032 "conditional %<lastprivate%> on loop "
14033 "iterator %qD ignored", decl);
14034 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14035 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14036 }
14037 }
14038 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14039 {
14040 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14041 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14042 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14043 if ((has_decl_expr
14044 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14045 || TREE_PRIVATE (t))
14046 {
14047 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14048 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14049 }
14050 struct gimplify_omp_ctx *outer
14051 = gimplify_omp_ctxp->outer_context;
14052 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14053 {
14054 if (outer->region_type == ORT_WORKSHARE
14055 && outer->combined_loop)
14056 {
14057 n = splay_tree_lookup (outer->variables,
14058 (splay_tree_key)decl);
14059 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14060 {
14061 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14062 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14063 }
14064 else
14065 {
14066 struct gimplify_omp_ctx *octx = outer->outer_context;
14067 if (octx
14068 && octx->region_type == ORT_COMBINED_PARALLEL
14069 && octx->outer_context
14070 && (octx->outer_context->region_type
14071 == ORT_WORKSHARE)
14072 && octx->outer_context->combined_loop)
14073 {
14074 octx = octx->outer_context;
14075 n = splay_tree_lookup (octx->variables,
14076 (splay_tree_key)decl);
14077 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14078 {
14079 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14080 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14081 }
14082 }
14083 }
14084 }
14085 }
14086
14087 OMP_CLAUSE_DECL (c) = decl;
14088 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14089 OMP_FOR_CLAUSES (for_stmt) = c;
14090 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14091 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14092 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14093 true);
14094 }
14095 else
14096 {
14097 bool lastprivate
14098 = (!has_decl_expr
14099 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14100 if (TREE_PRIVATE (t))
14101 lastprivate = false;
14102 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14103 {
14104 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14105 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14106 lastprivate = false;
14107 }
14108
14109 struct gimplify_omp_ctx *outer
14110 = gimplify_omp_ctxp->outer_context;
14111 if (outer && lastprivate)
14112 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14113 true);
14114
14115 c = build_omp_clause (input_location,
14116 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14117 : OMP_CLAUSE_PRIVATE);
14118 OMP_CLAUSE_DECL (c) = decl;
14119 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14120 OMP_FOR_CLAUSES (for_stmt) = c;
14121 omp_add_variable (gimplify_omp_ctxp, decl,
14122 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14123 | GOVD_EXPLICIT | GOVD_SEEN);
14124 c = NULL_TREE;
14125 }
14126 }
14127 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14128 {
14129 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14130 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14131 (splay_tree_key) decl);
14132 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14133 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14134 OMP_CLAUSE_LASTPRIVATE);
14135 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14136 OMP_CLAUSE_LASTPRIVATE))
14137 if (OMP_CLAUSE_DECL (c3) == decl)
14138 {
14139 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14140 "conditional %<lastprivate%> on loop "
14141 "iterator %qD ignored", decl);
14142 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14143 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14144 }
14145 }
14146 else
14147 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14148
14149 /* If DECL is not a gimple register, create a temporary variable to act
14150 as an iteration counter. This is valid, since DECL cannot be
14151 modified in the body of the loop. Similarly for any iteration vars
14152 in simd with collapse > 1 where the iterator vars must be
14153 lastprivate. And similarly for vars mentioned in allocate clauses. */
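/* Illustrative note (a sketch, not compiler output): for a loop such as

     static int i;
     #pragma omp for
     for (i = 0; i < n; i++) ...

   a fresh temporary, say i.N, becomes the GIMPLE induction variable, and
   an assignment "i = i.N" is placed at the start of the loop body, so the
   non-register DECL is only ever stored to, never iterated directly.  */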
14154 if (orig_for_stmt != for_stmt)
14155 var = decl;
14156 else if (!is_gimple_reg (decl)
14157 || (ort == ORT_SIMD
14158 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14159 || (allocate_uids && allocate_uids->contains (decl)))
14160 {
14161 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14162 /* Make sure omp_add_variable is not called on it prematurely.
14163 We call it ourselves a few lines later. */
14164 gimplify_omp_ctxp = NULL;
14165 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14166 gimplify_omp_ctxp = ctx;
14167 TREE_OPERAND (t, 0) = var;
14168
14169 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14170
14171 if (ort == ORT_SIMD
14172 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14173 {
14174 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14175 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14176 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14177 OMP_CLAUSE_DECL (c2) = var;
14178 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14179 OMP_FOR_CLAUSES (for_stmt) = c2;
14180 omp_add_variable (gimplify_omp_ctxp, var,
14181 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14182 if (c == NULL_TREE)
14183 {
14184 c = c2;
14185 c2 = NULL_TREE;
14186 }
14187 }
14188 else
14189 omp_add_variable (gimplify_omp_ctxp, var,
14190 GOVD_PRIVATE | GOVD_SEEN);
14191 }
14192 else
14193 var = decl;
14194
14195 gimplify_omp_ctxp->in_for_exprs = true;
14196 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14197 {
14198 tree lb = TREE_OPERAND (t, 1);
14199 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14200 is_gimple_val, fb_rvalue, false);
14201 ret = MIN (ret, tret);
14202 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14203 is_gimple_val, fb_rvalue, false);
14204 }
14205 else
14206 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14207 is_gimple_val, fb_rvalue, false);
14208 gimplify_omp_ctxp->in_for_exprs = false;
14209 ret = MIN (ret, tret);
14210 if (ret == GS_ERROR)
14211 return ret;
14212
14213 /* Handle OMP_FOR_COND. */
14214 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14215 gcc_assert (COMPARISON_CLASS_P (t));
14216 gcc_assert (TREE_OPERAND (t, 0) == decl);
14217
14218 gimplify_omp_ctxp->in_for_exprs = true;
14219 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14220 {
14221 tree ub = TREE_OPERAND (t, 1);
14222 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14223 is_gimple_val, fb_rvalue, false);
14224 ret = MIN (ret, tret);
14225 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14226 is_gimple_val, fb_rvalue, false);
14227 }
14228 else
14229 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14230 is_gimple_val, fb_rvalue, false);
14231 gimplify_omp_ctxp->in_for_exprs = false;
14232 ret = MIN (ret, tret);
14233
14234 /* Handle OMP_FOR_INCR. */
14235 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14236 switch (TREE_CODE (t))
14237 {
14238 case PREINCREMENT_EXPR:
14239 case POSTINCREMENT_EXPR:
14240 {
14241 tree decl = TREE_OPERAND (t, 0);
14242 /* c_omp_for_incr_canonicalize_ptr() should have been
14243 called to massage things appropriately. */
14244 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14245
14246 if (orig_for_stmt != for_stmt)
14247 break;
14248 t = build_int_cst (TREE_TYPE (decl), 1);
14249 if (c)
14250 OMP_CLAUSE_LINEAR_STEP (c) = t;
14251 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14252 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14253 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14254 break;
14255 }
14256
14257 case PREDECREMENT_EXPR:
14258 case POSTDECREMENT_EXPR:
14259 /* c_omp_for_incr_canonicalize_ptr() should have been
14260 called to massage things appropriately. */
14261 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14262 if (orig_for_stmt != for_stmt)
14263 break;
14264 t = build_int_cst (TREE_TYPE (decl), -1);
14265 if (c)
14266 OMP_CLAUSE_LINEAR_STEP (c) = t;
14267 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14268 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14269 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14270 break;
14271
14272 case MODIFY_EXPR:
14273 gcc_assert (TREE_OPERAND (t, 0) == decl);
14274 TREE_OPERAND (t, 0) = var;
14275
14276 t = TREE_OPERAND (t, 1);
14277 switch (TREE_CODE (t))
14278 {
14279 case PLUS_EXPR:
14280 if (TREE_OPERAND (t, 1) == decl)
14281 {
14282 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14283 TREE_OPERAND (t, 0) = var;
14284 break;
14285 }
14286
14287 /* Fallthru. */
14288 case MINUS_EXPR:
14289 case POINTER_PLUS_EXPR:
14290 gcc_assert (TREE_OPERAND (t, 0) == decl);
14291 TREE_OPERAND (t, 0) = var;
14292 break;
14293 default:
14294 gcc_unreachable ();
14295 }
14296
14297 gimplify_omp_ctxp->in_for_exprs = true;
14298 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14299 is_gimple_val, fb_rvalue, false);
14300 ret = MIN (ret, tret);
14301 if (c)
14302 {
14303 tree step = TREE_OPERAND (t, 1);
14304 tree stept = TREE_TYPE (decl);
14305 if (POINTER_TYPE_P (stept))
14306 stept = sizetype;
14307 step = fold_convert (stept, step);
14308 if (TREE_CODE (t) == MINUS_EXPR)
14309 step = fold_build1 (NEGATE_EXPR, stept, step);
14310 OMP_CLAUSE_LINEAR_STEP (c) = step;
14311 if (step != TREE_OPERAND (t, 1))
14312 {
14313 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14314 &for_pre_body, NULL,
14315 is_gimple_val, fb_rvalue, false);
14316 ret = MIN (ret, tret);
14317 }
14318 }
14319 gimplify_omp_ctxp->in_for_exprs = false;
14320 break;
14321
14322 default:
14323 gcc_unreachable ();
14324 }
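/* At this point every increment form accepted above (i++, ++i, i--, --i,
   i = i + step, i = step + i, i = i - step and the pointer variants) has
   been rewritten as "var = var +/- step" with the step gimplified to a
   GIMPLE value; a descriptive summary only, nothing further happens here.  */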
14325
14326 if (c2)
14327 {
14328 gcc_assert (c);
14329 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14330 }
14331
14332 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14333 {
14334 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14335 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14336 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14337 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14338 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14339 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14340 && OMP_CLAUSE_DECL (c) == decl)
14341 {
14342 if (is_doacross && (collapse == 1 || i >= collapse))
14343 t = var;
14344 else
14345 {
14346 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14347 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14348 gcc_assert (TREE_OPERAND (t, 0) == var);
14349 t = TREE_OPERAND (t, 1);
14350 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14351 || TREE_CODE (t) == MINUS_EXPR
14352 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14353 gcc_assert (TREE_OPERAND (t, 0) == var);
14354 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14355 is_doacross ? var : decl,
14356 TREE_OPERAND (t, 1));
14357 }
14358 gimple_seq *seq;
14359 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14360 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14361 else
14362 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14363 push_gimplify_context ();
14364 gimplify_assign (decl, t, seq);
14365 gimple *bind = NULL;
14366 if (gimplify_ctxp->temps)
14367 {
14368 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14369 *seq = NULL;
14370 gimplify_seq_add_stmt (seq, bind);
14371 }
14372 pop_gimplify_context (bind);
14373 }
14374 }
14375 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14376 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14377 {
14378 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14379 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14380 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14381 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14382 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14383 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14384 gcc_assert (COMPARISON_CLASS_P (t));
14385 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14386 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14387 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14388 }
14389 }
14390
14391 BITMAP_FREE (has_decl_expr);
14392 delete allocate_uids;
14393
14394 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14395 || (loop_p && orig_for_stmt == for_stmt))
14396 {
14397 push_gimplify_context ();
14398 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14399 {
14400 OMP_FOR_BODY (orig_for_stmt)
14401 = build3 (BIND_EXPR, void_type_node, NULL,
14402 OMP_FOR_BODY (orig_for_stmt), NULL);
14403 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14404 }
14405 }
14406
14407 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14408 &for_body);
14409
14410 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14411 || (loop_p && orig_for_stmt == for_stmt))
14412 {
14413 if (gimple_code (g) == GIMPLE_BIND)
14414 pop_gimplify_context (g);
14415 else
14416 pop_gimplify_context (NULL);
14417 }
14418
14419 if (orig_for_stmt != for_stmt)
14420 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14421 {
14422 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14423 decl = TREE_OPERAND (t, 0);
14424 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14425 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14426 gimplify_omp_ctxp = ctx->outer_context;
14427 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14428 gimplify_omp_ctxp = ctx;
14429 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14430 TREE_OPERAND (t, 0) = var;
14431 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14432 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14433 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14434 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14435 for (int j = i + 1;
14436 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14437 {
14438 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14439 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14440 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14441 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14442 {
14443 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14444 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14445 }
14446 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14447 gcc_assert (COMPARISON_CLASS_P (t));
14448 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14449 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14450 {
14451 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14452 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14453 }
14454 }
14455 }
14456
14457 gimplify_adjust_omp_clauses (pre_p, for_body,
14458 &OMP_FOR_CLAUSES (orig_for_stmt),
14459 TREE_CODE (orig_for_stmt));
14460
14461 int kind;
14462 switch (TREE_CODE (orig_for_stmt))
14463 {
14464 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14465 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14466 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14467 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14468 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14469 default:
14470 gcc_unreachable ();
14471 }
14472 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14473 {
14474 gimplify_seq_add_seq (pre_p, for_pre_body);
14475 for_pre_body = NULL;
14476 }
14477 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14478 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14479 for_pre_body);
14480 if (orig_for_stmt != for_stmt)
14481 gimple_omp_for_set_combined_p (gfor, true);
14482 if (gimplify_omp_ctxp
14483 && (gimplify_omp_ctxp->combined_loop
14484 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14485 && gimplify_omp_ctxp->outer_context
14486 && gimplify_omp_ctxp->outer_context->combined_loop)))
14487 {
14488 gimple_omp_for_set_combined_into_p (gfor, true);
14489 if (gimplify_omp_ctxp->combined_loop)
14490 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14491 else
14492 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14493 }
14494
14495 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14496 {
14497 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14498 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14499 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14500 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14501 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14502 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14503 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14504 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14505 }
14506
14507 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14508 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14509 The outer taskloop stands for computing the number of iterations and
14510 the counts for collapsed loops, and for holding taskloop-specific clauses.
14511 The task construct stands for the effect of data sharing on the
14512 explicit task it creates, and the inner taskloop stands for expansion
14513 of the static loop inside of the explicit task construct. */
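/* A hedged source-level sketch of that shape (the gimplifier builds the
   GIMPLE directly; no textual form like this is ever produced):

     #pragma omp taskloop grainsize(4) lastprivate(i)
     for (i = 0; i < n; i++) ...

   roughly becomes

     GIMPLE_OMP_FOR (taskloop: grainsize, iteration counts)
       GIMPLE_OMP_TASK (data sharing: shared/firstprivate i)
         GIMPLE_OMP_FOR (taskloop: lastprivate(i))
           ... loop body ...  */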
14514 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14515 {
14516 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14517 tree task_clauses = NULL_TREE;
14518 tree c = *gfor_clauses_ptr;
14519 tree *gtask_clauses_ptr = &task_clauses;
14520 tree outer_for_clauses = NULL_TREE;
14521 tree *gforo_clauses_ptr = &outer_for_clauses;
14522 bitmap lastprivate_uids = NULL;
14523 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14524 {
14525 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14526 if (c)
14527 {
14528 lastprivate_uids = BITMAP_ALLOC (NULL);
14529 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14530 OMP_CLAUSE_LASTPRIVATE))
14531 bitmap_set_bit (lastprivate_uids,
14532 DECL_UID (OMP_CLAUSE_DECL (c)));
14533 }
14534 c = *gfor_clauses_ptr;
14535 }
14536 for (; c; c = OMP_CLAUSE_CHAIN (c))
14537 switch (OMP_CLAUSE_CODE (c))
14538 {
14539 /* These clauses are allowed on the task; move them there. */
14540 case OMP_CLAUSE_SHARED:
14541 case OMP_CLAUSE_FIRSTPRIVATE:
14542 case OMP_CLAUSE_DEFAULT:
14543 case OMP_CLAUSE_IF:
14544 case OMP_CLAUSE_UNTIED:
14545 case OMP_CLAUSE_FINAL:
14546 case OMP_CLAUSE_MERGEABLE:
14547 case OMP_CLAUSE_PRIORITY:
14548 case OMP_CLAUSE_REDUCTION:
14549 case OMP_CLAUSE_IN_REDUCTION:
14550 *gtask_clauses_ptr = c;
14551 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14552 break;
14553 case OMP_CLAUSE_PRIVATE:
14554 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14555 {
14556 /* We want private on outer for and firstprivate
14557 on task. */
14558 *gtask_clauses_ptr
14559 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14560 OMP_CLAUSE_FIRSTPRIVATE);
14561 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14562 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14563 openacc);
14564 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14565 *gforo_clauses_ptr = c;
14566 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14567 }
14568 else
14569 {
14570 *gtask_clauses_ptr = c;
14571 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14572 }
14573 break;
14574 /* These clauses go into outer taskloop clauses. */
14575 case OMP_CLAUSE_GRAINSIZE:
14576 case OMP_CLAUSE_NUM_TASKS:
14577 case OMP_CLAUSE_NOGROUP:
14578 *gforo_clauses_ptr = c;
14579 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14580 break;
14581 /* The collapse clause is duplicated on both taskloops. */
14582 case OMP_CLAUSE_COLLAPSE:
14583 *gfor_clauses_ptr = c;
14584 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14585 *gforo_clauses_ptr = copy_node (c);
14586 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14587 break;
14588 /* For lastprivate, keep the clause on the inner taskloop, and add
14589 a shared clause on the task. If the same decl is also firstprivate,
14590 also add a firstprivate clause on the inner taskloop. */
14591 case OMP_CLAUSE_LASTPRIVATE:
14592 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14593 {
14594 /* For taskloop C++ lastprivate IVs, we want:
14595 1) private on outer taskloop
14596 2) firstprivate and shared on task
14597 3) lastprivate on inner taskloop */
14598 *gtask_clauses_ptr
14599 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14600 OMP_CLAUSE_FIRSTPRIVATE);
14601 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14602 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14603 openacc);
14604 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14605 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14606 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14607 OMP_CLAUSE_PRIVATE);
14608 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14609 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14610 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14611 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14612 }
14613 *gfor_clauses_ptr = c;
14614 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14615 *gtask_clauses_ptr
14616 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14617 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14618 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14619 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14620 gtask_clauses_ptr
14621 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14622 break;
14623 /* The allocate clause is duplicated on the task and the inner taskloop
14624 if the decl is lastprivate; otherwise it is just put on the task. */
14625 case OMP_CLAUSE_ALLOCATE:
14626 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14627 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14628 {
14629 /* Additionally, put a firstprivate clause on the task
14630 for the allocator if it is not constant. */
14631 *gtask_clauses_ptr
14632 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14633 OMP_CLAUSE_FIRSTPRIVATE);
14634 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14635 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14636 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14637 }
14638 if (lastprivate_uids
14639 && bitmap_bit_p (lastprivate_uids,
14640 DECL_UID (OMP_CLAUSE_DECL (c))))
14641 {
14642 *gfor_clauses_ptr = c;
14643 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14644 *gtask_clauses_ptr = copy_node (c);
14645 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14646 }
14647 else
14648 {
14649 *gtask_clauses_ptr = c;
14650 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14651 }
14652 break;
14653 default:
14654 gcc_unreachable ();
14655 }
14656 *gfor_clauses_ptr = NULL_TREE;
14657 *gtask_clauses_ptr = NULL_TREE;
14658 *gforo_clauses_ptr = NULL_TREE;
14659 BITMAP_FREE (lastprivate_uids);
14660 gimple_set_location (gfor, input_location);
14661 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14662 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14663 NULL_TREE, NULL_TREE, NULL_TREE);
14664 gimple_set_location (g, input_location);
14665 gimple_omp_task_set_taskloop_p (g, true);
14666 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14667 gomp_for *gforo
14668 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14669 gimple_omp_for_collapse (gfor),
14670 gimple_omp_for_pre_body (gfor));
14671 gimple_omp_for_set_pre_body (gfor, NULL);
14672 gimple_omp_for_set_combined_p (gforo, true);
14673 gimple_omp_for_set_combined_into_p (gfor, true);
14674 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14675 {
14676 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14677 tree v = create_tmp_var (type);
14678 gimple_omp_for_set_index (gforo, i, v);
14679 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14680 gimple_omp_for_set_initial (gforo, i, t);
14681 gimple_omp_for_set_cond (gforo, i,
14682 gimple_omp_for_cond (gfor, i));
14683 t = unshare_expr (gimple_omp_for_final (gfor, i));
14684 gimple_omp_for_set_final (gforo, i, t);
14685 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14686 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14687 TREE_OPERAND (t, 0) = v;
14688 gimple_omp_for_set_incr (gforo, i, t);
14689 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14690 OMP_CLAUSE_DECL (t) = v;
14691 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14692 gimple_omp_for_set_clauses (gforo, t);
14693 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14694 {
14695 tree *p1 = NULL, *p2 = NULL;
14696 t = gimple_omp_for_initial (gforo, i);
14697 if (TREE_CODE (t) == TREE_VEC)
14698 p1 = &TREE_VEC_ELT (t, 0);
14699 t = gimple_omp_for_final (gforo, i);
14700 if (TREE_CODE (t) == TREE_VEC)
14701 {
14702 if (p1)
14703 p2 = &TREE_VEC_ELT (t, 0);
14704 else
14705 p1 = &TREE_VEC_ELT (t, 0);
14706 }
14707 if (p1)
14708 {
14709 int j;
14710 for (j = 0; j < i; j++)
14711 if (*p1 == gimple_omp_for_index (gfor, j))
14712 {
14713 *p1 = gimple_omp_for_index (gforo, j);
14714 if (p2)
14715 *p2 = *p1;
14716 break;
14717 }
14718 gcc_assert (j < i);
14719 }
14720 }
14721 }
14722 gimplify_seq_add_stmt (pre_p, gforo);
14723 }
14724 else
14725 gimplify_seq_add_stmt (pre_p, gfor);
14726
14727 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14728 {
14729 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14730 unsigned lastprivate_conditional = 0;
14731 while (ctx
14732 && (ctx->region_type == ORT_TARGET_DATA
14733 || ctx->region_type == ORT_TASKGROUP))
14734 ctx = ctx->outer_context;
14735 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14736 for (tree c = gimple_omp_for_clauses (gfor);
14737 c; c = OMP_CLAUSE_CHAIN (c))
14738 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14739 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14740 ++lastprivate_conditional;
14741 if (lastprivate_conditional)
14742 {
14743 struct omp_for_data fd;
14744 omp_extract_for_data (gfor, &fd, NULL);
14745 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14746 lastprivate_conditional);
14747 tree var = create_tmp_var_raw (type);
14748 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14749 OMP_CLAUSE_DECL (c) = var;
14750 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14751 gimple_omp_for_set_clauses (gfor, c);
14752 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14753 }
14754 }
14755 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14756 {
14757 unsigned lastprivate_conditional = 0;
14758 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14760 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14761 ++lastprivate_conditional;
14762 if (lastprivate_conditional)
14763 {
14764 struct omp_for_data fd;
14765 omp_extract_for_data (gfor, &fd, NULL);
14766 tree type = unsigned_type_for (fd.iter_type);
14767 while (lastprivate_conditional--)
14768 {
14769 tree c = build_omp_clause (UNKNOWN_LOCATION,
14770 OMP_CLAUSE__CONDTEMP_);
14771 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14772 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14773 gimple_omp_for_set_clauses (gfor, c);
14774 }
14775 }
14776 }
14777
14778 if (ret != GS_ALL_DONE)
14779 return GS_ERROR;
14780 *expr_p = NULL_TREE;
14781 return GS_ALL_DONE;
14782 }
14783
14784 /* Helper for gimplify_omp_loop, called through walk_tree. */
14785
14786 static tree
14787 note_no_context_vars (tree *tp, int *, void *data)
14788 {
14789 if (VAR_P (*tp)
14790 && DECL_CONTEXT (*tp) == NULL_TREE
14791 && !is_global_var (*tp))
14792 {
14793 vec<tree> *d = (vec<tree> *) data;
14794 d->safe_push (*tp);
14795 DECL_CONTEXT (*tp) = current_function_decl;
14796 }
14797 return NULL_TREE;
14798 }
14799
14800 /* Gimplify the gross structure of an OMP_LOOP statement. */
14801
14802 static enum gimplify_status
14803 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14804 {
14805 tree for_stmt = *expr_p;
14806 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14807 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14808 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14809 int i;
14810
14811 /* If order is not present, the behavior is as if order(concurrent)
14812 appeared. */
14813 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14814 if (order == NULL_TREE)
14815 {
14816 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14817 OMP_CLAUSE_CHAIN (order) = clauses;
14818 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14819 }
14820
14821 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14822 if (bind == NULL_TREE)
14823 {
14824 if (!flag_openmp) /* flag_openmp_simd */
14825 ;
14826 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14827 kind = OMP_CLAUSE_BIND_TEAMS;
14828 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14829 kind = OMP_CLAUSE_BIND_PARALLEL;
14830 else
14831 {
14832 for (; octx; octx = octx->outer_context)
14833 {
14834 if ((octx->region_type & ORT_ACC) != 0
14835 || octx->region_type == ORT_NONE
14836 || octx->region_type == ORT_IMPLICIT_TARGET)
14837 continue;
14838 break;
14839 }
14840 if (octx == NULL && !in_omp_construct)
14841 error_at (EXPR_LOCATION (for_stmt),
14842 "%<bind%> clause not specified on a %<loop%> "
14843 "construct not nested inside another OpenMP construct");
14844 }
14845 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14846 OMP_CLAUSE_CHAIN (bind) = clauses;
14847 OMP_CLAUSE_BIND_KIND (bind) = kind;
14848 OMP_FOR_CLAUSES (for_stmt) = bind;
14849 }
14850 else
14851 switch (OMP_CLAUSE_BIND_KIND (bind))
14852 {
14853 case OMP_CLAUSE_BIND_THREAD:
14854 break;
14855 case OMP_CLAUSE_BIND_PARALLEL:
14856 if (!flag_openmp) /* flag_openmp_simd */
14857 {
14858 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14859 break;
14860 }
14861 for (; octx; octx = octx->outer_context)
14862 if (octx->region_type == ORT_SIMD
14863 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14864 {
14865 error_at (EXPR_LOCATION (for_stmt),
14866 "%<bind(parallel)%> on a %<loop%> construct nested "
14867 "inside %<simd%> construct");
14868 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14869 break;
14870 }
14871 kind = OMP_CLAUSE_BIND_PARALLEL;
14872 break;
14873 case OMP_CLAUSE_BIND_TEAMS:
14874 if (!flag_openmp) /* flag_openmp_simd */
14875 {
14876 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14877 break;
14878 }
14879 if ((octx
14880 && octx->region_type != ORT_IMPLICIT_TARGET
14881 && octx->region_type != ORT_NONE
14882 && (octx->region_type & ORT_TEAMS) == 0)
14883 || in_omp_construct)
14884 {
14885 error_at (EXPR_LOCATION (for_stmt),
14886 "%<bind(teams)%> on a %<loop%> region not strictly "
14887 "nested inside of a %<teams%> region");
14888 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14889 break;
14890 }
14891 kind = OMP_CLAUSE_BIND_TEAMS;
14892 break;
14893 default:
14894 gcc_unreachable ();
14895 }
14896
14897 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14898 switch (OMP_CLAUSE_CODE (*pc))
14899 {
14900 case OMP_CLAUSE_REDUCTION:
14901 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14902 {
14903 error_at (OMP_CLAUSE_LOCATION (*pc),
14904 "%<inscan%> %<reduction%> clause on "
14905 "%qs construct", "loop");
14906 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14907 }
14908 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14909 {
14910 error_at (OMP_CLAUSE_LOCATION (*pc),
14911 "invalid %<task%> reduction modifier on construct "
14912 "other than %<parallel%>, %qs or %<sections%>",
14913 lang_GNU_Fortran () ? "do" : "for");
14914 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14915 }
14916 pc = &OMP_CLAUSE_CHAIN (*pc);
14917 break;
14918 case OMP_CLAUSE_LASTPRIVATE:
14919 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14920 {
14921 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14922 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14923 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
14924 break;
14925 if (OMP_FOR_ORIG_DECLS (for_stmt)
14926 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14927 i)) == TREE_LIST
14928 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14929 i)))
14930 {
14931 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14932 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
14933 break;
14934 }
14935 }
14936 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
14937 {
14938 error_at (OMP_CLAUSE_LOCATION (*pc),
14939 "%<lastprivate%> clause on a %<loop%> construct refers "
14940 "to a variable %qD which is not the loop iterator",
14941 OMP_CLAUSE_DECL (*pc));
14942 *pc = OMP_CLAUSE_CHAIN (*pc);
14943 break;
14944 }
14945 pc = &OMP_CLAUSE_CHAIN (*pc);
14946 break;
14947 default:
14948 pc = &OMP_CLAUSE_CHAIN (*pc);
14949 break;
14950 }
14951
14952 TREE_SET_CODE (for_stmt, OMP_SIMD);
14953
14954 int last;
14955 switch (kind)
14956 {
14957 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
14958 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
14959 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
14960 }
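/* A hedged summary of the rewriting performed by the pass loop below
   (the construction is on trees; pragmas shown only for illustration):
     bind(thread):    loop  ->  simd
     bind(parallel):  loop  ->  for simd    (bound to the enclosing parallel)
     bind(teams):     loop  ->  distribute parallel for simd, with the
                                OMP_PARALLEL node marked combined.  */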
14961 for (int pass = 1; pass <= last; pass++)
14962 {
14963 if (pass == 2)
14964 {
14965 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
14966 make_node (BLOCK));
14967 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
14968 *expr_p = make_node (OMP_PARALLEL);
14969 TREE_TYPE (*expr_p) = void_type_node;
14970 OMP_PARALLEL_BODY (*expr_p) = bind;
14971 OMP_PARALLEL_COMBINED (*expr_p) = 1;
14972 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
14973 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
14974 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14975 if (OMP_FOR_ORIG_DECLS (for_stmt)
14976 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
14977 == TREE_LIST))
14978 {
14979 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14980 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
14981 {
14982 *pc = build_omp_clause (UNKNOWN_LOCATION,
14983 OMP_CLAUSE_FIRSTPRIVATE);
14984 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
14985 pc = &OMP_CLAUSE_CHAIN (*pc);
14986 }
14987 }
14988 }
14989 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
14990 tree *pc = &OMP_FOR_CLAUSES (t);
14991 TREE_TYPE (t) = void_type_node;
14992 OMP_FOR_BODY (t) = *expr_p;
14993 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
14994 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14995 switch (OMP_CLAUSE_CODE (c))
14996 {
14997 case OMP_CLAUSE_BIND:
14998 case OMP_CLAUSE_ORDER:
14999 case OMP_CLAUSE_COLLAPSE:
15000 *pc = copy_node (c);
15001 pc = &OMP_CLAUSE_CHAIN (*pc);
15002 break;
15003 case OMP_CLAUSE_PRIVATE:
15004 case OMP_CLAUSE_FIRSTPRIVATE:
15005 /* Only needed on innermost. */
15006 break;
15007 case OMP_CLAUSE_LASTPRIVATE:
15008 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15009 {
15010 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15011 OMP_CLAUSE_FIRSTPRIVATE);
15012 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15013 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15014 pc = &OMP_CLAUSE_CHAIN (*pc);
15015 }
15016 *pc = copy_node (c);
15017 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15018 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15019 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15020 {
15021 if (pass != last)
15022 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15023 else
15024 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15025 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15026 }
15027 pc = &OMP_CLAUSE_CHAIN (*pc);
15028 break;
15029 case OMP_CLAUSE_REDUCTION:
15030 *pc = copy_node (c);
15031 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15032 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15033 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15034 {
15035 auto_vec<tree> no_context_vars;
15036 int walk_subtrees = 0;
15037 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15038 &walk_subtrees, &no_context_vars);
15039 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15040 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15041 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15042 note_no_context_vars,
15043 &no_context_vars);
15044 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15045 note_no_context_vars,
15046 &no_context_vars);
15047
15048 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15049 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15050 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15051 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15052 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15053
15054 hash_map<tree, tree> decl_map;
15055 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15056 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15057 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15058 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15059 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15060 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15061
15062 copy_body_data id;
15063 memset (&id, 0, sizeof (id));
15064 id.src_fn = current_function_decl;
15065 id.dst_fn = current_function_decl;
15066 id.src_cfun = cfun;
15067 id.decl_map = &decl_map;
15068 id.copy_decl = copy_decl_no_change;
15069 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15070 id.transform_new_cfg = true;
15071 id.transform_return_to_modify = false;
15072 id.eh_lp_nr = 0;
15073 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15074 &id, NULL);
15075 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15076 &id, NULL);
15077
15078 for (tree d : no_context_vars)
15079 {
15080 DECL_CONTEXT (d) = NULL_TREE;
15081 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15082 }
15083 }
15084 else
15085 {
15086 OMP_CLAUSE_REDUCTION_INIT (*pc)
15087 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15088 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15089 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15090 }
15091 pc = &OMP_CLAUSE_CHAIN (*pc);
15092 break;
15093 default:
15094 gcc_unreachable ();
15095 }
15096 *pc = NULL_TREE;
15097 *expr_p = t;
15098 }
15099 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15100 }
15101
15102
15103 /* Helper function of optimize_target_teams: find an OMP_TEAMS inside
15104 OMP_TARGET's body. */
15105
15106 static tree
15107 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15108 {
15109 *walk_subtrees = 0;
15110 switch (TREE_CODE (*tp))
15111 {
15112 case OMP_TEAMS:
15113 return *tp;
15114 case BIND_EXPR:
15115 case STATEMENT_LIST:
15116 *walk_subtrees = 1;
15117 break;
15118 default:
15119 break;
15120 }
15121 return NULL_TREE;
15122 }
15123
15124 /* Helper function of optimize_target_teams: determine whether the
15125 expression can be computed safely on the host before the target construct. */
15126
15127 static tree
15128 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15129 {
15130 splay_tree_node n;
15131
15132 if (TYPE_P (*tp))
15133 {
15134 *walk_subtrees = 0;
15135 return NULL_TREE;
15136 }
15137 switch (TREE_CODE (*tp))
15138 {
15139 case VAR_DECL:
15140 case PARM_DECL:
15141 case RESULT_DECL:
15142 *walk_subtrees = 0;
15143 if (error_operand_p (*tp)
15144 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15145 || DECL_HAS_VALUE_EXPR_P (*tp)
15146 || DECL_THREAD_LOCAL_P (*tp)
15147 || TREE_SIDE_EFFECTS (*tp)
15148 || TREE_THIS_VOLATILE (*tp))
15149 return *tp;
15150 if (is_global_var (*tp)
15151 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15152 || lookup_attribute ("omp declare target link",
15153 DECL_ATTRIBUTES (*tp))))
15154 return *tp;
15155 if (VAR_P (*tp)
15156 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15157 && !is_global_var (*tp)
15158 && decl_function_context (*tp) == current_function_decl)
15159 return *tp;
15160 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15161 (splay_tree_key) *tp);
15162 if (n == NULL)
15163 {
15164 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15165 return NULL_TREE;
15166 return *tp;
15167 }
15168 else if (n->value & GOVD_LOCAL)
15169 return *tp;
15170 else if (n->value & GOVD_FIRSTPRIVATE)
15171 return NULL_TREE;
15172 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15173 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15174 return NULL_TREE;
15175 return *tp;
15176 case INTEGER_CST:
15177 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15178 return *tp;
15179 return NULL_TREE;
15180 case TARGET_EXPR:
15181 if (TARGET_EXPR_INITIAL (*tp)
15182 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15183 return *tp;
15184 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15185 walk_subtrees, NULL);
15186 /* Allow some reasonable subset of integral arithmetic. */
15187 case PLUS_EXPR:
15188 case MINUS_EXPR:
15189 case MULT_EXPR:
15190 case TRUNC_DIV_EXPR:
15191 case CEIL_DIV_EXPR:
15192 case FLOOR_DIV_EXPR:
15193 case ROUND_DIV_EXPR:
15194 case TRUNC_MOD_EXPR:
15195 case CEIL_MOD_EXPR:
15196 case FLOOR_MOD_EXPR:
15197 case ROUND_MOD_EXPR:
15198 case RDIV_EXPR:
15199 case EXACT_DIV_EXPR:
15200 case MIN_EXPR:
15201 case MAX_EXPR:
15202 case LSHIFT_EXPR:
15203 case RSHIFT_EXPR:
15204 case BIT_IOR_EXPR:
15205 case BIT_XOR_EXPR:
15206 case BIT_AND_EXPR:
15207 case NEGATE_EXPR:
15208 case ABS_EXPR:
15209 case BIT_NOT_EXPR:
15210 case NON_LVALUE_EXPR:
15211 CASE_CONVERT:
15212 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15213 return *tp;
15214 return NULL_TREE;
15215 /* And disallow anything else, except for comparisons. */
15216 default:
15217 if (COMPARISON_CLASS_P (*tp))
15218 return NULL_TREE;
15219 return *tp;
15220 }
15221 }
15222
15223 /* Try to determine if the num_teams and/or thread_limit expressions
15224 can have their values determined already before entering the
15225 target construct.
15226 INTEGER_CSTs trivially can;
15227 so can integral decls that are firstprivate (explicitly or implicitly)
15228 or explicitly map(always, to:) or map(always, tofrom:) on the target
15229 region, and expressions involving simple arithmetic on those.
15230 Function calls are not OK, nor is dereferencing something, etc.
15231 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15232 EXPR based on what we find:
15233 0 stands for a clause not specified at all; use the implementation default.
15234 -1 stands for a value that can't be determined easily before entering
15235 the target construct.
15236 -2 means that no explicit teams construct was specified.
15237 If no teams construct is present at all, use 1 for num_teams
15238 and 0 for thread_limit (only one team is involved, and the thread
15239 limit is implementation defined). */
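/* For instance (an illustrative example; foo is hypothetical):

     #pragma omp target
     #pragma omp teams num_teams (4) thread_limit (foo ())
     ...

   records a num_teams upper bound of 4 on the OMP_TARGET, while
   thread_limit becomes -1 because the call to foo () cannot be
   evaluated safely on the host before entering the region.  */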
15240
15241 static void
15242 optimize_target_teams (tree target, gimple_seq *pre_p)
15243 {
15244 tree body = OMP_BODY (target);
15245 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15246 tree num_teams_lower = NULL_TREE;
15247 tree num_teams_upper = integer_zero_node;
15248 tree thread_limit = integer_zero_node;
15249 location_t num_teams_loc = EXPR_LOCATION (target);
15250 location_t thread_limit_loc = EXPR_LOCATION (target);
15251 tree c, *p, expr;
15252 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15253
15254 if (teams == NULL_TREE)
15255 num_teams_upper = build_int_cst (integer_type_node, -2);
15256 else
15257 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15258 {
15259 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15260 {
15261 p = &num_teams_upper;
15262 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15263 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15264 {
15265 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15266 if (TREE_CODE (expr) == INTEGER_CST)
15267 num_teams_lower = expr;
15268 else if (walk_tree (&expr, computable_teams_clause,
15269 NULL, NULL))
15270 num_teams_lower = integer_minus_one_node;
15271 else
15272 {
15273 num_teams_lower = expr;
15274 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15275 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15276 is_gimple_val, fb_rvalue, false)
15277 == GS_ERROR)
15278 {
15279 gimplify_omp_ctxp = target_ctx;
15280 num_teams_lower = integer_minus_one_node;
15281 }
15282 else
15283 {
15284 gimplify_omp_ctxp = target_ctx;
15285 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15286 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15287 = num_teams_lower;
15288 }
15289 }
15290 }
15291 }
15292 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15293 {
15294 p = &thread_limit;
15295 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15296 }
15297 else
15298 continue;
15299 expr = OMP_CLAUSE_OPERAND (c, 0);
15300 if (TREE_CODE (expr) == INTEGER_CST)
15301 {
15302 *p = expr;
15303 continue;
15304 }
15305 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15306 {
15307 *p = integer_minus_one_node;
15308 continue;
15309 }
15310 *p = expr;
15311 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15312 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15313 == GS_ERROR)
15314 {
15315 gimplify_omp_ctxp = target_ctx;
15316 *p = integer_minus_one_node;
15317 continue;
15318 }
15319 gimplify_omp_ctxp = target_ctx;
15320 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15321 OMP_CLAUSE_OPERAND (c, 0) = *p;
15322 }
15323 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15324 {
15325 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15326 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15327 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15328 OMP_TARGET_CLAUSES (target) = c;
15329 }
15330 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15331 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15332 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15333 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15334 OMP_TARGET_CLAUSES (target) = c;
15335 }
15336
15337 /* Gimplify the gross structure of several OMP constructs. */
15338
15339 static void
15340 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15341 {
15342 tree expr = *expr_p;
15343 gimple *stmt;
15344 gimple_seq body = NULL;
15345 enum omp_region_type ort;
15346
15347 switch (TREE_CODE (expr))
15348 {
15349 case OMP_SECTIONS:
15350 case OMP_SINGLE:
15351 ort = ORT_WORKSHARE;
15352 break;
15353 case OMP_SCOPE:
15354 ort = ORT_TASKGROUP;
15355 break;
15356 case OMP_TARGET:
15357 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15358 break;
15359 case OACC_KERNELS:
15360 ort = ORT_ACC_KERNELS;
15361 break;
15362 case OACC_PARALLEL:
15363 ort = ORT_ACC_PARALLEL;
15364 break;
15365 case OACC_SERIAL:
15366 ort = ORT_ACC_SERIAL;
15367 break;
15368 case OACC_DATA:
15369 ort = ORT_ACC_DATA;
15370 break;
15371 case OMP_TARGET_DATA:
15372 ort = ORT_TARGET_DATA;
15373 break;
15374 case OMP_TEAMS:
15375 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15376 if (gimplify_omp_ctxp == NULL
15377 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15378 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15379 break;
15380 case OACC_HOST_DATA:
15381 ort = ORT_ACC_HOST_DATA;
15382 break;
15383 default:
15384 gcc_unreachable ();
15385 }
15386
15387 bool save_in_omp_construct = in_omp_construct;
15388 if ((ort & ORT_ACC) == 0)
15389 in_omp_construct = false;
15390 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15391 TREE_CODE (expr));
15392 if (TREE_CODE (expr) == OMP_TARGET)
15393 optimize_target_teams (expr, pre_p);
15394 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15395 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15396 {
15397 push_gimplify_context ();
15398 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15399 if (gimple_code (g) == GIMPLE_BIND)
15400 pop_gimplify_context (g);
15401 else
15402 pop_gimplify_context (NULL);
15403 if ((ort & ORT_TARGET_DATA) != 0)
15404 {
15405 enum built_in_function end_ix;
15406 switch (TREE_CODE (expr))
15407 {
15408 case OACC_DATA:
15409 case OACC_HOST_DATA:
15410 end_ix = BUILT_IN_GOACC_DATA_END;
15411 break;
15412 case OMP_TARGET_DATA:
15413 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15414 break;
15415 default:
15416 gcc_unreachable ();
15417 }
15418 tree fn = builtin_decl_explicit (end_ix);
15419 g = gimple_build_call (fn, 0);
15420 gimple_seq cleanup = NULL;
15421 gimple_seq_add_stmt (&cleanup, g);
15422 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15423 body = NULL;
15424 gimple_seq_add_stmt (&body, g);
15425 }
15426 }
15427 else
15428 gimplify_and_add (OMP_BODY (expr), &body);
15429 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15430 TREE_CODE (expr));
15431 in_omp_construct = save_in_omp_construct;
15432
15433 switch (TREE_CODE (expr))
15434 {
15435 case OACC_DATA:
15436 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15437 OMP_CLAUSES (expr));
15438 break;
15439 case OACC_HOST_DATA:
15440 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15441 {
15442 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15443 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15444 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15445 }
15446
15447 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15448 OMP_CLAUSES (expr));
15449 break;
15450 case OACC_KERNELS:
15451 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15452 OMP_CLAUSES (expr));
15453 break;
15454 case OACC_PARALLEL:
15455 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15456 OMP_CLAUSES (expr));
15457 break;
15458 case OACC_SERIAL:
15459 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15460 OMP_CLAUSES (expr));
15461 break;
15462 case OMP_SECTIONS:
15463 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15464 break;
15465 case OMP_SINGLE:
15466 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15467 break;
15468 case OMP_SCOPE:
15469 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15470 break;
15471 case OMP_TARGET:
15472 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15473 OMP_CLAUSES (expr));
15474 break;
15475 case OMP_TARGET_DATA:
15476 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15477 to be evaluated before the use_device_{ptr,addr} clauses if they
15478 refer to the same variables. */
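/* E.g. (illustrative) for
     #pragma omp target data map(tofrom: p) use_device_ptr(p)
   the map clause must take effect first so that the device copy of p
   already exists when p is translated to its device pointer, regardless
   of the clause order in the user's source.  */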
15479 {
15480 tree use_device_clauses;
15481 tree *pc, *uc = &use_device_clauses;
15482 for (pc = &OMP_CLAUSES (expr); *pc; )
15483 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15484 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15485 {
15486 *uc = *pc;
15487 *pc = OMP_CLAUSE_CHAIN (*pc);
15488 uc = &OMP_CLAUSE_CHAIN (*uc);
15489 }
15490 else
15491 pc = &OMP_CLAUSE_CHAIN (*pc);
15492 *uc = NULL_TREE;
15493 *pc = use_device_clauses;
15494 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15495 OMP_CLAUSES (expr));
15496 }
15497 break;
15498 case OMP_TEAMS:
15499 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15500 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15501 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15502 break;
15503 default:
15504 gcc_unreachable ();
15505 }
15506
15507 gimplify_seq_add_stmt (pre_p, stmt);
15508 *expr_p = NULL_TREE;
15509 }
15510
15511 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15512 target update constructs. */
15513
15514 static void
15515 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15516 {
15517 tree expr = *expr_p;
15518 int kind;
15519 gomp_target *stmt;
15520 enum omp_region_type ort = ORT_WORKSHARE;
15521
15522 switch (TREE_CODE (expr))
15523 {
15524 case OACC_ENTER_DATA:
15525 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15526 ort = ORT_ACC;
15527 break;
15528 case OACC_EXIT_DATA:
15529 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15530 ort = ORT_ACC;
15531 break;
15532 case OACC_UPDATE:
15533 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15534 ort = ORT_ACC;
15535 break;
15536 case OMP_TARGET_UPDATE:
15537 kind = GF_OMP_TARGET_KIND_UPDATE;
15538 break;
15539 case OMP_TARGET_ENTER_DATA:
15540 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15541 break;
15542 case OMP_TARGET_EXIT_DATA:
15543 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15544 break;
15545 default:
15546 gcc_unreachable ();
15547 }
15548 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15549 ort, TREE_CODE (expr));
15550 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15551 TREE_CODE (expr));
15552 if (TREE_CODE (expr) == OACC_UPDATE
15553 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15554 OMP_CLAUSE_IF_PRESENT))
15555 {
15556 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15557 clause. */
15558 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15559 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15560 switch (OMP_CLAUSE_MAP_KIND (c))
15561 {
15562 case GOMP_MAP_FORCE_TO:
15563 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15564 break;
15565 case GOMP_MAP_FORCE_FROM:
15566 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15567 break;
15568 default:
15569 break;
15570 }
15571 }
15572 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15573 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15574 OMP_CLAUSE_FINALIZE))
15575 {
15576 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15577 semantics. */
15578 bool have_clause = false;
15579 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15580 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15581 switch (OMP_CLAUSE_MAP_KIND (c))
15582 {
15583 case GOMP_MAP_FROM:
15584 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15585 have_clause = true;
15586 break;
15587 case GOMP_MAP_RELEASE:
15588 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15589 have_clause = true;
15590 break;
15591 case GOMP_MAP_TO_PSET:
15592 /* Fortran arrays with descriptors must map that descriptor when
15593 doing standalone "attach" operations (in OpenACC). In that
15594 case GOMP_MAP_TO_PSET appears by itself with no preceding
15595 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15596 break;
15597 case GOMP_MAP_POINTER:
15598 /* TODO PR92929: we may see these here, but they'll always follow
15599 one of the clauses above, and will be handled by libgomp as
15600 one group, so no handling required here. */
15601 gcc_assert (have_clause);
15602 break;
15603 case GOMP_MAP_DETACH:
15604 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15605 have_clause = false;
15606 break;
15607 case GOMP_MAP_STRUCT:
15608 have_clause = false;
15609 break;
15610 default:
15611 gcc_unreachable ();
15612 }
15613 }
15614 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15615
15616 gimplify_seq_add_stmt (pre_p, stmt);
15617 *expr_p = NULL_TREE;
15618 }
15619
15620 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15621 stabilized the lhs of the atomic operation as *ADDR. Return true if
15622 EXPR is this stabilized form. */
15623
15624 static bool
15625 goa_lhs_expr_p (tree expr, tree addr)
15626 {
15627 /* Also include casts to other type variants. The C front end is fond
15628 of adding these for e.g. volatile variables. This is like
15629 STRIP_TYPE_NOPS but includes the main variant lookup. */
15630 STRIP_USELESS_TYPE_CONVERSION (expr);
15631
15632 if (TREE_CODE (expr) == INDIRECT_REF)
15633 {
15634 expr = TREE_OPERAND (expr, 0);
15635 while (expr != addr
15636 && (CONVERT_EXPR_P (expr)
15637 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15638 && TREE_CODE (expr) == TREE_CODE (addr)
15639 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15640 {
15641 expr = TREE_OPERAND (expr, 0);
15642 addr = TREE_OPERAND (addr, 0);
15643 }
15644 if (expr == addr)
15645 return true;
15646 return (TREE_CODE (addr) == ADDR_EXPR
15647 && TREE_CODE (expr) == ADDR_EXPR
15648 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15649 }
15650 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15651 return true;
15652 return false;
15653 }
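/* Illustrative sketch, not from the original source: given

     #pragma omp atomic
     x = x + 1;

   the front end stabilizes the lhs so that ADDR is &x and the update
   expression refers to the lhs as *&x, possibly wrapped in useless type
   conversions (e.g. casts between type variants for a volatile 'x');
   goa_lhs_expr_p strips those and matches the INDIRECT_REF operand back
   to ADDR.  */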
15654
15655 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15656 expression does not involve the lhs, evaluate it into a temporary.
15657 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15658 or -1 if an error was encountered. */
15659
15660 static int
15661 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15662 tree lhs_var, tree &target_expr, bool rhs, int depth)
15663 {
15664 tree expr = *expr_p;
15665 int saw_lhs = 0;
15666
15667 if (goa_lhs_expr_p (expr, lhs_addr))
15668 {
15669 if (pre_p)
15670 *expr_p = lhs_var;
15671 return 1;
15672 }
15673 if (is_gimple_val (expr))
15674 return 0;
15675
15676 /* The deepest the lhs can appear in an expression is in the pattern
15677 __builtin_clear_padding (...), __builtin_clear_padding (...),
15678 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15679 if (++depth > 7)
15680 goto finish;
15681
15682 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15683 {
15684 case tcc_binary:
15685 case tcc_comparison:
15686 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15687 lhs_var, target_expr, true, depth);
15688 /* FALLTHRU */
15689 case tcc_unary:
15690 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15691 lhs_var, target_expr, true, depth);
15692 break;
15693 case tcc_expression:
15694 switch (TREE_CODE (expr))
15695 {
15696 case TRUTH_ANDIF_EXPR:
15697 case TRUTH_ORIF_EXPR:
15698 case TRUTH_AND_EXPR:
15699 case TRUTH_OR_EXPR:
15700 case TRUTH_XOR_EXPR:
15701 case BIT_INSERT_EXPR:
15702 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15703 lhs_addr, lhs_var, target_expr, true,
15704 depth);
15705 /* FALLTHRU */
15706 case TRUTH_NOT_EXPR:
15707 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15708 lhs_addr, lhs_var, target_expr, true,
15709 depth);
15710 break;
15711 case MODIFY_EXPR:
15712 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15713 target_expr, true, depth))
15714 break;
15715 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15716 lhs_addr, lhs_var, target_expr, true,
15717 depth);
15718 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15719 lhs_addr, lhs_var, target_expr, false,
15720 depth);
15721 break;
15723 case ADDR_EXPR:
15724 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15725 target_expr, true, depth))
15726 break;
15727 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15728 lhs_addr, lhs_var, target_expr, false,
15729 depth);
15730 break;
15731 case COMPOUND_EXPR:
15732 /* Break out any preevaluations from cp_build_modify_expr. */
15733 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15734 expr = TREE_OPERAND (expr, 1))
15735 {
15736 /* Special-case __builtin_clear_padding call before
15737 __builtin_memcmp. */
15738 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15739 {
15740 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15741 if (fndecl
15742 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15743 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15744 && (!pre_p
15745 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15746 lhs_addr, lhs_var,
15747 target_expr, true, depth)))
15748 {
15749 if (pre_p)
15750 *expr_p = expr;
15751 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15752 pre_p, lhs_addr, lhs_var,
15753 target_expr, true, depth);
15754 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15755 pre_p, lhs_addr, lhs_var,
15756 target_expr, rhs, depth);
15757 return saw_lhs;
15758 }
15759 }
15760
15761 if (pre_p)
15762 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15763 }
15764 if (!pre_p)
15765 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15766 target_expr, rhs, depth);
15767 *expr_p = expr;
15768 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15769 target_expr, rhs, depth);
15770 case COND_EXPR:
15771 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15772 lhs_var, target_expr, true, depth))
15773 break;
15774 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15775 lhs_addr, lhs_var, target_expr, true,
15776 depth);
15777 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15778 lhs_addr, lhs_var, target_expr, true,
15779 depth);
15780 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15781 lhs_addr, lhs_var, target_expr, true,
15782 depth);
15783 break;
15784 case TARGET_EXPR:
15785 if (TARGET_EXPR_INITIAL (expr))
15786 {
15787 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15788 lhs_var, target_expr, true,
15789 depth))
15790 break;
15791 if (expr == target_expr)
15792 saw_lhs = 1;
15793 else
15794 {
15795 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15796 pre_p, lhs_addr, lhs_var,
15797 target_expr, true, depth);
15798 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15799 target_expr = expr;
15800 }
15801 }
15802 break;
15803 default:
15804 break;
15805 }
15806 break;
15807 case tcc_reference:
15808 if (TREE_CODE (expr) == BIT_FIELD_REF
15809 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15810 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15811 lhs_addr, lhs_var, target_expr, true,
15812 depth);
15813 break;
15814 case tcc_vl_exp:
15815 if (TREE_CODE (expr) == CALL_EXPR)
15816 {
15817 if (tree fndecl = get_callee_fndecl (expr))
15818 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15819 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
15820 {
15821 int nargs = call_expr_nargs (expr);
15822 for (int i = 0; i < nargs; i++)
15823 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15824 pre_p, lhs_addr, lhs_var,
15825 target_expr, true, depth);
15826 }
15827 }
15828 break;
15829 default:
15830 break;
15831 }
15832
15833 finish:
15834 if (saw_lhs == 0 && pre_p)
15835 {
15836 enum gimplify_status gs;
15837 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15838 {
15839 gimplify_stmt (&expr, pre_p);
15840 return saw_lhs;
15841 }
15842 else if (rhs)
15843 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15844 else
15845 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15846 if (gs != GS_ALL_DONE)
15847 saw_lhs = -1;
15848 }
15849
15850 return saw_lhs;
15851 }
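/* Illustrative sketch, not from the original source (D.1 and D.tmp are
   made-up temporary names): stabilizing the rhs of

     #pragma omp atomic
     x = x + foo ();

   with LHS_VAR being D.tmp replaces the appearance of the lhs with
   D.tmp and evaluates the rest into temporaries emitted to *PRE_P,
   roughly

     D.1 = foo ();
     ... D.tmp + D.1 ...

   returning 1 because the lhs appeared as a subexpression.  */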
15852
15853 /* Gimplify an OMP_ATOMIC statement. */
15854
15855 static enum gimplify_status
15856 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15857 {
15858 tree addr = TREE_OPERAND (*expr_p, 0);
15859 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15860 ? NULL : TREE_OPERAND (*expr_p, 1);
15861 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15862 tree tmp_load;
15863 gomp_atomic_load *loadstmt;
15864 gomp_atomic_store *storestmt;
15865 tree target_expr = NULL_TREE;
15866
15867 tmp_load = create_tmp_reg (type);
15868 if (rhs
15869 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15870 true, 0) < 0)
15871 return GS_ERROR;
15872
15873 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15874 != GS_ALL_DONE)
15875 return GS_ERROR;
15876
15877 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15878 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15879 gimplify_seq_add_stmt (pre_p, loadstmt);
15880 if (rhs)
15881 {
15882 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15883 representatives. Use BIT_FIELD_REF on the lhs instead. */
15884 tree rhsarg = rhs;
15885 if (TREE_CODE (rhs) == COND_EXPR)
15886 rhsarg = TREE_OPERAND (rhs, 1);
15887 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15888 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15889 {
15890 tree bitpos = TREE_OPERAND (rhsarg, 2);
15891 tree op1 = TREE_OPERAND (rhsarg, 1);
15892 tree bitsize;
15893 tree tmp_store = tmp_load;
15894 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15895 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15896 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15897 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15898 else
15899 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15900 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15901 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15902 MODIFY_EXPR, void_type_node,
15903 build3_loc (EXPR_LOCATION (rhsarg),
15904 BIT_FIELD_REF, TREE_TYPE (op1),
15905 tmp_store, bitsize, bitpos), op1);
15906 if (TREE_CODE (rhs) == COND_EXPR)
15907 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15908 TREE_OPERAND (rhs, 0), t, void_node);
15909 gimplify_and_add (t, pre_p);
15910 rhs = tmp_store;
15911 }
15912 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15913 if (TREE_CODE (rhs) == COND_EXPR)
15914 gimplify_ctxp->allow_rhs_cond_expr = true;
15915 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15916 is_gimple_val, fb_rvalue);
15917 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15918 if (gs != GS_ALL_DONE)
15919 return GS_ERROR;
15920 }
15921
15922 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
15923 rhs = tmp_load;
15924 storestmt
15925 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15926 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
15927 {
15928 gimple_omp_atomic_set_weak (loadstmt);
15929 gimple_omp_atomic_set_weak (storestmt);
15930 }
15931 gimplify_seq_add_stmt (pre_p, storestmt);
15932 switch (TREE_CODE (*expr_p))
15933 {
15934 case OMP_ATOMIC_READ:
15935 case OMP_ATOMIC_CAPTURE_OLD:
15936 *expr_p = tmp_load;
15937 gimple_omp_atomic_set_need_value (loadstmt);
15938 break;
15939 case OMP_ATOMIC_CAPTURE_NEW:
15940 *expr_p = rhs;
15941 gimple_omp_atomic_set_need_value (storestmt);
15942 break;
15943 default:
15944 *expr_p = NULL;
15945 break;
15946 }
15947
15948 return GS_ALL_DONE;
15949 }
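/* Illustrative sketch, not from the original source: for

     #pragma omp atomic capture
     v = x += 1;

   (OMP_ATOMIC_CAPTURE_NEW) the code above emits roughly

     D.tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
     GIMPLE_OMP_ATOMIC_STORE <D.tmp + 1>   [need_value]

   and replaces *EXPR_P with the stored rhs, which is what gets
   captured into 'v'.  */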
15950
15951 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15952 body, and adding some EH bits. */
15953
15954 static enum gimplify_status
15955 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
15956 {
15957 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
15958 gimple *body_stmt;
15959 gtransaction *trans_stmt;
15960 gimple_seq body = NULL;
15961 int subcode = 0;
15962
15963 /* Wrap the transaction body in a BIND_EXPR so we have a context
15964 where to put decls for OMP. */
15965 if (TREE_CODE (tbody) != BIND_EXPR)
15966 {
15967 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
15968 TREE_SIDE_EFFECTS (bind) = 1;
15969 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
15970 TRANSACTION_EXPR_BODY (expr) = bind;
15971 }
15972
15973 push_gimplify_context ();
15974 temp = voidify_wrapper_expr (*expr_p, NULL);
15975
15976 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
15977 pop_gimplify_context (body_stmt);
15978
15979 trans_stmt = gimple_build_transaction (body);
15980 if (TRANSACTION_EXPR_OUTER (expr))
15981 subcode = GTMA_IS_OUTER;
15982 else if (TRANSACTION_EXPR_RELAXED (expr))
15983 subcode = GTMA_IS_RELAXED;
15984 gimple_transaction_set_subcode (trans_stmt, subcode);
15985
15986 gimplify_seq_add_stmt (pre_p, trans_stmt);
15987
15988 if (temp)
15989 {
15990 *expr_p = temp;
15991 return GS_OK;
15992 }
15993
15994 *expr_p = NULL_TREE;
15995 return GS_ALL_DONE;
15996 }
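/* Illustrative sketch, not from the original source: a body such as

     __transaction_atomic { x++; }

   becomes a GIMPLE_TRANSACTION tuple wrapping the gimplified body;
   __transaction_relaxed would instead get the GTMA_IS_RELAXED subcode,
   and the outer variant GTMA_IS_OUTER.  */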
15997
15998 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
15999 is the OMP_BODY of the original EXPR (which has already been
16000 gimplified so it's not present in the EXPR).
16001
16002 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
16003
16004 static gimple *
16005 gimplify_omp_ordered (tree expr, gimple_seq body)
16006 {
16007 tree c, decls;
16008 int failures = 0;
16009 unsigned int i;
16010 tree source_c = NULL_TREE;
16011 tree sink_c = NULL_TREE;
16012
16013 if (gimplify_omp_ctxp)
16014 {
16015 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16016 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16017 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16018 {
16019 error_at (OMP_CLAUSE_LOCATION (c),
16020 "%<ordered%> construct with %qs clause must be "
16021 "closely nested inside a loop with %<ordered%> clause",
16022 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16023 failures++;
16024 }
16025 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16026 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16027 {
16028 bool fail = false;
16029 sink_c = c;
16030 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16031 continue; /* omp_cur_iteration - 1 */
16032 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16033 decls && TREE_CODE (decls) == TREE_LIST;
16034 decls = TREE_CHAIN (decls), ++i)
16035 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16036 continue;
16037 else if (TREE_VALUE (decls)
16038 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16039 {
16040 error_at (OMP_CLAUSE_LOCATION (c),
16041 "variable %qE is not an iteration "
16042 "of outermost loop %d, expected %qE",
16043 TREE_VALUE (decls), i + 1,
16044 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16045 fail = true;
16046 failures++;
16047 }
16048 else
16049 TREE_VALUE (decls)
16050 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16051 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16052 {
16053 error_at (OMP_CLAUSE_LOCATION (c),
16054 "number of variables in %qs clause with "
16055 "%<sink%> modifier does not match number of "
16056 "iteration variables",
16057 OMP_CLAUSE_DOACROSS_DEPEND (c)
16058 ? "depend" : "doacross");
16059 failures++;
16060 }
16061 }
16062 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16063 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16064 {
16065 if (source_c)
16066 {
16067 error_at (OMP_CLAUSE_LOCATION (c),
16068 "more than one %qs clause with %<source%> "
16069 "modifier on an %<ordered%> construct",
16070 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16071 ? "depend" : "doacross");
16072 failures++;
16073 }
16074 else
16075 source_c = c;
16076 }
16077 }
16078 if (source_c && sink_c)
16079 {
16080 error_at (OMP_CLAUSE_LOCATION (source_c),
16081 "%qs clause with %<source%> modifier specified "
16082 "together with %qs clauses with %<sink%> modifier "
16083 "on the same construct",
16084 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16085 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16086 failures++;
16087 }
16088
16089 if (failures)
16090 return gimple_build_nop ();
16091 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16092 }
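/* Illustrative sketch, not from the original source: within

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
	 {
	   #pragma omp ordered depend(sink: i-1, j)
	   ...
	 }

   the sink clause must list the iteration variables of the ordered
   loop nest, in order and in full; the checks above diagnose a wrong
   variable or a wrong count, and on failure the whole construct
   degrades to a GIMPLE_NOP.  */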
16093
16094 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16095 expression produces a value to be used as an operand inside a GIMPLE
16096 statement, the value will be stored back in *EXPR_P. This value will
16097 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16098 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16099 emitted in PRE_P and POST_P.
16100
16101 Additionally, this process may overwrite parts of the input
16102 expression during gimplification. Ideally, it should be
16103 possible to do non-destructive gimplification.
16104
16105 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16106 the expression needs to evaluate to a value to be used as
16107 an operand in a GIMPLE statement, this value will be stored in
16108 *EXPR_P on exit. This happens when the caller specifies one
16109 of fb_lvalue or fb_rvalue fallback flags.
16110
16111 PRE_P will contain the sequence of GIMPLE statements corresponding
16112 to the evaluation of EXPR and all the side-effects that must
16113 be executed before the main expression. On exit, the last
16114 statement of PRE_P is the core statement being gimplified. For
16115 instance, when gimplifying 'if (++a)' the last statement in
16116 PRE_P will be 'if (t.1)' where t.1 is the result of
16117 pre-incrementing 'a'.
16118
16119 POST_P will contain the sequence of GIMPLE statements corresponding
16120 to the evaluation of all the side-effects that must be executed
16121 after the main expression. If this is NULL, the post
16122 side-effects are stored at the end of PRE_P.
16123
16124 The reason why the output is split in two is to handle post
16125 side-effects explicitly. In some cases, an expression may have
16126 inner and outer post side-effects which need to be emitted in
16127 an order different from the one given by the recursive
16128 traversal. For instance, for the expression (*p--)++ the post
16129 side-effects of '--' must actually occur *after* the post
16130 side-effects of '++'. However, gimplification will first visit
16131 the inner expression, so if a separate POST sequence was not
16132 used, the resulting sequence would be:
16133
16134 1 t.1 = *p
16135 2 p = p - 1
16136 3 t.2 = t.1 + 1
16137 4 *p = t.2
16138
16139 However, the post-decrement operation in line #2 must not be
16140 evaluated until after the store to *p at line #4, so the
16141 correct sequence should be:
16142
16143 1 t.1 = *p
16144 2 t.2 = t.1 + 1
16145 3 *p = t.2
16146 4 p = p - 1
16147
16148 So, by specifying a separate post queue, it is possible
16149 to emit the post side-effects in the correct order.
16150 If POST_P is NULL, an internal queue will be used. Before
16151 returning to the caller, the sequence POST_P is appended to
16152 the main output sequence PRE_P.
16153
16154 GIMPLE_TEST_F points to a function that takes a tree T and
16155 returns nonzero if T is in the GIMPLE form requested by the
16156 caller. The GIMPLE predicates are in gimple.cc.
16157
16158 FALLBACK tells the function what sort of a temporary we want if
16159 gimplification cannot produce an expression that complies with
16160 GIMPLE_TEST_F.
16161
16162 fb_none means that no temporary should be generated
16163 fb_rvalue means that an rvalue is OK to generate
16164 fb_lvalue means that an lvalue is OK to generate
16165 fb_either means that either is OK, but an lvalue is preferable.
16166 fb_mayfail means that gimplification may fail (in which case
16167 GS_ERROR will be returned)
16168
16169 The return value is either GS_ERROR or GS_ALL_DONE, since this
16170 function iterates until EXPR is completely gimplified or an error
16171 occurs. */
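/* For example (illustrative only), a caller that needs OP to be a
   GIMPLE rvalue would do

     if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
	 == GS_ERROR)
       return GS_ERROR;

   after which OP is a valid GIMPLE operand and the statements needed
   to evaluate it have been appended to *PRE_P.  */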
16172
16173 enum gimplify_status
16174 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16175 bool (*gimple_test_f) (tree), fallback_t fallback)
16176 {
16177 tree tmp;
16178 gimple_seq internal_pre = NULL;
16179 gimple_seq internal_post = NULL;
16180 tree save_expr;
16181 bool is_statement;
16182 location_t saved_location;
16183 enum gimplify_status ret;
16184 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16185 tree label;
16186
16187 save_expr = *expr_p;
16188 if (save_expr == NULL_TREE)
16189 return GS_ALL_DONE;
16190
16191 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16192 is_statement = gimple_test_f == is_gimple_stmt;
16193 if (is_statement)
16194 gcc_assert (pre_p);
16195
16196 /* Consistency checks. */
16197 if (gimple_test_f == is_gimple_reg)
16198 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16199 else if (gimple_test_f == is_gimple_val
16200 || gimple_test_f == is_gimple_call_addr
16201 || gimple_test_f == is_gimple_condexpr_for_cond
16202 || gimple_test_f == is_gimple_mem_rhs
16203 || gimple_test_f == is_gimple_mem_rhs_or_call
16204 || gimple_test_f == is_gimple_reg_rhs
16205 || gimple_test_f == is_gimple_reg_rhs_or_call
16206 || gimple_test_f == is_gimple_asm_val
16207 || gimple_test_f == is_gimple_mem_ref_addr)
16208 gcc_assert (fallback & fb_rvalue);
16209 else if (gimple_test_f == is_gimple_min_lval
16210 || gimple_test_f == is_gimple_lvalue)
16211 gcc_assert (fallback & fb_lvalue);
16212 else if (gimple_test_f == is_gimple_addressable)
16213 gcc_assert (fallback & fb_either);
16214 else if (gimple_test_f == is_gimple_stmt)
16215 gcc_assert (fallback == fb_none);
16216 else
16217 {
16218 /* We should have recognized the GIMPLE_TEST_F predicate to
16219 know what kind of fallback to use in case a temporary is
16220 needed to hold the value or address of *EXPR_P. */
16221 gcc_unreachable ();
16222 }
16223
16224 /* We used to check the predicate here and return immediately if it
16225 succeeds. This is wrong; the design is for gimplification to be
16226 idempotent, and for the predicates to only test for valid forms, not
16227 whether they are fully simplified. */
16228 if (pre_p == NULL)
16229 pre_p = &internal_pre;
16230
16231 if (post_p == NULL)
16232 post_p = &internal_post;
16233
16234 /* Remember the last statements added to PRE_P and POST_P. Every
16235 new statement added by the gimplification helpers needs to be
16236 annotated with location information. To centralize the
16237 responsibility, we remember the last statement that had been
16238 added to both queues before gimplifying *EXPR_P. If
16239 gimplification produces new statements in PRE_P and POST_P, those
16240 statements will be annotated with the same location information
16241 as *EXPR_P. */
16242 pre_last_gsi = gsi_last (*pre_p);
16243 post_last_gsi = gsi_last (*post_p);
16244
16245 saved_location = input_location;
16246 if (save_expr != error_mark_node
16247 && EXPR_HAS_LOCATION (*expr_p))
16248 input_location = EXPR_LOCATION (*expr_p);
16249
16250 /* Loop over the specific gimplifiers until the toplevel node
16251 remains the same. */
16252 do
16253 {
16254 /* Strip away as many useless type conversions as possible
16255 at the toplevel. */
16256 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16257
16258 /* Remember the expr. */
16259 save_expr = *expr_p;
16260
16261 /* Die, die, die, my darling. */
16262 if (error_operand_p (save_expr))
16263 {
16264 ret = GS_ERROR;
16265 break;
16266 }
16267
16268 /* Do any language-specific gimplification. */
16269 ret = ((enum gimplify_status)
16270 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16271 if (ret == GS_OK)
16272 {
16273 if (*expr_p == NULL_TREE)
16274 break;
16275 if (*expr_p != save_expr)
16276 continue;
16277 }
16278 else if (ret != GS_UNHANDLED)
16279 break;
16280
16281 /* Make sure that all the cases set 'ret' appropriately. */
16282 ret = GS_UNHANDLED;
16283 switch (TREE_CODE (*expr_p))
16284 {
16285 /* First deal with the special cases. */
16286
16287 case POSTINCREMENT_EXPR:
16288 case POSTDECREMENT_EXPR:
16289 case PREINCREMENT_EXPR:
16290 case PREDECREMENT_EXPR:
16291 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16292 fallback != fb_none,
16293 TREE_TYPE (*expr_p));
16294 break;
16295
16296 case VIEW_CONVERT_EXPR:
16297 if ((fallback & fb_rvalue)
16298 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16299 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16300 {
16301 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16302 post_p, is_gimple_val, fb_rvalue);
16303 recalculate_side_effects (*expr_p);
16304 break;
16305 }
16306 /* Fallthru. */
16307
16308 case ARRAY_REF:
16309 case ARRAY_RANGE_REF:
16310 case REALPART_EXPR:
16311 case IMAGPART_EXPR:
16312 case COMPONENT_REF:
16313 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16314 fallback ? fallback : fb_rvalue);
16315 break;
16316
16317 case COND_EXPR:
16318 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16319
16320 /* C99 code may assign to an array in a structure value of a
16321 conditional expression, and this has undefined behavior
16322 only on execution, so create a temporary if an lvalue is
16323 required. */
16324 if (fallback == fb_lvalue)
16325 {
16326 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16327 mark_addressable (*expr_p);
16328 ret = GS_OK;
16329 }
16330 break;
16331
16332 case CALL_EXPR:
16333 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16334
16335 /* C99 code may assign to an array in a structure returned
16336 from a function, and this has undefined behavior only on
16337 execution, so create a temporary if an lvalue is
16338 required. */
16339 if (fallback == fb_lvalue)
16340 {
16341 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16342 mark_addressable (*expr_p);
16343 ret = GS_OK;
16344 }
16345 break;
16346
16347 case TREE_LIST:
16348 gcc_unreachable ();
16349
16350 case COMPOUND_EXPR:
16351 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16352 break;
16353
16354 case COMPOUND_LITERAL_EXPR:
16355 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16356 gimple_test_f, fallback);
16357 break;
16358
16359 case MODIFY_EXPR:
16360 case INIT_EXPR:
16361 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16362 fallback != fb_none);
16363 break;
16364
16365 case TRUTH_ANDIF_EXPR:
16366 case TRUTH_ORIF_EXPR:
16367 {
16368 /* Preserve the original type of the expression and the
16369 source location of the outer expression. */
16370 tree org_type = TREE_TYPE (*expr_p);
16371 *expr_p = gimple_boolify (*expr_p);
16372 *expr_p = build3_loc (input_location, COND_EXPR,
16373 org_type, *expr_p,
16374 fold_convert_loc
16375 (input_location,
16376 org_type, boolean_true_node),
16377 fold_convert_loc
16378 (input_location,
16379 org_type, boolean_false_node));
16380 ret = GS_OK;
16381 break;
16382 }
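	  /* E.g. (illustrative): for operands of type int, the above
	     rewrites 'a && b' into the COND_EXPR 'a && b ? 1 : 0' of
	     the original type; the COND_EXPR handling then expands
	     the short-circuit control flow on a later iteration.  */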
16383
16384 case TRUTH_NOT_EXPR:
16385 {
16386 tree type = TREE_TYPE (*expr_p);
16387 /* The parsers are careful to generate TRUTH_NOT_EXPR
16388 only with operands that are always zero or one.
16389 We do not fold here but handle the only interesting case
16390 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16391 *expr_p = gimple_boolify (*expr_p);
16392 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16393 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16394 TREE_TYPE (*expr_p),
16395 TREE_OPERAND (*expr_p, 0));
16396 else
16397 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16398 TREE_TYPE (*expr_p),
16399 TREE_OPERAND (*expr_p, 0),
16400 build_int_cst (TREE_TYPE (*expr_p), 1));
16401 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16402 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16403 ret = GS_OK;
16404 break;
16405 }
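	  /* E.g. (illustrative): after boolification, '!x' becomes
	     '~x' if the operand has 1-bit precision and 'x ^ 1'
	     otherwise, converted back to the original type if that
	     conversion is not useless.  */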
16406
16407 case ADDR_EXPR:
16408 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16409 break;
16410
16411 case ANNOTATE_EXPR:
16412 {
16413 tree cond = TREE_OPERAND (*expr_p, 0);
16414 tree kind = TREE_OPERAND (*expr_p, 1);
16415 tree data = TREE_OPERAND (*expr_p, 2);
16416 tree type = TREE_TYPE (cond);
16417 if (!INTEGRAL_TYPE_P (type))
16418 {
16419 *expr_p = cond;
16420 ret = GS_OK;
16421 break;
16422 }
16423 tree tmp = create_tmp_var (type);
16424 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16425 gcall *call
16426 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16427 gimple_call_set_lhs (call, tmp);
16428 gimplify_seq_add_stmt (pre_p, call);
16429 *expr_p = tmp;
16430 ret = GS_ALL_DONE;
16431 break;
16432 }
16433
16434 case VA_ARG_EXPR:
16435 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16436 break;
16437
16438 CASE_CONVERT:
16439 if (IS_EMPTY_STMT (*expr_p))
16440 {
16441 ret = GS_ALL_DONE;
16442 break;
16443 }
16444
16445 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16446 || fallback == fb_none)
16447 {
16448 /* Just strip a conversion to void (or in void context) and
16449 try again. */
16450 *expr_p = TREE_OPERAND (*expr_p, 0);
16451 ret = GS_OK;
16452 break;
16453 }
16454
16455 ret = gimplify_conversion (expr_p);
16456 if (ret == GS_ERROR)
16457 break;
16458 if (*expr_p != save_expr)
16459 break;
16460 /* FALLTHRU */
16461
16462 case FIX_TRUNC_EXPR:
16463 /* unary_expr: ... | '(' cast ')' val | ... */
16464 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16465 is_gimple_val, fb_rvalue);
16466 recalculate_side_effects (*expr_p);
16467 break;
16468
16469 case INDIRECT_REF:
16470 {
16471 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16472 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16473 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16474
16475 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16476 if (*expr_p != save_expr)
16477 {
16478 ret = GS_OK;
16479 break;
16480 }
16481
16482 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16483 is_gimple_reg, fb_rvalue);
16484 if (ret == GS_ERROR)
16485 break;
16486
16487 recalculate_side_effects (*expr_p);
16488 *expr_p = fold_build2_loc (input_location, MEM_REF,
16489 TREE_TYPE (*expr_p),
16490 TREE_OPERAND (*expr_p, 0),
16491 build_int_cst (saved_ptr_type, 0));
16492 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16493 TREE_THIS_NOTRAP (*expr_p) = notrap;
16494 ret = GS_OK;
16495 break;
16496 }
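	  /* E.g. (illustrative): a '*p' that is not folded away is
	     rewritten above into 'MEM_REF <p, 0>', preserving the
	     original volatility and no-trap flags.  */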
16497
16498 /* We arrive here through the various re-gimplification paths. */
16499 case MEM_REF:
16500 /* First try re-folding the whole thing. */
16501 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16502 TREE_OPERAND (*expr_p, 0),
16503 TREE_OPERAND (*expr_p, 1));
16504 if (tmp)
16505 {
16506 REF_REVERSE_STORAGE_ORDER (tmp)
16507 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16508 *expr_p = tmp;
16509 recalculate_side_effects (*expr_p);
16510 ret = GS_OK;
16511 break;
16512 }
16513 /* Avoid re-gimplifying the address operand if it is already
16514 in suitable form. Re-gimplifying would mark the address
16515 operand addressable. Always gimplify when not in SSA form
16516 as we still may have to gimplify decls with value-exprs. */
16517 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16518 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16519 {
16520 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16521 is_gimple_mem_ref_addr, fb_rvalue);
16522 if (ret == GS_ERROR)
16523 break;
16524 }
16525 recalculate_side_effects (*expr_p);
16526 ret = GS_ALL_DONE;
16527 break;
16528
16529 /* Constants need not be gimplified. */
16530 case INTEGER_CST:
16531 case REAL_CST:
16532 case FIXED_CST:
16533 case STRING_CST:
16534 case COMPLEX_CST:
16535 case VECTOR_CST:
16536 /* Drop the overflow flag on constants; we do not want
16537 that in the GIMPLE IL. */
16538 if (TREE_OVERFLOW_P (*expr_p))
16539 *expr_p = drop_tree_overflow (*expr_p);
16540 ret = GS_ALL_DONE;
16541 break;
16542
16543 case CONST_DECL:
16544 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16545 CONST_DECL node. Otherwise the decl is replaceable by its
16546 value. */
16547 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16548 if (fallback & fb_lvalue)
16549 ret = GS_ALL_DONE;
16550 else
16551 {
16552 *expr_p = DECL_INITIAL (*expr_p);
16553 ret = GS_OK;
16554 }
16555 break;
16556
16557 case DECL_EXPR:
16558 ret = gimplify_decl_expr (expr_p, pre_p);
16559 break;
16560
16561 case BIND_EXPR:
16562 ret = gimplify_bind_expr (expr_p, pre_p);
16563 break;
16564
16565 case LOOP_EXPR:
16566 ret = gimplify_loop_expr (expr_p, pre_p);
16567 break;
16568
16569 case SWITCH_EXPR:
16570 ret = gimplify_switch_expr (expr_p, pre_p);
16571 break;
16572
16573 case EXIT_EXPR:
16574 ret = gimplify_exit_expr (expr_p);
16575 break;
16576
16577 case GOTO_EXPR:
16578 /* If the target is not a LABEL_DECL, then it is a computed jump
16579 and the target needs to be gimplified. */
16580 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16581 {
16582 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16583 NULL, is_gimple_val, fb_rvalue);
16584 if (ret == GS_ERROR)
16585 break;
16586 }
16587 gimplify_seq_add_stmt (pre_p,
16588 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16589 ret = GS_ALL_DONE;
16590 break;
16591
16592 case PREDICT_EXPR:
16593 gimplify_seq_add_stmt (pre_p,
16594 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16595 PREDICT_EXPR_OUTCOME (*expr_p)));
16596 ret = GS_ALL_DONE;
16597 break;
16598
16599 case LABEL_EXPR:
16600 ret = gimplify_label_expr (expr_p, pre_p);
16601 label = LABEL_EXPR_LABEL (*expr_p);
16602 gcc_assert (decl_function_context (label) == current_function_decl);
16603
16604 /* If the label is used in a goto statement, or the address of the
16605 label is taken, we need to unpoison all variables that were seen
16606 so far. Doing so prevents us from reporting false positives. */
16607 if (asan_poisoned_variables
16608 && asan_used_labels != NULL
16609 && asan_used_labels->contains (label)
16610 && !gimplify_omp_ctxp)
16611 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16612 break;
16613
16614 case CASE_LABEL_EXPR:
16615 ret = gimplify_case_label_expr (expr_p, pre_p);
16616
16617 if (gimplify_ctxp->live_switch_vars)
16618 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16619 pre_p);
16620 break;
16621
16622 case RETURN_EXPR:
16623 ret = gimplify_return_expr (*expr_p, pre_p);
16624 break;
16625
16626 case CONSTRUCTOR:
16627 /* Don't reduce this in place; let gimplify_init_constructor work its
16628 magic. But if we're just elaborating this for side effects, just
16629 gimplify any element that has side-effects. */
16630 if (fallback == fb_none)
16631 {
16632 unsigned HOST_WIDE_INT ix;
16633 tree val;
16634 tree temp = NULL_TREE;
16635 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16636 if (TREE_SIDE_EFFECTS (val))
16637 append_to_statement_list (val, &temp);
16638
16639 *expr_p = temp;
16640 ret = temp ? GS_OK : GS_ALL_DONE;
16641 }
16642 /* C99 code may assign to an array in a constructed
16643 structure or union, and this has undefined behavior only
16644 on execution, so create a temporary if an lvalue is
16645 required. */
16646 else if (fallback == fb_lvalue)
16647 {
16648 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16649 mark_addressable (*expr_p);
16650 ret = GS_OK;
16651 }
16652 else
16653 ret = GS_ALL_DONE;
16654 break;
16655
16656 /* The following are special cases that are not handled by the
16657 original GIMPLE grammar. */
16658
16659 /* SAVE_EXPR nodes are converted into GIMPLE identifiers and
16660 eliminated. */
16661 case SAVE_EXPR:
16662 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16663 break;
16664
16665 case BIT_FIELD_REF:
16666 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16667 post_p, is_gimple_lvalue, fb_either);
16668 recalculate_side_effects (*expr_p);
16669 break;
16670
16671 case TARGET_MEM_REF:
16672 {
16673 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16674
16675 if (TMR_BASE (*expr_p))
16676 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16677 post_p, is_gimple_mem_ref_addr, fb_either);
16678 if (TMR_INDEX (*expr_p))
16679 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16680 post_p, is_gimple_val, fb_rvalue);
16681 if (TMR_INDEX2 (*expr_p))
16682 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16683 post_p, is_gimple_val, fb_rvalue);
16684 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16685 ret = MIN (r0, r1);
16686 }
16687 break;
16688
16689 case NON_LVALUE_EXPR:
16690 /* This should have been stripped above. */
16691 gcc_unreachable ();
16692
16693 case ASM_EXPR:
16694 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16695 break;
16696
16697 case TRY_FINALLY_EXPR:
16698 case TRY_CATCH_EXPR:
16699 {
16700 gimple_seq eval, cleanup;
16701 gtry *try_;
16702
16703 /* Calls to destructors are generated automatically in the
16704 FINALLY/CATCH block. They should have UNKNOWN_LOCATION as their
16705 location. However, gimplify_call_expr will reset such call stmts
16706 to input_location if it finds a stmt's location is unknown. To
16707 prevent that resetting for destructors, we set input_location to
16708 unknown. Note that this only affects the destructor calls in the
16709 FINALLY/CATCH block; input_location is automatically restored to
16710 its original value by the end of gimplify_expr. */
16711 input_location = UNKNOWN_LOCATION;
16712 eval = cleanup = NULL;
16713 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16714 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16715 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16716 {
16717 gimple_seq n = NULL, e = NULL;
16718 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16719 0), &n);
16720 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16721 1), &e);
16722 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16723 {
16724 geh_else *stmt = gimple_build_eh_else (n, e);
16725 gimple_seq_add_stmt (&cleanup, stmt);
16726 }
16727 }
16728 else
16729 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16730 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16731 if (gimple_seq_empty_p (cleanup))
16732 {
16733 gimple_seq_add_seq (pre_p, eval);
16734 ret = GS_ALL_DONE;
16735 break;
16736 }
16737 try_ = gimple_build_try (eval, cleanup,
16738 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16739 ? GIMPLE_TRY_FINALLY
16740 : GIMPLE_TRY_CATCH);
16741 if (EXPR_HAS_LOCATION (save_expr))
16742 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16743 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16744 gimple_set_location (try_, saved_location);
16745 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16746 gimple_try_set_catch_is_cleanup (try_,
16747 TRY_CATCH_IS_CLEANUP (*expr_p));
16748 gimplify_seq_add_stmt (pre_p, try_);
16749 ret = GS_ALL_DONE;
16750 break;
16751 }
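	  /* E.g. (illustrative): a region such as

	       try { body } finally { fin }

	     reaching here as a TRY_FINALLY_EXPR becomes
	     GIMPLE_TRY <eval, cleanup, GIMPLE_TRY_FINALLY>, unless the
	     gimplified cleanup is empty, in which case only the body
	     sequence is emitted.  */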
16752
16753 case CLEANUP_POINT_EXPR:
16754 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16755 break;
16756
16757 case TARGET_EXPR:
16758 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16759 break;
16760
16761 case CATCH_EXPR:
16762 {
16763 gimple *c;
16764 gimple_seq handler = NULL;
16765 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16766 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16767 gimplify_seq_add_stmt (pre_p, c);
16768 ret = GS_ALL_DONE;
16769 break;
16770 }
16771
16772 case EH_FILTER_EXPR:
16773 {
16774 gimple *ehf;
16775 gimple_seq failure = NULL;
16776
16777 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16778 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16779 copy_warning (ehf, *expr_p);
16780 gimplify_seq_add_stmt (pre_p, ehf);
16781 ret = GS_ALL_DONE;
16782 break;
16783 }
16784
16785 case OBJ_TYPE_REF:
16786 {
16787 enum gimplify_status r0, r1;
16788 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16789 post_p, is_gimple_val, fb_rvalue);
16790 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16791 post_p, is_gimple_val, fb_rvalue);
16792 TREE_SIDE_EFFECTS (*expr_p) = 0;
16793 ret = MIN (r0, r1);
16794 }
16795 break;
16796
16797 case LABEL_DECL:
16798 /* We get here when taking the address of a label. We mark
16799 the label as "forced", meaning it can never be removed and
16800 it is a potential target for any computed goto. */
16801 FORCED_LABEL (*expr_p) = 1;
16802 ret = GS_ALL_DONE;
16803 break;
16804
16805 case STATEMENT_LIST:
16806 ret = gimplify_statement_list (expr_p, pre_p);
16807 break;
16808
16809 case WITH_SIZE_EXPR:
16810 {
16811 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16812 post_p == &internal_post ? NULL : post_p,
16813 gimple_test_f, fallback);
16814 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16815 is_gimple_val, fb_rvalue);
16816 ret = GS_ALL_DONE;
16817 }
16818 break;
16819
16820 case VAR_DECL:
16821 case PARM_DECL:
16822 ret = gimplify_var_or_parm_decl (expr_p);
16823 break;
16824
16825 case RESULT_DECL:
16826 /* When within an OMP context, notice uses of variables. */
16827 if (gimplify_omp_ctxp)
16828 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16829 ret = GS_ALL_DONE;
16830 break;
16831
16832 case DEBUG_EXPR_DECL:
16833 gcc_unreachable ();
16834
16835 case DEBUG_BEGIN_STMT:
16836 gimplify_seq_add_stmt (pre_p,
16837 gimple_build_debug_begin_stmt
16838 (TREE_BLOCK (*expr_p),
16839 EXPR_LOCATION (*expr_p)));
16840 ret = GS_ALL_DONE;
16841 *expr_p = NULL;
16842 break;
16843
16844 case SSA_NAME:
16845 /* Allow callbacks into the gimplifier during optimization. */
16846 ret = GS_ALL_DONE;
16847 break;
16848
16849 case OMP_PARALLEL:
16850 gimplify_omp_parallel (expr_p, pre_p);
16851 ret = GS_ALL_DONE;
16852 break;
16853
16854 case OMP_TASK:
16855 gimplify_omp_task (expr_p, pre_p);
16856 ret = GS_ALL_DONE;
16857 break;
16858
16859 case OMP_SIMD:
16860 {
16861 /* Temporarily disable into_ssa, as scan_omp_simd
16862 (which calls copy_gimple_seq_and_replace_locals) can't properly
16863 deal with SSA_NAMEs defined outside of the body. */
16864 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16865 gimplify_ctxp->into_ssa = false;
16866 ret = gimplify_omp_for (expr_p, pre_p);
16867 gimplify_ctxp->into_ssa = saved_into_ssa;
16868 break;
16869 }
16870
16871 case OMP_FOR:
16872 case OMP_DISTRIBUTE:
16873 case OMP_TASKLOOP:
16874 case OACC_LOOP:
16875 ret = gimplify_omp_for (expr_p, pre_p);
16876 break;
16877
16878 case OMP_LOOP:
16879 ret = gimplify_omp_loop (expr_p, pre_p);
16880 break;
16881
16882 case OACC_CACHE:
16883 gimplify_oacc_cache (expr_p, pre_p);
16884 ret = GS_ALL_DONE;
16885 break;
16886
16887 case OACC_DECLARE:
16888 gimplify_oacc_declare (expr_p, pre_p);
16889 ret = GS_ALL_DONE;
16890 break;
16891
16892 case OACC_HOST_DATA:
16893 case OACC_DATA:
16894 case OACC_KERNELS:
16895 case OACC_PARALLEL:
16896 case OACC_SERIAL:
16897 case OMP_SCOPE:
16898 case OMP_SECTIONS:
16899 case OMP_SINGLE:
16900 case OMP_TARGET:
16901 case OMP_TARGET_DATA:
16902 case OMP_TEAMS:
16903 gimplify_omp_workshare (expr_p, pre_p);
16904 ret = GS_ALL_DONE;
16905 break;
16906
16907 case OACC_ENTER_DATA:
16908 case OACC_EXIT_DATA:
16909 case OACC_UPDATE:
16910 case OMP_TARGET_UPDATE:
16911 case OMP_TARGET_ENTER_DATA:
16912 case OMP_TARGET_EXIT_DATA:
16913 gimplify_omp_target_update (expr_p, pre_p);
16914 ret = GS_ALL_DONE;
16915 break;
16916
16917 case OMP_SECTION:
16918 case OMP_MASTER:
16919 case OMP_MASKED:
16920 case OMP_ORDERED:
16921 case OMP_CRITICAL:
16922 case OMP_SCAN:
16923 {
16924 gimple_seq body = NULL;
16925 gimple *g;
16926 bool saved_in_omp_construct = in_omp_construct;
16927
16928 in_omp_construct = true;
16929 gimplify_and_add (OMP_BODY (*expr_p), &body);
16930 in_omp_construct = saved_in_omp_construct;
16931 switch (TREE_CODE (*expr_p))
16932 {
16933 case OMP_SECTION:
16934 g = gimple_build_omp_section (body);
16935 break;
16936 case OMP_MASTER:
16937 g = gimple_build_omp_master (body);
16938 break;
16939 case OMP_ORDERED:
16940 g = gimplify_omp_ordered (*expr_p, body);
16941 if (OMP_BODY (*expr_p) == NULL_TREE
16942 && gimple_code (g) == GIMPLE_OMP_ORDERED)
16943 gimple_omp_ordered_standalone (g);
16944 break;
16945 case OMP_MASKED:
16946 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
16947 pre_p, ORT_WORKSHARE, OMP_MASKED);
16948 gimplify_adjust_omp_clauses (pre_p, body,
16949 &OMP_MASKED_CLAUSES (*expr_p),
16950 OMP_MASKED);
16951 g = gimple_build_omp_masked (body,
16952 OMP_MASKED_CLAUSES (*expr_p));
16953 break;
16954 case OMP_CRITICAL:
16955 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
16956 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
16957 gimplify_adjust_omp_clauses (pre_p, body,
16958 &OMP_CRITICAL_CLAUSES (*expr_p),
16959 OMP_CRITICAL);
16960 g = gimple_build_omp_critical (body,
16961 OMP_CRITICAL_NAME (*expr_p),
16962 OMP_CRITICAL_CLAUSES (*expr_p));
16963 break;
16964 case OMP_SCAN:
16965 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
16966 pre_p, ORT_WORKSHARE, OMP_SCAN);
16967 gimplify_adjust_omp_clauses (pre_p, body,
16968 &OMP_SCAN_CLAUSES (*expr_p),
16969 OMP_SCAN);
16970 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
16971 break;
16972 default:
16973 gcc_unreachable ();
16974 }
16975 gimplify_seq_add_stmt (pre_p, g);
16976 ret = GS_ALL_DONE;
16977 break;
16978 }
16979
16980 case OMP_TASKGROUP:
16981 {
16982 gimple_seq body = NULL;
16983
16984 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
16985 bool saved_in_omp_construct = in_omp_construct;
16986 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
16987 OMP_TASKGROUP);
16988 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
16989
16990 in_omp_construct = true;
16991 gimplify_and_add (OMP_BODY (*expr_p), &body);
16992 in_omp_construct = saved_in_omp_construct;
16993 gimple_seq cleanup = NULL;
16994 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
16995 gimple *g = gimple_build_call (fn, 0);
16996 gimple_seq_add_stmt (&cleanup, g);
16997 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16998 body = NULL;
16999 gimple_seq_add_stmt (&body, g);
17000 g = gimple_build_omp_taskgroup (body, *pclauses);
17001 gimplify_seq_add_stmt (pre_p, g);
17002 ret = GS_ALL_DONE;
17003 break;
17004 }
17005
17006 case OMP_ATOMIC:
17007 case OMP_ATOMIC_READ:
17008 case OMP_ATOMIC_CAPTURE_OLD:
17009 case OMP_ATOMIC_CAPTURE_NEW:
17010 ret = gimplify_omp_atomic (expr_p, pre_p);
17011 break;
17012
17013 case TRANSACTION_EXPR:
17014 ret = gimplify_transaction (expr_p, pre_p);
17015 break;
17016
17017 case TRUTH_AND_EXPR:
17018 case TRUTH_OR_EXPR:
17019 case TRUTH_XOR_EXPR:
17020 {
17021 tree orig_type = TREE_TYPE (*expr_p);
17022 tree new_type, xop0, xop1;
17023 *expr_p = gimple_boolify (*expr_p);
17024 new_type = TREE_TYPE (*expr_p);
17025 if (!useless_type_conversion_p (orig_type, new_type))
17026 {
17027 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17028 ret = GS_OK;
17029 break;
17030 }
17031
17032 /* Boolified binary truth expressions are semantically equivalent
17033 to bitwise binary expressions. Canonicalize them to the
17034 bitwise variant. */
17035 switch (TREE_CODE (*expr_p))
17036 {
17037 case TRUTH_AND_EXPR:
17038 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17039 break;
17040 case TRUTH_OR_EXPR:
17041 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17042 break;
17043 case TRUTH_XOR_EXPR:
17044 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17045 break;
17046 default:
17047 break;
17048 }
17049 /* Now make sure that the operands have types compatible
17050 with the expression's new_type. */
17051 xop0 = TREE_OPERAND (*expr_p, 0);
17052 xop1 = TREE_OPERAND (*expr_p, 1);
17053 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17054 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17055 new_type,
17056 xop0);
17057 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17058 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17059 new_type,
17060 xop1);
17061 /* Continue classified as tcc_binary. */
17062 goto expr_2;
17063 }
17064
17065 case VEC_COND_EXPR:
17066 goto expr_3;
17067
17068 case VEC_PERM_EXPR:
17069 /* Classified as tcc_expression. */
17070 goto expr_3;
17071
17072 case BIT_INSERT_EXPR:
17073 /* Argument 3 is a constant. */
17074 goto expr_2;
17075
17076 case POINTER_PLUS_EXPR:
17077 {
17078 enum gimplify_status r0, r1;
17079 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17080 post_p, is_gimple_val, fb_rvalue);
17081 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17082 post_p, is_gimple_val, fb_rvalue);
17083 recalculate_side_effects (*expr_p);
17084 ret = MIN (r0, r1);
17085 break;
17086 }
17087
17088 default:
17089 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17090 {
17091 case tcc_comparison:
17092 /* Handle comparisons of non-scalar-mode aggregates with a call
17093 to memcmp. It would be nice to only have to do this for
17094 variable-sized objects, but then we'd have to allow the same
17095 nest of reference nodes we allow for MODIFY_EXPR and that's
17096 too complex.
17097
17098 Compare scalar mode aggregates as scalar mode values. Using
17099 memcmp for them would be very inefficient at best, and is
17100 plain wrong if bitfields are involved. */
17101 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17102 ret = GS_ERROR;
17103 else
17104 {
17105 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17106
17107 /* Vector comparisons need no boolification. */
17108 if (TREE_CODE (type) == VECTOR_TYPE)
17109 goto expr_2;
17110 else if (!AGGREGATE_TYPE_P (type))
17111 {
17112 tree org_type = TREE_TYPE (*expr_p);
17113 *expr_p = gimple_boolify (*expr_p);
17114 if (!useless_type_conversion_p (org_type,
17115 TREE_TYPE (*expr_p)))
17116 {
17117 *expr_p = fold_convert_loc (input_location,
17118 org_type, *expr_p);
17119 ret = GS_OK;
17120 }
17121 else
17122 goto expr_2;
17123 }
17124 else if (TYPE_MODE (type) != BLKmode)
17125 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17126 else
17127 ret = gimplify_variable_sized_compare (expr_p);
17128 }
17129 break;
17130
17131 /* If *EXPR_P does not need to be special-cased, handle it
17132 according to its class. */
17133 case tcc_unary:
17134 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17135 post_p, is_gimple_val, fb_rvalue);
17136 break;
17137
17138 case tcc_binary:
17139 expr_2:
17140 {
17141 enum gimplify_status r0, r1;
17142
17143 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17144 post_p, is_gimple_val, fb_rvalue);
17145 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17146 post_p, is_gimple_val, fb_rvalue);
17147
17148 ret = MIN (r0, r1);
17149 break;
17150 }
17151
17152 expr_3:
17153 {
17154 enum gimplify_status r0, r1, r2;
17155
17156 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17157 post_p, is_gimple_val, fb_rvalue);
17158 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17159 post_p, is_gimple_val, fb_rvalue);
17160 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17161 post_p, is_gimple_val, fb_rvalue);
17162
17163 ret = MIN (MIN (r0, r1), r2);
17164 break;
17165 }
17166
17167 case tcc_declaration:
17168 case tcc_constant:
17169 ret = GS_ALL_DONE;
17170 goto dont_recalculate;
17171
17172 default:
17173 gcc_unreachable ();
17174 }
17175
17176 recalculate_side_effects (*expr_p);
17177
17178 dont_recalculate:
17179 break;
17180 }
17181
17182 gcc_assert (*expr_p || ret != GS_OK);
17183 }
17184 while (ret == GS_OK);
17185
17186 /* If we encountered an error_mark somewhere nested inside, either
17187 stub out the statement or propagate the error back out. */
17188 if (ret == GS_ERROR)
17189 {
17190 if (is_statement)
17191 *expr_p = NULL;
17192 goto out;
17193 }
17194
17195 /* This was only valid as a return value from the langhook, which
17196 we handled. Make sure it doesn't escape from any other context. */
17197 gcc_assert (ret != GS_UNHANDLED);
17198
17199 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17200 {
17201 /* We aren't looking for a value, and we don't have a valid
17202 statement. If it doesn't have side-effects, throw it away.
17203 We can also get here with code such as "*&&L;", where L is
17204 a LABEL_DECL that is marked as FORCED_LABEL. */
17205 if (TREE_CODE (*expr_p) == LABEL_DECL
17206 || !TREE_SIDE_EFFECTS (*expr_p))
17207 *expr_p = NULL;
17208 else if (!TREE_THIS_VOLATILE (*expr_p))
17209 {
17210 /* This is probably a _REF that contains something nested that
17211 has side effects. Recurse through the operands to find it. */
17212 enum tree_code code = TREE_CODE (*expr_p);
17213
17214 switch (code)
17215 {
17216 case COMPONENT_REF:
17217 case REALPART_EXPR:
17218 case IMAGPART_EXPR:
17219 case VIEW_CONVERT_EXPR:
17220 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17221 gimple_test_f, fallback);
17222 break;
17223
17224 case ARRAY_REF:
17225 case ARRAY_RANGE_REF:
17226 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17227 gimple_test_f, fallback);
17228 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17229 gimple_test_f, fallback);
17230 break;
17231
17232 default:
17233 /* Anything else with side-effects must be converted to
17234 a valid statement before we get here. */
17235 gcc_unreachable ();
17236 }
17237
17238 *expr_p = NULL;
17239 }
17240 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17241 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17242 && !is_empty_type (TREE_TYPE (*expr_p)))
17243 {
17244 /* Historically, the compiler has treated a bare reference
17245 to a non-BLKmode volatile lvalue as forcing a load. */
17246 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17247
17248 /* Normally, we do not want to create a temporary for a
17249 TREE_ADDRESSABLE type because such a type should not be
17250 copied by bitwise-assignment. However, we make an
17251 exception here, as all we are doing here is ensuring that
17252 we read the bytes that make up the type. We use
17253 create_tmp_var_raw because create_tmp_var will abort when
17254 given a TREE_ADDRESSABLE type. */
17255 tree tmp = create_tmp_var_raw (type, "vol");
17256 gimple_add_tmp_var (tmp);
17257 gimplify_assign (tmp, *expr_p, pre_p);
17258 *expr_p = NULL;
17259 }
17260 else
17261 /* We can't do anything useful with a volatile reference to
17262 an incomplete type, so just throw it away. Likewise for
17263 a BLKmode type, since any implicit inner load should
17264 already have been turned into an explicit one by the
17265 gimplification process. */
17266 *expr_p = NULL;
17267 }
17268
17269 /* If we are gimplifying at the statement level, we're done. Tack
17270 everything together and return. */
17271 if (fallback == fb_none || is_statement)
17272 {
17273 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17274 it out for GC to reclaim it. */
17275 *expr_p = NULL_TREE;
17276
17277 if (!gimple_seq_empty_p (internal_pre)
17278 || !gimple_seq_empty_p (internal_post))
17279 {
17280 gimplify_seq_add_seq (&internal_pre, internal_post);
17281 gimplify_seq_add_seq (pre_p, internal_pre);
17282 }
17283
17284 /* The result of gimplifying *EXPR_P is going to be the last few
17285 statements in *PRE_P and *POST_P. Add location information
17286 to all the statements that were added by the gimplification
17287 helpers. */
17288 if (!gimple_seq_empty_p (*pre_p))
17289 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17290
17291 if (!gimple_seq_empty_p (*post_p))
17292 annotate_all_with_location_after (*post_p, post_last_gsi,
17293 input_location);
17294
17295 goto out;
17296 }
17297
17298 #ifdef ENABLE_GIMPLE_CHECKING
17299 if (*expr_p)
17300 {
17301 enum tree_code code = TREE_CODE (*expr_p);
17302 /* These expressions should already be in gimple IR form. */
17303 gcc_assert (code != MODIFY_EXPR
17304 && code != ASM_EXPR
17305 && code != BIND_EXPR
17306 && code != CATCH_EXPR
17307 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17308 && code != EH_FILTER_EXPR
17309 && code != GOTO_EXPR
17310 && code != LABEL_EXPR
17311 && code != LOOP_EXPR
17312 && code != SWITCH_EXPR
17313 && code != TRY_FINALLY_EXPR
17314 && code != EH_ELSE_EXPR
17315 && code != OACC_PARALLEL
17316 && code != OACC_KERNELS
17317 && code != OACC_SERIAL
17318 && code != OACC_DATA
17319 && code != OACC_HOST_DATA
17320 && code != OACC_DECLARE
17321 && code != OACC_UPDATE
17322 && code != OACC_ENTER_DATA
17323 && code != OACC_EXIT_DATA
17324 && code != OACC_CACHE
17325 && code != OMP_CRITICAL
17326 && code != OMP_FOR
17327 && code != OACC_LOOP
17328 && code != OMP_MASTER
17329 && code != OMP_MASKED
17330 && code != OMP_TASKGROUP
17331 && code != OMP_ORDERED
17332 && code != OMP_PARALLEL
17333 && code != OMP_SCAN
17334 && code != OMP_SECTIONS
17335 && code != OMP_SECTION
17336 && code != OMP_SINGLE
17337 && code != OMP_SCOPE);
17338 }
17339 #endif
17340
17341 /* Otherwise we're gimplifying a subexpression, so the resulting
17342 value is interesting. If it's a valid operand that matches
17343 GIMPLE_TEST_F, we're done. Unless we are handling some
17344 post-effects internally; if that's the case, we need to copy into
17345 a temporary before adding the post-effects to POST_P. */
17346 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17347 goto out;
17348
17349 /* Otherwise, we need to create a new temporary for the gimplified
17350 expression. */
17351
17352 /* We can't return an lvalue if we have an internal postqueue. The
17353 object the lvalue refers to would (probably) be modified by the
17354 postqueue; we need to copy the value out first, which means an
17355 rvalue. */
17356 if ((fallback & fb_lvalue)
17357 && gimple_seq_empty_p (internal_post)
17358 && is_gimple_addressable (*expr_p))
17359 {
17360 /* An lvalue will do. Take the address of the expression, store it
17361 in a temporary, and replace the expression with an INDIRECT_REF of
17362 that temporary. */
17363 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17364 unsigned int ref_align = get_object_alignment (*expr_p);
17365 tree ref_type = TREE_TYPE (*expr_p);
17366 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17367 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17368 if (TYPE_ALIGN (ref_type) != ref_align)
17369 ref_type = build_aligned_type (ref_type, ref_align);
17370 *expr_p = build2 (MEM_REF, ref_type,
17371 tmp, build_zero_cst (ref_alias_type));
17372 }
17373 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17374 {
17375 /* An rvalue will do. Assign the gimplified expression into a
17376 new temporary TMP and replace the original expression with
17377 TMP. First, make sure that the expression has a type so that
17378 it can be assigned into a temporary. */
17379 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17380 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17381 }
17382 else
17383 {
17384 #ifdef ENABLE_GIMPLE_CHECKING
17385 if (!(fallback & fb_mayfail))
17386 {
17387 fprintf (stderr, "gimplification failed:\n");
17388 print_generic_expr (stderr, *expr_p);
17389 debug_tree (*expr_p);
17390 internal_error ("gimplification failed");
17391 }
17392 #endif
17393 gcc_assert (fallback & fb_mayfail);
17394
17395 /* If this is an asm statement, and the user asked for the
17396 impossible, don't die. Fail and let gimplify_asm_expr
17397 issue an error. */
17398 ret = GS_ERROR;
17399 goto out;
17400 }
17401
17402 /* Make sure the temporary matches our predicate. */
17403 gcc_assert ((*gimple_test_f) (*expr_p));
17404
17405 if (!gimple_seq_empty_p (internal_post))
17406 {
17407 annotate_all_with_location (internal_post, input_location);
17408 gimplify_seq_add_seq (pre_p, internal_post);
17409 }
17410
17411 out:
17412 input_location = saved_location;
17413 return ret;
17414 }
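/* For illustration, with GIMPLE_TEST_F == is_gimple_val and
   FALLBACK == fb_rvalue, gimplifying the subexpression `x + y * z'
   appends roughly

     _1 = y * z;
     _2 = x + _1;

   to *PRE_P and replaces *EXPR_P with the temporary _2, which then
   satisfies the predicate (the names _1 and _2 are illustrative).  */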
17415
17416 /* Like gimplify_expr but make sure the gimplified result is not itself
17417    an SSA name; if it would be one, it is copied into a decl instead.
17418    Temporaries required for evaluating *EXPR_P may still be SSA names.  */
17419
17420 static enum gimplify_status
17421 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17422 bool (*gimple_test_f) (tree), fallback_t fallback,
17423 bool allow_ssa)
17424 {
17425 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17426 gimple_test_f, fallback);
17427 if (! allow_ssa
17428 && TREE_CODE (*expr_p) == SSA_NAME)
17429 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17430 return ret;
17431 }
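/* E.g. with ALLOW_SSA == false, a result that would otherwise be the
   SSA name _1 is copied into a fresh temporary decl, roughly

     _1 = y * z;
     D.1234 = _1;

   and *EXPR_P becomes D.1234 (names illustrative).  This matters for
   values stored into decl or type fields, where an SSA name would be
   reclaimed if its definition vanished.  */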
17432
17433 /* Look through TYPE for variable-sized objects and gimplify each such
17434 size that we find. Add to LIST_P any statements generated. */
17435
17436 void
17437 gimplify_type_sizes (tree type, gimple_seq *list_p)
17438 {
17439 if (type == NULL || type == error_mark_node)
17440 return;
17441
17442 const bool ignored_p
17443 = TYPE_NAME (type)
17444 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17445 && DECL_IGNORED_P (TYPE_NAME (type));
17446 tree t;
17447
17448 /* We first do the main variant, then copy into any other variants. */
17449 type = TYPE_MAIN_VARIANT (type);
17450
17451 /* Avoid infinite recursion. */
17452 if (TYPE_SIZES_GIMPLIFIED (type))
17453 return;
17454
17455 TYPE_SIZES_GIMPLIFIED (type) = 1;
17456
17457 switch (TREE_CODE (type))
17458 {
17459 case INTEGER_TYPE:
17460 case ENUMERAL_TYPE:
17461 case BOOLEAN_TYPE:
17462 case REAL_TYPE:
17463 case FIXED_POINT_TYPE:
17464 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17465 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17466
17467 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17468 {
17469 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17470 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17471 }
17472 break;
17473
17474 case ARRAY_TYPE:
17475 /* These types may not have declarations, so handle them here. */
17476 gimplify_type_sizes (TREE_TYPE (type), list_p);
17477 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17478       /* Ensure VLA bounds aren't removed: at -O0 they should be variables
17479 	 with assigned stack slots, and at -O1+ with -g they should be tracked
17480 	 by VTA.  */
17481 if (!ignored_p
17482 && TYPE_DOMAIN (type)
17483 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17484 {
17485 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17486 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17487 DECL_IGNORED_P (t) = 0;
17488 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17489 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17490 DECL_IGNORED_P (t) = 0;
17491 }
17492 break;
17493
17494 case RECORD_TYPE:
17495 case UNION_TYPE:
17496 case QUAL_UNION_TYPE:
17497 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17498 if (TREE_CODE (field) == FIELD_DECL)
17499 {
17500 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17501 /* Likewise, ensure variable offsets aren't removed. */
17502 if (!ignored_p
17503 && (t = DECL_FIELD_OFFSET (field))
17504 && VAR_P (t)
17505 && DECL_ARTIFICIAL (t))
17506 DECL_IGNORED_P (t) = 0;
17507 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17508 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17509 gimplify_type_sizes (TREE_TYPE (field), list_p);
17510 }
17511 break;
17512
17513 case POINTER_TYPE:
17514 case REFERENCE_TYPE:
17515 /* We used to recurse on the pointed-to type here, which turned out to
17516 be incorrect because its definition might refer to variables not
17517 yet initialized at this point if a forward declaration is involved.
17518
17519 It was actually useful for anonymous pointed-to types to ensure
17520 that the sizes evaluation dominates every possible later use of the
17521 values. Restricting to such types here would be safe since there
17522 is no possible forward declaration around, but would introduce an
17523 undesirable middle-end semantic to anonymity. We then defer to
17524 front-ends the responsibility of ensuring that the sizes are
17525 evaluated both early and late enough, e.g. by attaching artificial
17526 type declarations to the tree. */
17527 break;
17528
17529 default:
17530 break;
17531 }
17532
17533 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17534 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17535
17536 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17537 {
17538 TYPE_SIZE (t) = TYPE_SIZE (type);
17539 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17540 TYPE_SIZES_GIMPLIFIED (t) = 1;
17541 }
17542 }
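/* A sketch of the effect on a VLA (temporary names are illustrative):
   for

     void f (int n) { char a[n]; }

   the domain bound and sizes of `char[n]' contain SAVE_EXPRs of `n',
   and gimplifying them appends statements along the lines of

     n.0 = n;
     D.1 = (sizetype) n.0 - 1;    (the upper domain bound n - 1)
     D.2 = (sizetype) n.0;        (TYPE_SIZE_UNIT, in bytes)

   to LIST_P, with the resulting temporaries stored back into
   TYPE_DOMAIN, TYPE_SIZE and TYPE_SIZE_UNIT of the main variant and
   copied to the other variants.  */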
17543
17544 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17545 a size or position, has had all of its SAVE_EXPRs evaluated.
17546 We add any required statements to *STMT_P. */
17547
17548 void
17549 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17550 {
17551 tree expr = *expr_p;
17552
17553 /* We don't do anything if the value isn't there, is constant, or contains
17554      a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
17555 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17556 will want to replace it with a new variable, but that will cause problems
17557 if this type is from outside the function. It's OK to have that here. */
17558 if (expr == NULL_TREE
17559 || is_gimple_constant (expr)
17560 || TREE_CODE (expr) == VAR_DECL
17561 || CONTAINS_PLACEHOLDER_P (expr))
17562 return;
17563
17564 *expr_p = unshare_expr (expr);
17565
17566 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17567 if the def vanishes. */
17568 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17569
17570   /* If EXPR wasn't already is_gimple_sizepos or is_gimple_constant from the
17571      front end, ensure that it is a VAR_DECL; otherwise we might handle some
17572      decls through gimplify_vla_decl even when all their sizes would be
	 INTEGER_CSTs.  */
17573 if (is_gimple_constant (*expr_p))
17574 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17575 }
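/* Note the final step above: when gimplification folds a size that
   was not constant in the front end down to a constant, it is still
   forced into a temporary, e.g.

     D.1234 = 8;

   so that later consumers uniformly see a VAR_DECL for such sizes
   rather than a mix of variables and INTEGER_CSTs (D.1234 is an
   illustrative name).  */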
17576
17577 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
17578    the corresponding sequence of GIMPLE statements.  If DO_PARMS
17579 is true, also gimplify the parameters. */
17580
17581 gbind *
17582 gimplify_body (tree fndecl, bool do_parms)
17583 {
17584 location_t saved_location = input_location;
17585 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17586 gimple *outer_stmt;
17587 gbind *outer_bind;
17588
17589 timevar_push (TV_TREE_GIMPLIFY);
17590
17591 init_tree_ssa (cfun);
17592
17593 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17594 gimplification. */
17595 default_rtl_profile ();
17596
17597 gcc_assert (gimplify_ctxp == NULL);
17598 push_gimplify_context (true);
17599
17600 if (flag_openacc || flag_openmp)
17601 {
17602 gcc_assert (gimplify_omp_ctxp == NULL);
17603 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17604 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17605 }
17606
17607 /* Unshare most shared trees in the body and in that of any nested functions.
17608 It would seem we don't have to do this for nested functions because
17609 they are supposed to be output and then the outer function gimplified
17610 first, but the g++ front end doesn't always do it that way. */
17611 unshare_body (fndecl);
17612 unvisit_body (fndecl);
17613
17614 /* Make sure input_location isn't set to something weird. */
17615 input_location = DECL_SOURCE_LOCATION (fndecl);
17616
17617 /* Resolve callee-copies. This has to be done before processing
17618 the body so that DECL_VALUE_EXPR gets processed correctly. */
17619 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17620
17621 /* Gimplify the function's body. */
17622 seq = NULL;
17623 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17624 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17625 if (!outer_stmt)
17626 {
17627 outer_stmt = gimple_build_nop ();
17628 gimplify_seq_add_stmt (&seq, outer_stmt);
17629 }
17630
17631 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17632 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17633 if (gimple_code (outer_stmt) == GIMPLE_BIND
17634 && (gimple_seq_first_nondebug_stmt (seq)
17635 == gimple_seq_last_nondebug_stmt (seq)))
17636 {
17637 outer_bind = as_a <gbind *> (outer_stmt);
17638 if (gimple_seq_first_stmt (seq) != outer_stmt
17639 || gimple_seq_last_stmt (seq) != outer_stmt)
17640 {
17641 /* If there are debug stmts before or after outer_stmt, move them
17642 inside of outer_bind body. */
17643 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17644 gimple_seq second_seq = NULL;
17645 if (gimple_seq_first_stmt (seq) != outer_stmt
17646 && gimple_seq_last_stmt (seq) != outer_stmt)
17647 {
17648 second_seq = gsi_split_seq_after (gsi);
17649 gsi_remove (&gsi, false);
17650 }
17651 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17652 gsi_remove (&gsi, false);
17653 else
17654 {
17655 gsi_remove (&gsi, false);
17656 second_seq = seq;
17657 seq = NULL;
17658 }
17659 gimple_seq_add_seq_without_update (&seq,
17660 gimple_bind_body (outer_bind));
17661 gimple_seq_add_seq_without_update (&seq, second_seq);
17662 gimple_bind_set_body (outer_bind, seq);
17663 }
17664 }
17665 else
17666 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17667
17668 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17669
17670 /* If we had callee-copies statements, insert them at the beginning
17671 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17672 if (!gimple_seq_empty_p (parm_stmts))
17673 {
17674 tree parm;
17675
17676 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17677 if (parm_cleanup)
17678 {
17679 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17680 GIMPLE_TRY_FINALLY);
17681 parm_stmts = NULL;
17682 gimple_seq_add_stmt (&parm_stmts, g);
17683 }
17684 gimple_bind_set_body (outer_bind, parm_stmts);
17685
17686 for (parm = DECL_ARGUMENTS (current_function_decl);
17687 parm; parm = DECL_CHAIN (parm))
17688 if (DECL_HAS_VALUE_EXPR_P (parm))
17689 {
17690 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17691 DECL_IGNORED_P (parm) = 0;
17692 }
17693 }
17694
17695 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17696 && gimplify_omp_ctxp)
17697 {
17698 delete_omp_context (gimplify_omp_ctxp);
17699 gimplify_omp_ctxp = NULL;
17700 }
17701
17702 pop_gimplify_context (outer_bind);
17703 gcc_assert (gimplify_ctxp == NULL);
17704
17705 if (flag_checking && !seen_error ())
17706 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17707
17708 timevar_pop (TV_TREE_GIMPLIFY);
17709 input_location = saved_location;
17710
17711 return outer_bind;
17712 }
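/* A minimal sketch of the result: for

     int f (void) { return 42; }

   the gimplified body is the single statement `return 42;', which is
   not a GIMPLE_BIND, so it is wrapped as

     {
       return 42;
     }

   and that outer bind is returned; any callee-copy statements for
   parameters would be inserted at its head.  */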
17713
17714 typedef char *char_p; /* For DEF_VEC_P. */
17715
17716 /* Return whether we should exclude FNDECL from instrumentation. */
17717
17718 static bool
17719 flag_instrument_functions_exclude_p (tree fndecl)
17720 {
17721 vec<char_p> *v;
17722
17723 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17724 if (v && v->length () > 0)
17725 {
17726 const char *name;
17727 int i;
17728 char *s;
17729
17730 name = lang_hooks.decl_printable_name (fndecl, 1);
17731 FOR_EACH_VEC_ELT (*v, i, s)
17732 if (strstr (name, s) != NULL)
17733 return true;
17734 }
17735
17736 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17737 if (v && v->length () > 0)
17738 {
17739 const char *name;
17740 int i;
17741 char *s;
17742
17743 name = DECL_SOURCE_FILE (fndecl);
17744 FOR_EACH_VEC_ELT (*v, i, s)
17745 if (strstr (name, s) != NULL)
17746 return true;
17747 }
17748
17749 return false;
17750 }
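/* The match is a plain substring test, so e.g.

     -finstrument-functions-exclude-function-list=compute

   excludes compute_foo and my_compute_helper alike, and

     -finstrument-functions-exclude-file-list=include/bits

   excludes every function whose source file path contains
   "include/bits".  */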
17751
17752 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17753 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17754    the instrumentation function.  If STMT is not NULL, it is a statement
17755 to be executed just before the call to the instrumentation function. */
17756
17757 static void
17758 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17759 tree cond_var, gimple *stmt)
17760 {
17761 /* The instrumentation hooks aren't going to call the instrumented
17762 function and the address they receive is expected to be matchable
17763 against symbol addresses. Make sure we don't create a trampoline,
17764 in case the current function is nested. */
17765 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17766 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17767
17768 tree label_true, label_false;
17769 if (cond_var)
17770 {
17771 label_true = create_artificial_label (UNKNOWN_LOCATION);
17772 label_false = create_artificial_label (UNKNOWN_LOCATION);
17773 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17774 label_true, label_false);
17775 gimplify_seq_add_stmt (seq, cond);
17776 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17777 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17778 NOT_TAKEN));
17779 }
17780
17781 if (stmt)
17782 gimplify_seq_add_stmt (seq, stmt);
17783
17784 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17785 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17786 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17787 gimple_call_set_lhs (call, tmp_var);
17788 gimplify_seq_add_stmt (seq, call);
17789 x = builtin_decl_implicit (fncode);
17790 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17791 gimplify_seq_add_stmt (seq, call);
17792
17793 if (cond_var)
17794 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
17795 }
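/* With a COND_VAR, the sequence built above corresponds roughly to
   (labels and temporaries are illustrative):

     if (tmp_called == 0) goto enter; else goto skip;
     enter:
       ... STMT, if any ...
       return_addr.1 = __builtin_return_address (0);
       __cyg_profile_func_enter (f, return_addr.1);
     skip:

   for FNCODE == BUILT_IN_PROFILE_FUNC_ENTER, where `f' is the address
   of the current function, marked with TREE_NO_TRAMPOLINE so nested
   functions don't get a trampoline created just for the hook.  */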
17796
17797 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17798 node for the function we want to gimplify.
17799
17800    The resulting sequence of GIMPLE statements is stored as the body
17801    of FNDECL via gimple_set_body.  */
17802
17803 void
17804 gimplify_function_tree (tree fndecl)
17805 {
17806 gimple_seq seq;
17807 gbind *bind;
17808
17809 gcc_assert (!gimple_body (fndecl));
17810
17811 if (DECL_STRUCT_FUNCTION (fndecl))
17812 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17813 else
17814 push_struct_function (fndecl);
17815
17816 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17817 if necessary. */
17818 cfun->curr_properties |= PROP_gimple_lva;
17819
17820 if (asan_sanitize_use_after_scope ())
17821 asan_poisoned_variables = new hash_set<tree> ();
17822 bind = gimplify_body (fndecl, true);
17823 if (asan_poisoned_variables)
17824 {
17825 delete asan_poisoned_variables;
17826 asan_poisoned_variables = NULL;
17827 }
17828
17829 /* The tree body of the function is no longer needed, replace it
17830 with the new GIMPLE body. */
17831 seq = NULL;
17832 gimple_seq_add_stmt (&seq, bind);
17833 gimple_set_body (fndecl, seq);
17834
17835 /* If we're instrumenting function entry/exit, then prepend the call to
17836 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17837 catch the exit hook. */
17838 /* ??? Add some way to ignore exceptions for this TFE. */
17839 if (flag_instrument_function_entry_exit
17840 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17841 /* Do not instrument extern inline functions. */
17842 && !(DECL_DECLARED_INLINE_P (fndecl)
17843 && DECL_EXTERNAL (fndecl)
17844 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17845 && !flag_instrument_functions_exclude_p (fndecl))
17846 {
17847 gimple_seq body = NULL, cleanup = NULL;
17848 gassign *assign;
17849 tree cond_var;
17850
17851 /* If -finstrument-functions-once is specified, generate:
17852
17853 static volatile bool C.0 = false;
17854 bool tmp_called;
17855
17856 tmp_called = C.0;
17857 if (!tmp_called)
17858 {
17859 C.0 = true;
17860 [call profiling enter function]
17861 }
17862
17863 without specific protection for data races. */
17864 if (flag_instrument_function_entry_exit > 1)
17865 {
17866 tree first_var
17867 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17868 VAR_DECL,
17869 create_tmp_var_name ("C"),
17870 boolean_type_node);
17871 DECL_ARTIFICIAL (first_var) = 1;
17872 DECL_IGNORED_P (first_var) = 1;
17873 TREE_STATIC (first_var) = 1;
17874 TREE_THIS_VOLATILE (first_var) = 1;
17875 TREE_USED (first_var) = 1;
17876 DECL_INITIAL (first_var) = boolean_false_node;
17877 varpool_node::add (first_var);
17878
17879 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17880 assign = gimple_build_assign (cond_var, first_var);
17881 gimplify_seq_add_stmt (&body, assign);
17882
17883 assign = gimple_build_assign (first_var, boolean_true_node);
17884 }
17885
17886 else
17887 {
17888 cond_var = NULL_TREE;
17889 assign = NULL;
17890 }
17891
17892 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17893 cond_var, assign);
17894
17895 /* If -finstrument-functions-once is specified, generate:
17896
17897 if (!tmp_called)
17898 [call profiling exit function]
17899
17900 without specific protection for data races. */
17901 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17902 cond_var, NULL);
17903
17904 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17905 gimplify_seq_add_stmt (&body, tf);
17906 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17907
17908 /* Replace the current function body with the body
17909 wrapped in the try/finally TF. */
17910 seq = NULL;
17911 gimple_seq_add_stmt (&seq, new_bind);
17912 gimple_set_body (fndecl, seq);
17913 bind = new_bind;
17914 }
17915
17916 if (sanitize_flags_p (SANITIZE_THREAD)
17917 && param_tsan_instrument_func_entry_exit)
17918 {
17919 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
17920 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
17921 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
17922 /* Replace the current function body with the body
17923 wrapped in the try/finally TF. */
17924 seq = NULL;
17925 gimple_seq_add_stmt (&seq, new_bind);
17926 gimple_set_body (fndecl, seq);
17927 }
17928
17929 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17930 cfun->curr_properties |= PROP_gimple_any;
17931
17932 pop_cfun ();
17933
17934 dump_function (TDI_gimple, fndecl);
17935 }
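/* For -fsanitize=thread the wrapping above yields, schematically:

     f ()
     {
       try
         {
           ... original GIMPLE body ...
         }
       finally
         {
           IFN_TSAN_FUNC_EXIT ();
         }
     }

   and -finstrument-functions nests the body the same way between the
   __cyg_profile_func_enter and __cyg_profile_func_exit hooks.  */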
17936
17937 /* Return a dummy expression of type TYPE in order to keep going after an
17938 error. */
17939
17940 static tree
17941 dummy_object (tree type)
17942 {
17943 tree t = build_int_cst (build_pointer_type (type), 0);
17944 return build2 (MEM_REF, type, t, t);
17945 }
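/* E.g. dummy_object (integer_type_node) builds the tree

     MEM_REF <int> ((int *) 0, (int *) 0)

   dumped roughly as MEM[(int *)0B]; it only exists so that error
   recovery can keep going with an expression of the right type and
   mode, and is never meant to be executed.  */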
17946
17947 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
17948 builtin function, but a very special sort of operator. */
17949
17950 enum gimplify_status
17951 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
17952 gimple_seq *post_p ATTRIBUTE_UNUSED)
17953 {
17954 tree promoted_type, have_va_type;
17955 tree valist = TREE_OPERAND (*expr_p, 0);
17956 tree type = TREE_TYPE (*expr_p);
17957 tree t, tag, aptag;
17958 location_t loc = EXPR_LOCATION (*expr_p);
17959
17960 /* Verify that valist is of the proper type. */
17961 have_va_type = TREE_TYPE (valist);
17962 if (have_va_type == error_mark_node)
17963 return GS_ERROR;
17964 have_va_type = targetm.canonical_va_list_type (have_va_type);
17965 if (have_va_type == NULL_TREE
17966 && POINTER_TYPE_P (TREE_TYPE (valist)))
17967 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
17968 have_va_type
17969 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
17970 gcc_assert (have_va_type != NULL_TREE);
17971
17972 /* Generate a diagnostic for requesting data of a type that cannot
17973 be passed through `...' due to type promotion at the call site. */
17974 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
17975 != type)
17976 {
17977 static bool gave_help;
17978 bool warned;
17979 /* Use the expansion point to handle cases such as passing bool (defined
17980 in a system header) through `...'. */
17981 location_t xloc
17982 = expansion_point_location_if_in_system_header (loc);
17983
17984 /* Unfortunately, this is merely undefined, rather than a constraint
17985 violation, so we cannot make this an error. If this call is never
17986 executed, the program is still strictly conforming. */
17987 auto_diagnostic_group d;
17988 warned = warning_at (xloc, 0,
17989 "%qT is promoted to %qT when passed through %<...%>",
17990 type, promoted_type);
17991 if (!gave_help && warned)
17992 {
17993 gave_help = true;
17994 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
17995 promoted_type, type);
17996 }
17997
17998 /* We can, however, treat "undefined" any way we please.
17999 	 Call __builtin_trap to encourage the user to fix the program.  */
18000 if (warned)
18001 inform (xloc, "if this code is reached, the program will abort");
18002 /* Before the abort, allow the evaluation of the va_list
18003 expression to exit or longjmp. */
18004 gimplify_and_add (valist, pre_p);
18005 t = build_call_expr_loc (loc,
18006 builtin_decl_implicit (BUILT_IN_TRAP), 0);
18007 gimplify_and_add (t, pre_p);
18008
18009 /* This is dead code, but go ahead and finish so that the
18010 mode of the result comes out right. */
18011 *expr_p = dummy_object (type);
18012 return GS_ALL_DONE;
18013 }
18014
18015 tag = build_int_cst (build_pointer_type (type), 0);
18016 aptag = build_int_cst (TREE_TYPE (valist), 0);
18017
18018 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18019 valist, tag, aptag);
18020
18021 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18022 needs to be expanded. */
18023 cfun->curr_properties &= ~PROP_gimple_lva;
18024
18025 return GS_OK;
18026 }
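/* In the well-defined case, a use such as

     int i = va_arg (ap, int);

   is rewritten into a call to the internal function IFN_VA_ARG,
   dumped roughly as

     i = VA_ARG (&ap, 0B, 0B);

   where the two null pointer "tags" carry the requested type and the
   va_list type; the call is expanded into real code by a later pass,
   which is why PROP_gimple_lva is cleared here.  */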
18027
18028 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18029
18030 DST/SRC are the destination and source respectively. You can pass
18031 ungimplified trees in DST or SRC, in which case they will be
18032    converted to GIMPLE operands if necessary.
18033
18034 This function returns the newly created GIMPLE_ASSIGN tuple. */
18035
18036 gimple *
18037 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18038 {
18039 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18040 gimplify_and_add (t, seq_p);
18041 ggc_free (t);
18042 return gimple_seq_last_stmt (*seq_p);
18043 }
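/* Typical use (a sketch; DST, A and B stand for trees the caller has
   already built):

     gimple_seq seq = NULL;
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (dst), a, b);
     gimple *g = gimplify_assign (dst, sum, &seq);

   appends `dst = a + b;' to SEQ, gimplifying the operands first if
   needed, and returns the newly built assignment.  */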
18044
18045 inline hashval_t
18046 gimplify_hasher::hash (const elt_t *p)
18047 {
18048 tree t = p->val;
18049 return iterative_hash_expr (t, 0);
18050 }
18051
18052 inline bool
18053 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18054 {
18055 tree t1 = p1->val;
18056 tree t2 = p2->val;
18057 enum tree_code code = TREE_CODE (t1);
18058
18059 if (TREE_CODE (t2) != code
18060 || TREE_TYPE (t1) != TREE_TYPE (t2))
18061 return false;
18062
18063 if (!operand_equal_p (t1, t2, 0))
18064 return false;
18065
18066 /* Only allow them to compare equal if they also hash equal; otherwise
18067      results are nondeterministic, and we fail bootstrap comparison.  */
18068 gcc_checking_assert (hash (p1) == hash (p2));
18069
18070 return true;
18071 }