gcc.git: gcc/omp-low.cc
openmp: Fix ICE with taskgroup at -O0 -fexceptions [PR107001]
1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
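/* For illustration only (a rough sketch, not code from this pass): given

     int n = 0;
     #pragma omp parallel shared (n)
     use (n);

   the parallel body is outlined into a child function receiving the
   shared data through a record, approximately:

     struct .omp_data_s { int n; };
     static void main._omp_fn.0 (struct .omp_data_s *ctx) { use (ctx->n); }
     ...
     .omp_data_o.n = n;
     GOMP_parallel (main._omp_fn.0, &.omp_data_o, 0, 0);
     n = .omp_data_o.n;

   GOMP_parallel is the libgomp entry point; the .omp_data_* names follow
   the record_type / sender_decl / receiver_decl built by this pass.  */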
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
77
78 struct omp_context
79 {
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82     callback functions for tree-inline.cc (e.g., omp_copy_decl)
83     receive a copy_body_data pointer that is cast back to an
84     omp_context pointer.  */
84 copy_body_data cb;
85
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context *outer;
88 gimple *stmt;
89
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
92 splay_tree field_map;
93 tree record_type;
94 tree sender_decl;
95 tree receiver_decl;
96
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99     that encountered the task construct to the task firstprivate fn;
100     record_type is allocated by GOMP_task, initialized by the task
101     firstprivate fn, and passed to the task body fn.  */
102 splay_tree sfield_map;
103 tree srecord_type;
104
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
107 tree block_vars;
108
109   /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
110     barriers should jump during the omplower pass.  */
111 tree cancel_label;
112
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114 otherwise. */
115 gimple *simt_stmt;
116
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec<tree> task_reductions;
121
122 /* A hash map from the reduction clauses to the registered array
123 elts. */
124 hash_map<tree, unsigned> *task_reduction_map;
125
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map<tree, tree> *lastprivate_conditional_map;
129
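  /* For example, given "#pragma omp for lastprivate (conditional: x)", this
     map pairs X with an artificial iteration-tracking variable, so the
     value copied out of the construct is the one from the last iteration
     that actually assigned X.  */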
130 /* And a hash map from the allocate variables to their corresponding
131 allocators. */
132 hash_map<tree, tree> *allocate_map;
133
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses;
139
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses;
145
146   /* Nesting depth of this context.  Used to beautify error messages
147     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
148     being reserved for the main body of the function.  */
149 int depth;
150
151 /* True if this parallel directive is nested within another. */
152 bool is_nested;
153
154 /* True if this construct can be cancelled. */
155 bool cancellable;
156
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
158 context. */
159 bool combined_into_simd_safelen1;
160
161   /* True if there is a nested scan context with an inclusive clause.  */
162 bool scan_inclusive;
163
164   /* True if there is a nested scan context with an exclusive clause.  */
165 bool scan_exclusive;
166
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase;
169
170   /* True if there is an order(concurrent) clause on the construct.  */
171 bool order_concurrent;
172
173   /* True if there is a bind clause on the construct (i.e. a loop construct).  */
174 bool loop_p;
175
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
178 bool teams_nested_p;
179
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p;
183
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec<tree> oacc_privatization_candidates;
186 };
187
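/* For example (a sketch of the context tree), in

     #pragma omp parallel      <-- omp_context A, depth 1
       #pragma omp task        <-- omp_context B, outer == A, depth 2

   each construct gets its own omp_context, chained through ->outer.  */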
188 static splay_tree all_contexts;
189 static int taskreg_nesting_level;
190 static int target_nesting_level;
191 static bitmap make_addressable_vars;
192 static bitmap global_nonaddressable_vars;
193 static vec<omp_context *> taskreg_contexts;
194 static vec<gomp_task *> task_cpyfns;
195
196 static void scan_omp (gimple_seq *, omp_context *);
197 static tree scan_omp_1_op (tree *, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context *ctx);
199
200 #define WALK_SUBSTMTS \
201 case GIMPLE_BIND: \
202 case GIMPLE_TRY: \
203 case GIMPLE_CATCH: \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_TRANSACTION: \
206 /* The sub-statements for these should be walked. */ \
207 *handled_ops_p = false; \
208 break;
209
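/* A sketch of typical WALK_SUBSTMTS use, inside a walk_gimple_stmt
   callback that receives HANDLED_OPS_P:

     switch (gimple_code (stmt))
       {
       WALK_SUBSTMTS;
       case GIMPLE_OMP_FOR:
         ...;
         break;
       default:
         break;
       }
*/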
210 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
211 (This doesn't include OpenACC 'kernels' decomposed parts.) */
212
213 static bool
214 is_oacc_parallel_or_serial (omp_context *ctx)
215 {
216 enum gimple_code outer_type = gimple_code (ctx->stmt);
217 return ((outer_type == GIMPLE_OMP_TARGET)
218 && ((gimple_omp_target_kind (ctx->stmt)
219 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
220 || (gimple_omp_target_kind (ctx->stmt)
221 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
222 }
223
224 /* Return whether CTX represents an OpenACC 'kernels' construct.
225 (This doesn't include OpenACC 'kernels' decomposed parts.) */
226
227 static bool
228 is_oacc_kernels (omp_context *ctx)
229 {
230 enum gimple_code outer_type = gimple_code (ctx->stmt);
231 return ((outer_type == GIMPLE_OMP_TARGET)
232 && (gimple_omp_target_kind (ctx->stmt)
233 == GF_OMP_TARGET_KIND_OACC_KERNELS));
234 }
235
236 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
237
238 static bool
239 is_oacc_kernels_decomposed_part (omp_context *ctx)
240 {
241 enum gimple_code outer_type = gimple_code (ctx->stmt);
242 return ((outer_type == GIMPLE_OMP_TARGET)
243 && ((gimple_omp_target_kind (ctx->stmt)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
245 || (gimple_omp_target_kind (ctx->stmt)
246 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
247 || (gimple_omp_target_kind (ctx->stmt)
248 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
249 }
250
251 /* Return true if STMT corresponds to an OpenMP target region. */
252 static bool
253 is_omp_target (gimple *stmt)
254 {
255 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
256 {
257 int kind = gimple_omp_target_kind (stmt);
258 return (kind == GF_OMP_TARGET_KIND_REGION
259 || kind == GF_OMP_TARGET_KIND_DATA
260 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
261 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
262 }
263 return false;
264 }
265
266 /* If DECL is the artificial dummy VAR_DECL created for non-static
267 data member privatization, return the underlying "this" parameter,
268 otherwise return NULL. */
269
270 tree
271 omp_member_access_dummy_var (tree decl)
272 {
273 if (!VAR_P (decl)
274 || !DECL_ARTIFICIAL (decl)
275 || !DECL_IGNORED_P (decl)
276 || !DECL_HAS_VALUE_EXPR_P (decl)
277 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
278 return NULL_TREE;
279
280 tree v = DECL_VALUE_EXPR (decl);
281 if (TREE_CODE (v) != COMPONENT_REF)
282 return NULL_TREE;
283
284 while (1)
285 switch (TREE_CODE (v))
286 {
287 case COMPONENT_REF:
288 case MEM_REF:
289 case INDIRECT_REF:
290 CASE_CONVERT:
291 case POINTER_PLUS_EXPR:
292 v = TREE_OPERAND (v, 0);
293 continue;
294 case PARM_DECL:
295 if (DECL_CONTEXT (v) == current_function_decl
296 && DECL_ARTIFICIAL (v)
297 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
298 return v;
299 return NULL_TREE;
300 default:
301 return NULL_TREE;
302 }
303 }
304
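/* For example (C++), in

     struct S {
       int x;
       void f () {
     #pragma omp parallel private (x)
         x = 0;
       }
     };

   the front end creates an artificial, ignored VAR_DECL for the
   privatized "x" whose DECL_VALUE_EXPR is essentially this->x; the walk
   above peels the COMPONENT_REF and returns the underlying "this"
   PARM_DECL.  */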
305 /* Helper for unshare_and_remap, called through walk_tree. */
306
307 static tree
308 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
309 {
310 tree *pair = (tree *) data;
311 if (*tp == pair[0])
312 {
313 *tp = unshare_expr (pair[1]);
314 *walk_subtrees = 0;
315 }
316 else if (IS_TYPE_OR_DECL_P (*tp))
317 *walk_subtrees = 0;
318 return NULL_TREE;
319 }
320
321 /* Return unshare_expr (X) with all occurrences of FROM
322 replaced with TO. */
323
324 static tree
325 unshare_and_remap (tree x, tree from, tree to)
326 {
327 tree pair[2] = { from, to };
328 x = unshare_expr (x);
329 walk_tree (&x, unshare_and_remap_1, pair, NULL);
330 return x;
331 }
332
333 /* Convenience function for calling scan_omp_1_op on tree operands. */
334
335 static inline tree
336 scan_omp_op (tree *tp, omp_context *ctx)
337 {
338 struct walk_stmt_info wi;
339
340 memset (&wi, 0, sizeof (wi));
341 wi.info = ctx;
342 wi.want_locations = true;
343
344 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
345 }
346
347 static void lower_omp (gimple_seq *, omp_context *);
348 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
349 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
350
351 /* Return true if CTX is for an omp parallel. */
352
353 static inline bool
354 is_parallel_ctx (omp_context *ctx)
355 {
356 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
357 }
358
359
360 /* Return true if CTX is for an omp task. */
361
362 static inline bool
363 is_task_ctx (omp_context *ctx)
364 {
365 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
366 }
367
368
369 /* Return true if CTX is for an omp taskloop. */
370
371 static inline bool
372 is_taskloop_ctx (omp_context *ctx)
373 {
374 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
375 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
376 }
377
378
379 /* Return true if CTX is for a host omp teams. */
380
381 static inline bool
382 is_host_teams_ctx (omp_context *ctx)
383 {
384 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
385 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
386 }
387
388 /* Return true if CTX is for an omp parallel or omp task or host omp teams
389 (the last one is strictly not a task region in OpenMP speak, but we
390 need to treat it similarly). */
391
392 static inline bool
393 is_taskreg_ctx (omp_context *ctx)
394 {
395 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
396 }
397
398 /* Return true if EXPR is variable sized. */
399
400 static inline bool
401 is_variable_sized (const_tree expr)
402 {
403 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
404 }
405
406 /* Lookup variables.  The "maybe" form allows the variable not to
407   have been entered; otherwise we assert that the variable must have
408   been entered.  */
409
410 static inline tree
411 lookup_decl (tree var, omp_context *ctx)
412 {
413 tree *n = ctx->cb.decl_map->get (var);
414 return *n;
415 }
416
417 static inline tree
418 maybe_lookup_decl (const_tree var, omp_context *ctx)
419 {
420 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
421 return n ? *n : NULL_TREE;
422 }
423
424 static inline tree
425 lookup_field (tree var, omp_context *ctx)
426 {
427 splay_tree_node n;
428 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
429 return (tree) n->value;
430 }
431
432 static inline tree
433 lookup_sfield (splay_tree_key key, omp_context *ctx)
434 {
435 splay_tree_node n;
436 n = splay_tree_lookup (ctx->sfield_map
437 ? ctx->sfield_map : ctx->field_map, key);
438 return (tree) n->value;
439 }
440
441 static inline tree
442 lookup_sfield (tree var, omp_context *ctx)
443 {
444 return lookup_sfield ((splay_tree_key) var, ctx);
445 }
446
447 static inline tree
448 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
449 {
450 splay_tree_node n;
451 n = splay_tree_lookup (ctx->field_map, key);
452 return n ? (tree) n->value : NULL_TREE;
453 }
454
455 static inline tree
456 maybe_lookup_field (tree var, omp_context *ctx)
457 {
458 return maybe_lookup_field ((splay_tree_key) var, ctx);
459 }
460
461 /* Return true if DECL should be copied by pointer. SHARED_CTX is
462 the parallel context if DECL is to be shared. */
463
464 static bool
465 use_pointer_for_field (tree decl, omp_context *shared_ctx)
466 {
467 if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
468 || TYPE_ATOMIC (TREE_TYPE (decl)))
469 return true;
470
471 /* We can only use copy-in/copy-out semantics for shared variables
472 when we know the value is not accessible from an outer scope. */
473 if (shared_ctx)
474 {
475 gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
476
477 /* ??? Trivially accessible from anywhere. But why would we even
478 be passing an address in this case? Should we simply assert
479 this to be false, or should we have a cleanup pass that removes
480 these from the list of mappings? */
481 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
482 return true;
483
484 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
485 without analyzing the expression whether or not its location
486 is accessible to anyone else. In the case of nested parallel
487 regions it certainly may be. */
488 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
489 return true;
490
491 /* Do not use copy-in/copy-out for variables that have their
492 address taken. */
493 if (is_global_var (decl))
494 {
495 /* For file scope vars, track whether we've seen them as
496 non-addressable initially and in that case, keep the same
497 answer for the duration of the pass, even when they are made
498 addressable later on e.g. through reduction expansion. Global
499 variables which weren't addressable before the pass will not
500 have their privatized copies address taken. See PR91216. */
501 if (!TREE_ADDRESSABLE (decl))
502 {
503 if (!global_nonaddressable_vars)
504 global_nonaddressable_vars = BITMAP_ALLOC (NULL);
505 bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
506 }
507 else if (!global_nonaddressable_vars
508 || !bitmap_bit_p (global_nonaddressable_vars,
509 DECL_UID (decl)))
510 return true;
511 }
512 else if (TREE_ADDRESSABLE (decl))
513 return true;
514
515 /* lower_send_shared_vars only uses copy-in, but not copy-out
516 for these. */
517 if (TREE_READONLY (decl)
518 || ((TREE_CODE (decl) == RESULT_DECL
519 || TREE_CODE (decl) == PARM_DECL)
520 && DECL_BY_REFERENCE (decl)))
521 return false;
522
523 /* Disallow copy-in/out in nested parallel if
524 decl is shared in outer parallel, otherwise
525 each thread could store the shared variable
526 in its own copy-in location, making the
527 variable no longer really shared. */
528 if (shared_ctx->is_nested)
529 {
530 omp_context *up;
531
532 for (up = shared_ctx->outer; up; up = up->outer)
533 if ((is_taskreg_ctx (up)
534 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
535 && is_gimple_omp_offloaded (up->stmt)))
536 && maybe_lookup_decl (decl, up))
537 break;
538
539 if (up)
540 {
541 tree c;
542
543 if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
544 {
545 for (c = gimple_omp_target_clauses (up->stmt);
546 c; c = OMP_CLAUSE_CHAIN (c))
547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
548 && OMP_CLAUSE_DECL (c) == decl)
549 break;
550 }
551 else
552 for (c = gimple_omp_taskreg_clauses (up->stmt);
553 c; c = OMP_CLAUSE_CHAIN (c))
554 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
555 && OMP_CLAUSE_DECL (c) == decl)
556 break;
557
558 if (c)
559 goto maybe_mark_addressable_and_ret;
560 }
561 }
562
563       /* For tasks, avoid using copy-in/out.  As tasks can be
564         deferred or executed in a different thread, when GOMP_task
565 returns, the task hasn't necessarily terminated. */
566 if (is_task_ctx (shared_ctx))
567 {
568 tree outer;
569 maybe_mark_addressable_and_ret:
570 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
571 if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
572 {
573 /* Taking address of OUTER in lower_send_shared_vars
574 might need regimplification of everything that uses the
575 variable. */
576 if (!make_addressable_vars)
577 make_addressable_vars = BITMAP_ALLOC (NULL);
578 bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
579 TREE_ADDRESSABLE (outer) = 1;
580 }
581 return true;
582 }
583 }
584
585 return false;
586 }
587
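/* A sketch: for "int x; #pragma omp parallel shared (x)" where the
   address of X is never taken, X can be communicated by value with
   copy-in/copy-out (an "int x" field); once X is TREE_ADDRESSABLE the
   field must instead be "int *x" so that every thread operates on the
   original object.  */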
588 /* Construct a new automatic decl similar to VAR. */
589
590 static tree
591 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
592 {
593 tree copy = copy_var_decl (var, name, type);
594
595 DECL_CONTEXT (copy) = current_function_decl;
596
597 if (ctx)
598 {
599 DECL_CHAIN (copy) = ctx->block_vars;
600 ctx->block_vars = copy;
601 }
602 else
603 record_vars (copy);
604
605 /* If VAR is listed in make_addressable_vars, it wasn't
606 originally addressable, but was only later made so.
607     We don't need to take the address of privatized
608     copies of such a var.  */
609 if (TREE_ADDRESSABLE (var)
610 && ((make_addressable_vars
611 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
612 || (global_nonaddressable_vars
613 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
614 TREE_ADDRESSABLE (copy) = 0;
615
616 return copy;
617 }
618
619 static tree
620 omp_copy_decl_1 (tree var, omp_context *ctx)
621 {
622 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
623 }
624
625 /* Build tree nodes to access the field for VAR on the receiver side. */
626
627 static tree
628 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
629 {
630 tree x, field = lookup_field (var, ctx);
631
632 /* If the receiver record type was remapped in the child function,
633 remap the field into the new record type. */
634 x = maybe_lookup_field (field, ctx);
635 if (x != NULL)
636 field = x;
637
638 x = build_simple_mem_ref (ctx->receiver_decl);
639 TREE_THIS_NOTRAP (x) = 1;
640 x = omp_build_component_ref (x, field);
641 if (by_ref)
642 {
643 x = build_simple_mem_ref (x);
644 TREE_THIS_NOTRAP (x) = 1;
645 }
646
647 return x;
648 }
649
650 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
651 of a parallel, this is a component reference; for workshare constructs
652 this is some variable. */
653
654 static tree
655 build_outer_var_ref (tree var, omp_context *ctx,
656 enum omp_clause_code code = OMP_CLAUSE_ERROR)
657 {
658 tree x;
659 omp_context *outer = ctx->outer;
660 for (; outer; outer = outer->outer)
661 {
662 if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
663 continue;
664 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
665 && !maybe_lookup_decl (var, outer))
666 continue;
667 break;
668 }
669
670 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
671 x = var;
672 else if (is_variable_sized (var))
673 {
674 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
675 x = build_outer_var_ref (x, ctx, code);
676 x = build_simple_mem_ref (x);
677 }
678 else if (is_taskreg_ctx (ctx))
679 {
680 bool by_ref = use_pointer_for_field (var, NULL);
681 x = build_receiver_ref (var, by_ref, ctx);
682 }
683 else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
684 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
685 || ctx->loop_p
686 || code == OMP_CLAUSE_ALLOCATE
687 || (code == OMP_CLAUSE_PRIVATE
688 && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
689 || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
690 || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
691 {
692 /* #pragma omp simd isn't a worksharing construct, and can reference
693 even private vars in its linear etc. clauses.
694 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
695 to private vars in all worksharing constructs. */
696 x = NULL_TREE;
697 if (outer && is_taskreg_ctx (outer))
698 x = lookup_decl (var, outer);
699 else if (outer)
700 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
701 if (x == NULL_TREE)
702 x = var;
703 }
704 else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
705 {
706 gcc_assert (outer);
707 splay_tree_node n
708 = splay_tree_lookup (outer->field_map,
709 (splay_tree_key) &DECL_UID (var));
710 if (n == NULL)
711 {
712 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
713 x = var;
714 else
715 x = lookup_decl (var, outer);
716 }
717 else
718 {
719 tree field = (tree) n->value;
720 /* If the receiver record type was remapped in the child function,
721 remap the field into the new record type. */
722 x = maybe_lookup_field (field, outer);
723 if (x != NULL)
724 field = x;
725
726 x = build_simple_mem_ref (outer->receiver_decl);
727 x = omp_build_component_ref (x, field);
728 if (use_pointer_for_field (var, outer))
729 x = build_simple_mem_ref (x);
730 }
731 }
732 else if (outer)
733 x = lookup_decl (var, outer);
734 else if (omp_privatize_by_reference (var))
735    /* This can happen with orphaned constructs.  If var is a reference,
736       it may be shared and as such valid.  */
737 x = var;
738 else if (omp_member_access_dummy_var (var))
739 x = var;
740 else
741 gcc_unreachable ();
742
743 if (x == var)
744 {
745 tree t = omp_member_access_dummy_var (var);
746 if (t)
747 {
748 x = DECL_VALUE_EXPR (var);
749 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
750 if (o != t)
751 x = unshare_and_remap (x, t, o);
752 else
753 x = unshare_expr (x);
754 }
755 }
756
757 if (omp_privatize_by_reference (var))
758 x = build_simple_mem_ref (x);
759
760 return x;
761 }
762
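/* For instance (a sketch), in

     #pragma omp parallel private (x)
     #pragma omp simd linear (x)
     for (i = 0; i < n; i++) ...

   the simd's linear clause legitimately refers to the parallel's private
   X; the non-worksharing cases in build_outer_var_ref above resolve such
   references to the privatized copy in the enclosing context.  */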
763 /* Build tree nodes to access the field for VAR on the sender side. */
764
765 static tree
766 build_sender_ref (splay_tree_key key, omp_context *ctx)
767 {
768 tree field = lookup_sfield (key, ctx);
769 return omp_build_component_ref (ctx->sender_decl, field);
770 }
771
772 static tree
773 build_sender_ref (tree var, omp_context *ctx)
774 {
775 return build_sender_ref ((splay_tree_key) var, ctx);
776 }
777
778 /* Add a new field for VAR inside the structure CTX->SENDER_DECL.  The
779   bits of MASK select the target record(s), field type and lookup key.  */
780
781 static void
782 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
783 {
784 tree field, type, sfield = NULL_TREE;
785 splay_tree_key key = (splay_tree_key) var;
786
787 if ((mask & 16) != 0)
788 {
789 key = (splay_tree_key) &DECL_NAME (var);
790 gcc_checking_assert (key != (splay_tree_key) var);
791 }
792 if ((mask & 8) != 0)
793 {
794 key = (splay_tree_key) &DECL_UID (var);
795 gcc_checking_assert (key != (splay_tree_key) var);
796 }
797 gcc_assert ((mask & 1) == 0
798 || !splay_tree_lookup (ctx->field_map, key));
799 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
800 || !splay_tree_lookup (ctx->sfield_map, key));
801 gcc_assert ((mask & 3) == 3
802 || !is_gimple_omp_oacc (ctx->stmt));
803
804 type = TREE_TYPE (var);
805 if ((mask & 16) != 0)
806 type = lang_hooks.decls.omp_array_data (var, true);
807
808 /* Prevent redeclaring the var in the split-off function with a restrict
809 pointer type. Note that we only clear type itself, restrict qualifiers in
810 the pointed-to type will be ignored by points-to analysis. */
811 if (POINTER_TYPE_P (type)
812 && TYPE_RESTRICT (type))
813 type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
814
815 if (mask & 4)
816 {
817 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
818 type = build_pointer_type (build_pointer_type (type));
819 }
820 else if (by_ref)
821 type = build_pointer_type (type);
822 else if ((mask & (32 | 3)) == 1
823 && omp_privatize_by_reference (var))
824 type = TREE_TYPE (type);
825
826 field = build_decl (DECL_SOURCE_LOCATION (var),
827 FIELD_DECL, DECL_NAME (var), type);
828
829 /* Remember what variable this field was created for. This does have a
830 side effect of making dwarf2out ignore this member, so for helpful
831 debugging we clear it later in delete_omp_context. */
832 DECL_ABSTRACT_ORIGIN (field) = var;
833 if ((mask & 16) == 0 && type == TREE_TYPE (var))
834 {
835 SET_DECL_ALIGN (field, DECL_ALIGN (var));
836 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
837 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
838 }
839 else
840 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
841
842 if ((mask & 3) == 3)
843 {
844 insert_field_into_struct (ctx->record_type, field);
845 if (ctx->srecord_type)
846 {
847 sfield = build_decl (DECL_SOURCE_LOCATION (var),
848 FIELD_DECL, DECL_NAME (var), type);
849 DECL_ABSTRACT_ORIGIN (sfield) = var;
850 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
851 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
852 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
853 insert_field_into_struct (ctx->srecord_type, sfield);
854 }
855 }
856 else
857 {
858 if (ctx->srecord_type == NULL_TREE)
859 {
860 tree t;
861
862 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
863 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
864 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
865 {
866 sfield = build_decl (DECL_SOURCE_LOCATION (t),
867 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
868 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
869 insert_field_into_struct (ctx->srecord_type, sfield);
870 splay_tree_insert (ctx->sfield_map,
871 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
872 (splay_tree_value) sfield);
873 }
874 }
875 sfield = field;
876 insert_field_into_struct ((mask & 1) ? ctx->record_type
877 : ctx->srecord_type, field);
878 }
879
880 if (mask & 1)
881 splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
882 if ((mask & 2) && ctx->sfield_map)
883 splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
884 }
885
886 static tree
887 install_var_local (tree var, omp_context *ctx)
888 {
889 tree new_var = omp_copy_decl_1 (var, ctx);
890 insert_decl_map (&ctx->cb, var, new_var);
891 return new_var;
892 }
893
894 /* Adjust the replacement for DECL in CTX for the new context. This means
895 copying the DECL_VALUE_EXPR, and fixing up the type. */
896
897 static void
898 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
899 {
900 tree new_decl, size;
901
902 new_decl = lookup_decl (decl, ctx);
903
904 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
905
906 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
907 && DECL_HAS_VALUE_EXPR_P (decl))
908 {
909 tree ve = DECL_VALUE_EXPR (decl);
910 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
911 SET_DECL_VALUE_EXPR (new_decl, ve);
912 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
913 }
914
915 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
916 {
917 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
918 if (size == error_mark_node)
919 size = TYPE_SIZE (TREE_TYPE (new_decl));
920 DECL_SIZE (new_decl) = size;
921
922 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
923 if (size == error_mark_node)
924 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
925 DECL_SIZE_UNIT (new_decl) = size;
926 }
927 }
928
929 /* The callback for remap_decl. Search all containing contexts for a
930 mapping of the variable; this avoids having to duplicate the splay
931 tree ahead of time. We know a mapping doesn't already exist in the
932 given context. Create new mappings to implement default semantics. */
933
934 static tree
935 omp_copy_decl (tree var, copy_body_data *cb)
936 {
937 omp_context *ctx = (omp_context *) cb;
938 tree new_var;
939
940 if (TREE_CODE (var) == LABEL_DECL)
941 {
942 if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
943 return var;
944 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
945 DECL_CONTEXT (new_var) = current_function_decl;
946 insert_decl_map (&ctx->cb, var, new_var);
947 return new_var;
948 }
949
950 while (!is_taskreg_ctx (ctx))
951 {
952 ctx = ctx->outer;
953 if (ctx == NULL)
954 return var;
955 new_var = maybe_lookup_decl (var, ctx);
956 if (new_var)
957 return new_var;
958 }
959
960 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
961 return var;
962
963 return error_mark_node;
964 }
965
966 /* Create a new context, with OUTER_CTX being the surrounding context. */
967
968 static omp_context *
969 new_omp_context (gimple *stmt, omp_context *outer_ctx)
970 {
971 omp_context *ctx = XCNEW (omp_context);
972
973 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
974 (splay_tree_value) ctx);
975 ctx->stmt = stmt;
976
977 if (outer_ctx)
978 {
979 ctx->outer = outer_ctx;
980 ctx->cb = outer_ctx->cb;
981 ctx->cb.block = NULL;
982 ctx->depth = outer_ctx->depth + 1;
983 }
984 else
985 {
986 ctx->cb.src_fn = current_function_decl;
987 ctx->cb.dst_fn = current_function_decl;
988 ctx->cb.src_node = cgraph_node::get (current_function_decl);
989 gcc_checking_assert (ctx->cb.src_node);
990 ctx->cb.dst_node = ctx->cb.src_node;
991 ctx->cb.src_cfun = cfun;
992 ctx->cb.copy_decl = omp_copy_decl;
993 ctx->cb.eh_lp_nr = 0;
994 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
995 ctx->cb.adjust_array_error_bounds = true;
996 ctx->cb.dont_remap_vla_if_no_change = true;
997 ctx->depth = 1;
998 }
999
1000 ctx->cb.decl_map = new hash_map<tree, tree>;
1001
1002 return ctx;
1003 }
1004
1005 static gimple_seq maybe_catch_exception (gimple_seq);
1006
1007 /* Finalize task copyfn. */
1008
1009 static void
1010 finalize_task_copyfn (gomp_task *task_stmt)
1011 {
1012 struct function *child_cfun;
1013 tree child_fn;
1014 gimple_seq seq = NULL, new_seq;
1015 gbind *bind;
1016
1017 child_fn = gimple_omp_task_copy_fn (task_stmt);
1018 if (child_fn == NULL_TREE)
1019 return;
1020
1021 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1022 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1023
1024 push_cfun (child_cfun);
1025 bind = gimplify_body (child_fn, false);
1026 gimple_seq_add_stmt (&seq, bind);
1027 new_seq = maybe_catch_exception (seq);
1028 if (new_seq != seq)
1029 {
1030 bind = gimple_build_bind (NULL, new_seq, NULL);
1031 seq = NULL;
1032 gimple_seq_add_stmt (&seq, bind);
1033 }
1034 gimple_set_body (child_fn, seq);
1035 pop_cfun ();
1036
1037 /* Inform the callgraph about the new function. */
1038 cgraph_node *node = cgraph_node::get_create (child_fn);
1039 node->parallelized_function = 1;
1040 cgraph_node::add_new_function (child_fn, false);
1041 }
1042
1043 /* Destroy an omp_context data structure.  Called through the splay tree
1044   value delete callback.  */
1045
1046 static void
1047 delete_omp_context (splay_tree_value value)
1048 {
1049 omp_context *ctx = (omp_context *) value;
1050
1051 delete ctx->cb.decl_map;
1052
1053 if (ctx->field_map)
1054 splay_tree_delete (ctx->field_map);
1055 if (ctx->sfield_map)
1056 splay_tree_delete (ctx->sfield_map);
1057
1058 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1059 it produces corrupt debug information. */
1060 if (ctx->record_type)
1061 {
1062 tree t;
1063 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1064 DECL_ABSTRACT_ORIGIN (t) = NULL;
1065 }
1066 if (ctx->srecord_type)
1067 {
1068 tree t;
1069 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1070 DECL_ABSTRACT_ORIGIN (t) = NULL;
1071 }
1072
1073 if (ctx->task_reduction_map)
1074 {
1075 ctx->task_reductions.release ();
1076 delete ctx->task_reduction_map;
1077 }
1078
1079 delete ctx->lastprivate_conditional_map;
1080 delete ctx->allocate_map;
1081
1082 XDELETE (ctx);
1083 }
1084
1085 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1086 context. */
1087
1088 static void
1089 fixup_child_record_type (omp_context *ctx)
1090 {
1091 tree f, type = ctx->record_type;
1092
1093 if (!ctx->receiver_decl)
1094 return;
1095 /* ??? It isn't sufficient to just call remap_type here, because
1096 variably_modified_type_p doesn't work the way we expect for
1097 record types. Testing each field for whether it needs remapping
1098 and creating a new record by hand works, however. */
1099 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1100 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1101 break;
1102 if (f)
1103 {
1104 tree name, new_fields = NULL;
1105
1106 type = lang_hooks.types.make_type (RECORD_TYPE);
1107 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1108 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1109 TYPE_DECL, name, type);
1110 TYPE_NAME (type) = name;
1111
1112 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1113 {
1114 tree new_f = copy_node (f);
1115 DECL_CONTEXT (new_f) = type;
1116 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1117 DECL_CHAIN (new_f) = new_fields;
1118 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1119 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1122 &ctx->cb, NULL);
1123 new_fields = new_f;
1124
1125 /* Arrange to be able to look up the receiver field
1126 given the sender field. */
1127 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1128 (splay_tree_value) new_f);
1129 }
1130 TYPE_FIELDS (type) = nreverse (new_fields);
1131 layout_type (type);
1132 }
1133
1134 /* In a target region we never modify any of the pointers in *.omp_data_i,
1135 so attempt to help the optimizers. */
1136 if (is_gimple_omp_offloaded (ctx->stmt))
1137 type = build_qualified_type (type, TYPE_QUAL_CONST);
1138
1139 TREE_TYPE (ctx->receiver_decl)
1140 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1141 }
1142
1143 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1144 specified by CLAUSES. */
1145
1146 static void
1147 scan_sharing_clauses (tree clauses, omp_context *ctx)
1148 {
1149 tree c, decl;
1150 bool scan_array_reductions = false;
1151
1152 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1153 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1154 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1155        /* omp_default_mem_alloc is 1.  */
1156 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1157 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1158 {
1159 /* The allocate clauses that appear on a target construct or on
1160 constructs in a target region must specify an allocator expression
1161 unless a requires directive with the dynamic_allocators clause
1162 is present in the same compilation unit. */
1163 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1164 && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
1165 && omp_maybe_offloaded_ctx (ctx))
1166 error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
1167 " specify an allocator here");
1168 if (ctx->allocate_map == NULL)
1169 ctx->allocate_map = new hash_map<tree, tree>;
1170 tree val = integer_zero_node;
1171 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1172 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1173 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1174 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1175 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1176 }
1177
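  /* A sketch of what the loop above records: for
     "#pragma omp task allocate (allocator (a), align (64) : x)" it maps
     X -> TREE_LIST (a, 64) in ctx->allocate_map, whereas a plain
     "allocate (omp_default_mem_alloc : x)" with no align modifier is
     skipped entirely.  */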
1178 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1179 {
1180 bool by_ref;
1181
1182 switch (OMP_CLAUSE_CODE (c))
1183 {
1184 case OMP_CLAUSE_PRIVATE:
1185 decl = OMP_CLAUSE_DECL (c);
1186 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1187 goto do_private;
1188 else if (!is_variable_sized (decl))
1189 install_var_local (decl, ctx);
1190 break;
1191
1192 case OMP_CLAUSE_SHARED:
1193 decl = OMP_CLAUSE_DECL (c);
1194 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1195 ctx->allocate_map->remove (decl);
1196         /* Ignore shared directives in a teams construct nested inside
1197            a target construct.  */
1198 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1199 && !is_host_teams_ctx (ctx))
1200 {
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1204 if (is_global_var (odecl))
1205 break;
1206 insert_decl_map (&ctx->cb, decl, odecl);
1207 break;
1208 }
1209 gcc_assert (is_taskreg_ctx (ctx));
1210 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1211 || !is_variable_sized (decl));
1212 /* Global variables don't need to be copied,
1213 the receiver side will use them directly. */
1214 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1215 break;
1216 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1217 {
1218 use_pointer_for_field (decl, ctx);
1219 break;
1220 }
1221 by_ref = use_pointer_for_field (decl, NULL);
1222 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1223 || TREE_ADDRESSABLE (decl)
1224 || by_ref
1225 || omp_privatize_by_reference (decl))
1226 {
1227 by_ref = use_pointer_for_field (decl, ctx);
1228 install_var_field (decl, by_ref, 3, ctx);
1229 install_var_local (decl, ctx);
1230 break;
1231 }
1232 /* We don't need to copy const scalar vars back. */
1233 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1234 goto do_private;
1235
1236 case OMP_CLAUSE_REDUCTION:
1237 /* Collect 'reduction' clauses on OpenACC compute construct. */
1238 if (is_gimple_omp_oacc (ctx->stmt)
1239 && is_gimple_omp_offloaded (ctx->stmt))
1240 {
1241 /* No 'reduction' clauses on OpenACC 'kernels'. */
1242 gcc_checking_assert (!is_oacc_kernels (ctx));
1243 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1244 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1245
1246 ctx->local_reduction_clauses
1247 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1248 }
1249 /* FALLTHRU */
1250
1251 case OMP_CLAUSE_IN_REDUCTION:
1252 decl = OMP_CLAUSE_DECL (c);
1253 if (ctx->allocate_map
1254 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1255 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1256 || OMP_CLAUSE_REDUCTION_TASK (c)))
1257 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1258 || is_task_ctx (ctx)))
1259 {
1260 /* For now. */
1261 if (ctx->allocate_map->get (decl))
1262 ctx->allocate_map->remove (decl);
1263 }
1264 if (TREE_CODE (decl) == MEM_REF)
1265 {
1266 tree t = TREE_OPERAND (decl, 0);
1267 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1268 t = TREE_OPERAND (t, 0);
1269 if (TREE_CODE (t) == INDIRECT_REF
1270 || TREE_CODE (t) == ADDR_EXPR)
1271 t = TREE_OPERAND (t, 0);
1272 if (is_omp_target (ctx->stmt))
1273 {
1274 if (is_variable_sized (t))
1275 {
1276 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1277 t = DECL_VALUE_EXPR (t);
1278 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1279 t = TREE_OPERAND (t, 0);
1280 gcc_assert (DECL_P (t));
1281 }
1282 tree at = t;
1283 if (ctx->outer)
1284 scan_omp_op (&at, ctx->outer);
1285 tree nt = omp_copy_decl_1 (at, ctx->outer);
1286 splay_tree_insert (ctx->field_map,
1287 (splay_tree_key) &DECL_CONTEXT (t),
1288 (splay_tree_value) nt);
1289 if (at != t)
1290 splay_tree_insert (ctx->field_map,
1291 (splay_tree_key) &DECL_CONTEXT (at),
1292 (splay_tree_value) nt);
1293 break;
1294 }
1295 install_var_local (t, ctx);
1296 if (is_taskreg_ctx (ctx)
1297 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1298 || (is_task_ctx (ctx)
1299 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1300 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1301 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1302 == POINTER_TYPE)))))
1303 && !is_variable_sized (t)
1304 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1305 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1306 && !is_task_ctx (ctx))))
1307 {
1308 by_ref = use_pointer_for_field (t, NULL);
1309 if (is_task_ctx (ctx)
1310 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1311 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1312 {
1313 install_var_field (t, false, 1, ctx);
1314 install_var_field (t, by_ref, 2, ctx);
1315 }
1316 else
1317 install_var_field (t, by_ref, 3, ctx);
1318 }
1319 break;
1320 }
1321 if (is_omp_target (ctx->stmt))
1322 {
1323 tree at = decl;
1324 if (ctx->outer)
1325 scan_omp_op (&at, ctx->outer);
1326 tree nt = omp_copy_decl_1 (at, ctx->outer);
1327 splay_tree_insert (ctx->field_map,
1328 (splay_tree_key) &DECL_CONTEXT (decl),
1329 (splay_tree_value) nt);
1330 if (at != decl)
1331 splay_tree_insert (ctx->field_map,
1332 (splay_tree_key) &DECL_CONTEXT (at),
1333 (splay_tree_value) nt);
1334 break;
1335 }
1336 if (is_task_ctx (ctx)
1337 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1338 && OMP_CLAUSE_REDUCTION_TASK (c)
1339 && is_parallel_ctx (ctx)))
1340 {
1341 /* Global variables don't need to be copied,
1342 the receiver side will use them directly. */
1343 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1344 {
1345 by_ref = use_pointer_for_field (decl, ctx);
1346 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1347 install_var_field (decl, by_ref, 3, ctx);
1348 }
1349 install_var_local (decl, ctx);
1350 break;
1351 }
1352 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1353 && OMP_CLAUSE_REDUCTION_TASK (c))
1354 {
1355 install_var_local (decl, ctx);
1356 break;
1357 }
1358 goto do_private;
1359
1360 case OMP_CLAUSE_LASTPRIVATE:
1361 /* Let the corresponding firstprivate clause create
1362 the variable. */
1363 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1364 break;
1365 /* FALLTHRU */
1366
1367 case OMP_CLAUSE_FIRSTPRIVATE:
1368 case OMP_CLAUSE_LINEAR:
1369 decl = OMP_CLAUSE_DECL (c);
1370 do_private:
1371 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1372 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1373 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1374 && is_gimple_omp_offloaded (ctx->stmt))
1375 {
1376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1377 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
1378 && lang_hooks.decls.omp_array_data (decl, true)))
1379 {
1380 by_ref = !omp_privatize_by_reference (decl);
1381 install_var_field (decl, by_ref, 3, ctx);
1382 }
1383 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1384 {
1385 if (TREE_CODE (decl) == INDIRECT_REF)
1386 decl = TREE_OPERAND (decl, 0);
1387 install_var_field (decl, true, 3, ctx);
1388 }
1389 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1390 install_var_field (decl, true, 3, ctx);
1391 else
1392 install_var_field (decl, false, 3, ctx);
1393 }
1394 if (is_variable_sized (decl))
1395 {
1396 if (is_task_ctx (ctx))
1397 {
1398 if (ctx->allocate_map
1399 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1400 {
1401 /* For now. */
1402 if (ctx->allocate_map->get (decl))
1403 ctx->allocate_map->remove (decl);
1404 }
1405 install_var_field (decl, false, 1, ctx);
1406 }
1407 break;
1408 }
1409 else if (is_taskreg_ctx (ctx))
1410 {
1411 bool global
1412 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1413 by_ref = use_pointer_for_field (decl, NULL);
1414
1415 if (is_task_ctx (ctx)
1416 && (global || by_ref || omp_privatize_by_reference (decl)))
1417 {
1418 if (ctx->allocate_map
1419 && ctx->allocate_map->get (decl))
1420 install_var_field (decl, by_ref, 32 | 1, ctx);
1421 else
1422 install_var_field (decl, false, 1, ctx);
1423 if (!global)
1424 install_var_field (decl, by_ref, 2, ctx);
1425 }
1426 else if (!global)
1427 install_var_field (decl, by_ref, 3, ctx);
1428 }
1429 install_var_local (decl, ctx);
1430       /* For descriptor arrays on target: firstprivatize data + attach ptr.  */
1431 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1432 && is_gimple_omp_offloaded (ctx->stmt)
1433 && !is_gimple_omp_oacc (ctx->stmt)
1434 && lang_hooks.decls.omp_array_data (decl, true))
1435 {
1436 install_var_field (decl, false, 16 | 3, ctx);
1437 install_var_field (decl, true, 8 | 3, ctx);
1438 }
1439 break;
1440
1441 case OMP_CLAUSE_USE_DEVICE_PTR:
1442 case OMP_CLAUSE_USE_DEVICE_ADDR:
1443 decl = OMP_CLAUSE_DECL (c);
1444
1445 /* Fortran array descriptors. */
1446 if (lang_hooks.decls.omp_array_data (decl, true))
1447 install_var_field (decl, false, 19, ctx);
1448 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1449 && !omp_privatize_by_reference (decl)
1450 && !omp_is_allocatable_or_ptr (decl))
1451 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1452 install_var_field (decl, true, 11, ctx);
1453 else
1454 install_var_field (decl, false, 11, ctx);
1455 if (DECL_SIZE (decl)
1456 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1457 {
1458 tree decl2 = DECL_VALUE_EXPR (decl);
1459 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1460 decl2 = TREE_OPERAND (decl2, 0);
1461 gcc_assert (DECL_P (decl2));
1462 install_var_local (decl2, ctx);
1463 }
1464 install_var_local (decl, ctx);
1465 break;
1466
1467 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1468 decl = OMP_CLAUSE_DECL (c);
1469 while (TREE_CODE (decl) == INDIRECT_REF
1470 || TREE_CODE (decl) == ARRAY_REF)
1471 decl = TREE_OPERAND (decl, 0);
1472 goto do_private;
1473
1474 case OMP_CLAUSE_IS_DEVICE_PTR:
1475 decl = OMP_CLAUSE_DECL (c);
1476 goto do_private;
1477
1478 case OMP_CLAUSE__LOOPTEMP_:
1479 case OMP_CLAUSE__REDUCTEMP_:
1480 gcc_assert (is_taskreg_ctx (ctx));
1481 decl = OMP_CLAUSE_DECL (c);
1482 install_var_field (decl, false, 3, ctx);
1483 install_var_local (decl, ctx);
1484 break;
1485
1486 case OMP_CLAUSE_COPYPRIVATE:
1487 case OMP_CLAUSE_COPYIN:
1488 decl = OMP_CLAUSE_DECL (c);
1489 by_ref = use_pointer_for_field (decl, NULL);
1490 install_var_field (decl, by_ref, 3, ctx);
1491 break;
1492
1493 case OMP_CLAUSE_FINAL:
1494 case OMP_CLAUSE_IF:
1495 case OMP_CLAUSE_NUM_THREADS:
1496 case OMP_CLAUSE_NUM_TEAMS:
1497 case OMP_CLAUSE_THREAD_LIMIT:
1498 case OMP_CLAUSE_DEVICE:
1499 case OMP_CLAUSE_SCHEDULE:
1500 case OMP_CLAUSE_DIST_SCHEDULE:
1501 case OMP_CLAUSE_DEPEND:
1502 case OMP_CLAUSE_PRIORITY:
1503 case OMP_CLAUSE_GRAINSIZE:
1504 case OMP_CLAUSE_NUM_TASKS:
1505 case OMP_CLAUSE_NUM_GANGS:
1506 case OMP_CLAUSE_NUM_WORKERS:
1507 case OMP_CLAUSE_VECTOR_LENGTH:
1508 case OMP_CLAUSE_DETACH:
1509 case OMP_CLAUSE_FILTER:
1510 if (ctx->outer)
1511 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1512 break;
1513
1514 case OMP_CLAUSE_TO:
1515 case OMP_CLAUSE_FROM:
1516 case OMP_CLAUSE_MAP:
1517 if (ctx->outer)
1518 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1519 decl = OMP_CLAUSE_DECL (c);
1520 /* If requested, make 'decl' addressable. */
1521 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1522 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1523 {
1524 gcc_checking_assert (DECL_P (decl));
1525
1526 bool decl_addressable = TREE_ADDRESSABLE (decl);
1527 if (!decl_addressable)
1528 {
1529 if (!make_addressable_vars)
1530 make_addressable_vars = BITMAP_ALLOC (NULL);
1531 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1532 TREE_ADDRESSABLE (decl) = 1;
1533 }
1534
1535 if (dump_enabled_p ())
1536 {
1537 location_t loc = OMP_CLAUSE_LOCATION (c);
1538 const dump_user_location_t d_u_loc
1539 = dump_user_location_t::from_location_t (loc);
1540 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1541 #if __GNUC__ >= 10
1542 # pragma GCC diagnostic push
1543 # pragma GCC diagnostic ignored "-Wformat"
1544 #endif
1545 if (!decl_addressable)
1546 dump_printf_loc (MSG_NOTE, d_u_loc,
1547 "variable %<%T%>"
1548 " made addressable\n",
1549 decl);
1550 else
1551 dump_printf_loc (MSG_NOTE, d_u_loc,
1552 "variable %<%T%>"
1553 " already made addressable\n",
1554 decl);
1555 #if __GNUC__ >= 10
1556 # pragma GCC diagnostic pop
1557 #endif
1558 }
1559
1560 /* Done. */
1561 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1562 }
1563       /* Global variables with the "omp declare target" attribute don't
1564          need to be copied; the receiver side will use them directly.
1565          However, globals with the "omp declare target link" attribute must
1566          be copied, as must maps using the ALWAYS modifier.  */
1567 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1568 && DECL_P (decl)
1569 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1570 && (OMP_CLAUSE_MAP_KIND (c)
1571 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1572 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1573 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1574 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1575 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1576 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1577 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1578 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1579 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1580 && varpool_node::get_create (decl)->offloadable
1581 && !lookup_attribute ("omp declare target link",
1582 DECL_ATTRIBUTES (decl)))
1583 break;
1584 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1585 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1586 {
1587 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1588 not offloaded; there is nothing to map for those. */
1589 if (!is_gimple_omp_offloaded (ctx->stmt)
1590 && !POINTER_TYPE_P (TREE_TYPE (decl))
1591 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1592 break;
1593 }
1594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1595 && DECL_P (decl)
1596 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1597 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1598 && is_omp_target (ctx->stmt))
1599 {
1600 /* If this is an offloaded region, an attach operation should
1601 only exist when the pointer variable is mapped in a prior
1602 clause.
1603 If we had an error, we may not have attempted to sort clauses
1604 properly, so avoid the test. */
1605 if (is_gimple_omp_offloaded (ctx->stmt)
1606 && !seen_error ())
1607 gcc_assert
1608 (maybe_lookup_decl (decl, ctx)
1609 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1610 && lookup_attribute ("omp declare target",
1611 DECL_ATTRIBUTES (decl))));
1612
1613 /* By itself, attach/detach is generated as part of pointer
1614 variable mapping and should not create new variables in the
1615              offloaded region; however, sender refs for it must be created
1616 for its address to be passed to the runtime. */
1617 tree field
1618 = build_decl (OMP_CLAUSE_LOCATION (c),
1619 FIELD_DECL, NULL_TREE, ptr_type_node);
1620 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1621 insert_field_into_struct (ctx->record_type, field);
1622 /* To not clash with a map of the pointer variable itself,
1623 attach/detach maps have their field looked up by the *clause*
1624 tree expression, not the decl. */
1625 gcc_assert (!splay_tree_lookup (ctx->field_map,
1626 (splay_tree_key) c));
1627 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1628 (splay_tree_value) field);
1629 break;
1630 }
1631 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1632 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1633 || (OMP_CLAUSE_MAP_KIND (c)
1634 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1635 {
1636 if (TREE_CODE (decl) == COMPONENT_REF
1637 || (TREE_CODE (decl) == INDIRECT_REF
1638 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1639 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1640 == REFERENCE_TYPE)
1641 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1642 == POINTER_TYPE)))))
1643 break;
1644 if (DECL_SIZE (decl)
1645 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1646 {
1647 tree decl2 = DECL_VALUE_EXPR (decl);
1648 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1649 decl2 = TREE_OPERAND (decl2, 0);
1650 gcc_assert (DECL_P (decl2));
1651 install_var_local (decl2, ctx);
1652 }
1653 install_var_local (decl, ctx);
1654 break;
1655 }
1656 if (DECL_P (decl))
1657 {
1658 if (DECL_SIZE (decl)
1659 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1660 {
1661 tree decl2 = DECL_VALUE_EXPR (decl);
1662 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1663 decl2 = TREE_OPERAND (decl2, 0);
1664 gcc_assert (DECL_P (decl2));
1665 install_var_field (decl2, true, 3, ctx);
1666 install_var_local (decl2, ctx);
1667 install_var_local (decl, ctx);
1668 }
1669 else
1670 {
1671 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1672 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1673 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1674 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1675 install_var_field (decl, true, 7, ctx);
1676 else
1677 install_var_field (decl, true, 3, ctx);
1678 if (is_gimple_omp_offloaded (ctx->stmt)
1679 && !(is_gimple_omp_oacc (ctx->stmt)
1680 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1681 install_var_local (decl, ctx);
1682 }
1683 }
1684 else
1685 {
1686 tree base = get_base_address (decl);
1687 tree nc = OMP_CLAUSE_CHAIN (c);
1688 if (DECL_P (base)
1689 && nc != NULL_TREE
1690 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1691 && OMP_CLAUSE_DECL (nc) == base
1692 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1693 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1694 {
1695 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1696 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1697 }
1698 else
1699 {
1700 if (ctx->outer)
1701 {
1702 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1703 decl = OMP_CLAUSE_DECL (c);
1704 }
1705 gcc_assert (!splay_tree_lookup (ctx->field_map,
1706 (splay_tree_key) decl));
1707 tree field
1708 = build_decl (OMP_CLAUSE_LOCATION (c),
1709 FIELD_DECL, NULL_TREE, ptr_type_node);
1710 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1711 insert_field_into_struct (ctx->record_type, field);
1712 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1713 (splay_tree_value) field);
1714 }
1715 }
1716 break;
1717
1718 case OMP_CLAUSE_ORDER:
1719 ctx->order_concurrent = true;
1720 break;
1721
1722 case OMP_CLAUSE_BIND:
1723 ctx->loop_p = true;
1724 break;
1725
1726 case OMP_CLAUSE_NOWAIT:
1727 case OMP_CLAUSE_ORDERED:
1728 case OMP_CLAUSE_COLLAPSE:
1729 case OMP_CLAUSE_UNTIED:
1730 case OMP_CLAUSE_MERGEABLE:
1731 case OMP_CLAUSE_PROC_BIND:
1732 case OMP_CLAUSE_SAFELEN:
1733 case OMP_CLAUSE_SIMDLEN:
1734 case OMP_CLAUSE_THREADS:
1735 case OMP_CLAUSE_SIMD:
1736 case OMP_CLAUSE_NOGROUP:
1737 case OMP_CLAUSE_DEFAULTMAP:
1738 case OMP_CLAUSE_ASYNC:
1739 case OMP_CLAUSE_WAIT:
1740 case OMP_CLAUSE_GANG:
1741 case OMP_CLAUSE_WORKER:
1742 case OMP_CLAUSE_VECTOR:
1743 case OMP_CLAUSE_INDEPENDENT:
1744 case OMP_CLAUSE_AUTO:
1745 case OMP_CLAUSE_SEQ:
1746 case OMP_CLAUSE_TILE:
1747 case OMP_CLAUSE__SIMT_:
1748 case OMP_CLAUSE_DEFAULT:
1749 case OMP_CLAUSE_NONTEMPORAL:
1750 case OMP_CLAUSE_IF_PRESENT:
1751 case OMP_CLAUSE_FINALIZE:
1752 case OMP_CLAUSE_TASK_REDUCTION:
1753 case OMP_CLAUSE_ALLOCATE:
1754 break;
1755
1756 case OMP_CLAUSE_ALIGNED:
1757 decl = OMP_CLAUSE_DECL (c);
1758 if (is_global_var (decl)
1759 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1760 install_var_local (decl, ctx);
1761 break;
1762
1763 case OMP_CLAUSE__CONDTEMP_:
1764 decl = OMP_CLAUSE_DECL (c);
1765 if (is_parallel_ctx (ctx))
1766 {
1767 install_var_field (decl, false, 3, ctx);
1768 install_var_local (decl, ctx);
1769 }
1770 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1771 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1772 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1773 install_var_local (decl, ctx);
1774 break;
1775
1776 case OMP_CLAUSE__CACHE_:
1777 case OMP_CLAUSE_NOHOST:
1778 default:
1779 gcc_unreachable ();
1780 }
1781 }
1782
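  /* Second pass over the clauses: with all the fields installed above,
     fix up the remapped copies of the affected decls in this context
     (and note any reduction/lastprivate/linear sequences that still
     need scanning).  */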
1783 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1784 {
1785 switch (OMP_CLAUSE_CODE (c))
1786 {
1787 case OMP_CLAUSE_LASTPRIVATE:
1788 /* Let the corresponding firstprivate clause create
1789 the variable. */
1790 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1791 scan_array_reductions = true;
1792 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1793 break;
1794 /* FALLTHRU */
1795
1796 case OMP_CLAUSE_FIRSTPRIVATE:
1797 case OMP_CLAUSE_PRIVATE:
1798 case OMP_CLAUSE_LINEAR:
1799 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1800 case OMP_CLAUSE_IS_DEVICE_PTR:
1801 decl = OMP_CLAUSE_DECL (c);
1802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1803 {
1804 while (TREE_CODE (decl) == INDIRECT_REF
1805 || TREE_CODE (decl) == ARRAY_REF)
1806 decl = TREE_OPERAND (decl, 0);
1807 }
1808
1809 if (is_variable_sized (decl))
1810 {
1811 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1812 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1813 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1814 && is_gimple_omp_offloaded (ctx->stmt))
1815 {
1816 tree decl2 = DECL_VALUE_EXPR (decl);
1817 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1818 decl2 = TREE_OPERAND (decl2, 0);
1819 gcc_assert (DECL_P (decl2));
1820 install_var_local (decl2, ctx);
1821 fixup_remapped_decl (decl2, ctx, false);
1822 }
1823 install_var_local (decl, ctx);
1824 }
1825 fixup_remapped_decl (decl, ctx,
1826 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1827 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1828 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1829 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1830 scan_array_reductions = true;
1831 break;
1832
1833 case OMP_CLAUSE_REDUCTION:
1834 case OMP_CLAUSE_IN_REDUCTION:
1835 decl = OMP_CLAUSE_DECL (c);
1836 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1837 {
1838 if (is_variable_sized (decl))
1839 install_var_local (decl, ctx);
1840 fixup_remapped_decl (decl, ctx, false);
1841 }
1842 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1843 scan_array_reductions = true;
1844 break;
1845
1846 case OMP_CLAUSE_TASK_REDUCTION:
1847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1848 scan_array_reductions = true;
1849 break;
1850
1851 case OMP_CLAUSE_SHARED:
1852 /* Ignore shared directives in a teams construct inside of
1853 a target construct. */
1854 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1855 && !is_host_teams_ctx (ctx))
1856 break;
1857 decl = OMP_CLAUSE_DECL (c);
1858 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1859 break;
1860 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1861 {
1862 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1863 ctx->outer)))
1864 break;
1865 bool by_ref = use_pointer_for_field (decl, ctx);
1866 install_var_field (decl, by_ref, 11, ctx);
1867 break;
1868 }
1869 fixup_remapped_decl (decl, ctx, false);
1870 break;
1871
1872 case OMP_CLAUSE_MAP:
1873 if (!is_gimple_omp_offloaded (ctx->stmt))
1874 break;
1875 decl = OMP_CLAUSE_DECL (c);
1876 if (DECL_P (decl)
1877 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1878 && (OMP_CLAUSE_MAP_KIND (c)
1879 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1880 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1881 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1882 && varpool_node::get_create (decl)->offloadable)
1883 break;
1884 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1885 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1886 && is_omp_target (ctx->stmt)
1887 && !is_gimple_omp_offloaded (ctx->stmt))
1888 break;
1889 if (DECL_P (decl))
1890 {
1891 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1892 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1893 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1894 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1895 {
1896 tree new_decl = lookup_decl (decl, ctx);
1897 TREE_TYPE (new_decl)
1898 = remap_type (TREE_TYPE (decl), &ctx->cb);
1899 }
1900 else if (DECL_SIZE (decl)
1901 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1902 {
1903 tree decl2 = DECL_VALUE_EXPR (decl);
1904 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1905 decl2 = TREE_OPERAND (decl2, 0);
1906 gcc_assert (DECL_P (decl2));
1907 fixup_remapped_decl (decl2, ctx, false);
1908 fixup_remapped_decl (decl, ctx, true);
1909 }
1910 else
1911 fixup_remapped_decl (decl, ctx, false);
1912 }
1913 break;
1914
1915 case OMP_CLAUSE_COPYPRIVATE:
1916 case OMP_CLAUSE_COPYIN:
1917 case OMP_CLAUSE_DEFAULT:
1918 case OMP_CLAUSE_IF:
1919 case OMP_CLAUSE_NUM_THREADS:
1920 case OMP_CLAUSE_NUM_TEAMS:
1921 case OMP_CLAUSE_THREAD_LIMIT:
1922 case OMP_CLAUSE_DEVICE:
1923 case OMP_CLAUSE_SCHEDULE:
1924 case OMP_CLAUSE_DIST_SCHEDULE:
1925 case OMP_CLAUSE_NOWAIT:
1926 case OMP_CLAUSE_ORDERED:
1927 case OMP_CLAUSE_COLLAPSE:
1928 case OMP_CLAUSE_UNTIED:
1929 case OMP_CLAUSE_FINAL:
1930 case OMP_CLAUSE_MERGEABLE:
1931 case OMP_CLAUSE_PROC_BIND:
1932 case OMP_CLAUSE_SAFELEN:
1933 case OMP_CLAUSE_SIMDLEN:
1934 case OMP_CLAUSE_ALIGNED:
1935 case OMP_CLAUSE_DEPEND:
1936 case OMP_CLAUSE_DETACH:
1937 case OMP_CLAUSE_ALLOCATE:
1938 case OMP_CLAUSE__LOOPTEMP_:
1939 case OMP_CLAUSE__REDUCTEMP_:
1940 case OMP_CLAUSE_TO:
1941 case OMP_CLAUSE_FROM:
1942 case OMP_CLAUSE_PRIORITY:
1943 case OMP_CLAUSE_GRAINSIZE:
1944 case OMP_CLAUSE_NUM_TASKS:
1945 case OMP_CLAUSE_THREADS:
1946 case OMP_CLAUSE_SIMD:
1947 case OMP_CLAUSE_NOGROUP:
1948 case OMP_CLAUSE_DEFAULTMAP:
1949 case OMP_CLAUSE_ORDER:
1950 case OMP_CLAUSE_BIND:
1951 case OMP_CLAUSE_USE_DEVICE_PTR:
1952 case OMP_CLAUSE_USE_DEVICE_ADDR:
1953 case OMP_CLAUSE_NONTEMPORAL:
1954 case OMP_CLAUSE_ASYNC:
1955 case OMP_CLAUSE_WAIT:
1956 case OMP_CLAUSE_NUM_GANGS:
1957 case OMP_CLAUSE_NUM_WORKERS:
1958 case OMP_CLAUSE_VECTOR_LENGTH:
1959 case OMP_CLAUSE_GANG:
1960 case OMP_CLAUSE_WORKER:
1961 case OMP_CLAUSE_VECTOR:
1962 case OMP_CLAUSE_INDEPENDENT:
1963 case OMP_CLAUSE_AUTO:
1964 case OMP_CLAUSE_SEQ:
1965 case OMP_CLAUSE_TILE:
1966 case OMP_CLAUSE__SIMT_:
1967 case OMP_CLAUSE_IF_PRESENT:
1968 case OMP_CLAUSE_FINALIZE:
1969 case OMP_CLAUSE_FILTER:
1970 case OMP_CLAUSE__CONDTEMP_:
1971 break;
1972
1973 case OMP_CLAUSE__CACHE_:
1974 case OMP_CLAUSE_NOHOST:
1975 default:
1976 gcc_unreachable ();
1977 }
1978 }
1979
1980 gcc_checking_assert (!scan_array_reductions
1981 || !is_gimple_omp_oacc (ctx->stmt));
1982 if (scan_array_reductions)
1983 {
1984 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1985 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1986 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1987 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1988 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1989 {
1990 omp_context *rctx = ctx;
1991 if (is_omp_target (ctx->stmt))
1992 rctx = ctx->outer;
1993 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1995 }
1996 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1997 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1998 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1999 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2000 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
2001 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
2002 }
2003 }
2004
2005 /* Create a new name for the omp child function. Returns an identifier. */
2006
2007 static tree
2008 create_omp_child_function_name (bool task_copy)
2009 {
2010 return clone_function_name_numbered (current_function_decl,
2011 task_copy ? "_omp_cpyfn" : "_omp_fn");
2012 }
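
/* For instance, within a function FOO this typically yields identifiers
   such as "foo._omp_fn.0", "foo._omp_fn.1", ... (or "foo._omp_cpyfn.N"
   for task copy functions); the numeric suffix comes from
   clone_function_name_numbered.  */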
2013
2014 /* Return true if CTX may belong to offloaded code: either if the current
2015 function is offloaded, or if any enclosing context corresponds to a target region. */
2016
2017 static bool
2018 omp_maybe_offloaded_ctx (omp_context *ctx)
2019 {
2020 if (cgraph_node::get (current_function_decl)->offloadable)
2021 return true;
2022 for (; ctx; ctx = ctx->outer)
2023 if (is_gimple_omp_offloaded (ctx->stmt))
2024 return true;
2025 return false;
2026 }
2027
2028 /* Build a decl for the omp child function. It will not contain a body
2029 yet, just the bare decl. */
2030
2031 static void
2032 create_omp_child_function (omp_context *ctx, bool task_copy)
2033 {
2034 tree decl, type, name, t;
2035
2036 name = create_omp_child_function_name (task_copy);
2037 if (task_copy)
2038 type = build_function_type_list (void_type_node, ptr_type_node,
2039 ptr_type_node, NULL_TREE);
2040 else
2041 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2042
2043 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2044
2045 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2046 || !task_copy);
2047 if (!task_copy)
2048 ctx->cb.dst_fn = decl;
2049 else
2050 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2051
2052 TREE_STATIC (decl) = 1;
2053 TREE_USED (decl) = 1;
2054 DECL_ARTIFICIAL (decl) = 1;
2055 DECL_IGNORED_P (decl) = 0;
2056 TREE_PUBLIC (decl) = 0;
2057 DECL_UNINLINABLE (decl) = 1;
2058 DECL_EXTERNAL (decl) = 0;
2059 DECL_CONTEXT (decl) = NULL_TREE;
2060 DECL_INITIAL (decl) = make_node (BLOCK);
2061 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
2062 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2063 /* Remove omp declare simd attribute from the new attributes. */
2064 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2065 {
2066 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2067 a = a2;
2068 a = TREE_CHAIN (a);
2069 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2070 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2071 *p = TREE_CHAIN (*p);
2072 else
2073 {
2074 tree chain = TREE_CHAIN (*p);
2075 *p = copy_node (*p);
2076 p = &TREE_CHAIN (*p);
2077 *p = chain;
2078 }
2079 }
2080 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2081 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2082 DECL_FUNCTION_SPECIFIC_TARGET (decl)
2083 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2084 DECL_FUNCTION_VERSIONED (decl)
2085 = DECL_FUNCTION_VERSIONED (current_function_decl);
2086
2087 if (omp_maybe_offloaded_ctx (ctx))
2088 {
2089 cgraph_node::get_create (decl)->offloadable = 1;
2090 if (ENABLE_OFFLOADING)
2091 g->have_offload = true;
2092 }
2093
2094 if (cgraph_node::get_create (decl)->offloadable)
2095 {
2096 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2097 ? "omp target entrypoint"
2098 : "omp declare target");
2099 if (lookup_attribute ("omp declare target",
2100 DECL_ATTRIBUTES (current_function_decl)))
2101 {
2102 if (is_gimple_omp_offloaded (ctx->stmt))
2103 DECL_ATTRIBUTES (decl)
2104 = remove_attribute ("omp declare target",
2105 copy_list (DECL_ATTRIBUTES (decl)));
2106 else
2107 target_attr = NULL;
2108 }
2109 if (target_attr
2110 && is_gimple_omp_offloaded (ctx->stmt)
2111 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
2112 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
2113 NULL_TREE, DECL_ATTRIBUTES (decl));
2114 if (target_attr)
2115 DECL_ATTRIBUTES (decl)
2116 = tree_cons (get_identifier (target_attr),
2117 NULL_TREE, DECL_ATTRIBUTES (decl));
2118 }
2119
2120 t = build_decl (DECL_SOURCE_LOCATION (decl),
2121 RESULT_DECL, NULL_TREE, void_type_node);
2122 DECL_ARTIFICIAL (t) = 1;
2123 DECL_IGNORED_P (t) = 1;
2124 DECL_CONTEXT (t) = decl;
2125 DECL_RESULT (decl) = t;
2126
2127 tree data_name = get_identifier (".omp_data_i");
2128 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2129 ptr_type_node);
2130 DECL_ARTIFICIAL (t) = 1;
2131 DECL_NAMELESS (t) = 1;
2132 DECL_ARG_TYPE (t) = ptr_type_node;
2133 DECL_CONTEXT (t) = current_function_decl;
2134 TREE_USED (t) = 1;
2135 TREE_READONLY (t) = 1;
2136 DECL_ARGUMENTS (decl) = t;
2137 if (!task_copy)
2138 ctx->receiver_decl = t;
2139 else
2140 {
2141 t = build_decl (DECL_SOURCE_LOCATION (decl),
2142 PARM_DECL, get_identifier (".omp_data_o"),
2143 ptr_type_node);
2144 DECL_ARTIFICIAL (t) = 1;
2145 DECL_NAMELESS (t) = 1;
2146 DECL_ARG_TYPE (t) = ptr_type_node;
2147 DECL_CONTEXT (t) = current_function_decl;
2148 TREE_USED (t) = 1;
2149 TREE_ADDRESSABLE (t) = 1;
2150 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2151 DECL_ARGUMENTS (decl) = t;
2152 }
2153
2154 /* Allocate memory for the function structure. The call to
2155 allocate_struct_function clobbers CFUN, so we need to restore
2156 it afterward. */
2157 push_struct_function (decl);
2158 cfun->function_end_locus = gimple_location (ctx->stmt);
2159 init_tree_ssa (cfun);
2160 pop_cfun ();
2161 }
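
/* Schematically, for '#pragma omp parallel' inside FOO this builds

     static void foo._omp_fn.0 (void *.omp_data_i);

   and, for a task copy function,

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   with the body supplied later when the region is expanded.  */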
2162
2163 /* Callback for walk_gimple_seq. Check whether a combined parallel
2164 contains a gimple_omp_for_combined_into_p OMP_FOR. */
2165
2166 tree
2167 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2168 bool *handled_ops_p,
2169 struct walk_stmt_info *wi)
2170 {
2171 gimple *stmt = gsi_stmt (*gsi_p);
2172
2173 *handled_ops_p = true;
2174 switch (gimple_code (stmt))
2175 {
2176 WALK_SUBSTMTS;
2177
2178 case GIMPLE_OMP_FOR:
2179 if (gimple_omp_for_combined_into_p (stmt)
2180 && gimple_omp_for_kind (stmt)
2181 == *(const enum gf_mask *) (wi->info))
2182 {
2183 wi->info = stmt;
2184 return integer_zero_node;
2185 }
2186 break;
2187 default:
2188 break;
2189 }
2190 return NULL;
2191 }
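
/* Callers point WI->INFO at the gf_mask loop kind to look for; on a match
   the callback stores the GIMPLE_OMP_FOR statement itself back into
   WI->INFO, which is how add_taskreg_looptemp_clauses below detects that
   a combined loop was found.  */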
2192
2193 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2194
2195 static void
2196 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2197 omp_context *outer_ctx)
2198 {
2199 struct walk_stmt_info wi;
2200
2201 memset (&wi, 0, sizeof (wi));
2202 wi.val_only = true;
2203 wi.info = (void *) &msk;
2204 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2205 if (wi.info != (void *) &msk)
2206 {
2207 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2208 struct omp_for_data fd;
2209 omp_extract_for_data (for_stmt, &fd, NULL);
2210 /* We need two temporaries with fd.loop.v type (istart/iend)
2211 and then (fd.collapse - 1) temporaries with the same
2212 type for count2 ... countN-1 vars if not constant. */
2213 size_t count = 2, i;
2214 tree type = fd.iter_type;
2215 if (fd.collapse > 1
2216 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2217 {
2218 count += fd.collapse - 1;
2219 /* If there are lastprivate clauses on the inner
2220 GIMPLE_OMP_FOR, add one more temporary for the total number
2221 of iterations (product of count1 ... countN-1). */
2222 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2223 OMP_CLAUSE_LASTPRIVATE)
2224 || (msk == GF_OMP_FOR_KIND_FOR
2225 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2226 OMP_CLAUSE_LASTPRIVATE)))
2227 {
2228 tree temp = create_tmp_var (type);
2229 tree c = build_omp_clause (UNKNOWN_LOCATION,
2230 OMP_CLAUSE__LOOPTEMP_);
2231 insert_decl_map (&outer_ctx->cb, temp, temp);
2232 OMP_CLAUSE_DECL (c) = temp;
2233 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2234 gimple_omp_taskreg_set_clauses (stmt, c);
2235 }
2236 if (fd.non_rect
2237 && fd.last_nonrect == fd.first_nonrect + 1)
2238 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2239 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2240 {
2241 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2242 tree type2 = TREE_TYPE (v);
2243 count++;
2244 for (i = 0; i < 3; i++)
2245 {
2246 tree temp = create_tmp_var (type2);
2247 tree c = build_omp_clause (UNKNOWN_LOCATION,
2248 OMP_CLAUSE__LOOPTEMP_);
2249 insert_decl_map (&outer_ctx->cb, temp, temp);
2250 OMP_CLAUSE_DECL (c) = temp;
2251 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2252 gimple_omp_taskreg_set_clauses (stmt, c);
2253 }
2254 }
2255 }
2256 for (i = 0; i < count; i++)
2257 {
2258 tree temp = create_tmp_var (type);
2259 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2260 insert_decl_map (&outer_ctx->cb, temp, temp);
2261 OMP_CLAUSE_DECL (c) = temp;
2262 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2263 gimple_omp_taskreg_set_clauses (stmt, c);
2264 }
2265 }
2266 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2267 && omp_find_clause (gimple_omp_task_clauses (stmt),
2268 OMP_CLAUSE_REDUCTION))
2269 {
2270 tree type = build_pointer_type (pointer_sized_int_node);
2271 tree temp = create_tmp_var (type);
2272 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2273 insert_decl_map (&outer_ctx->cb, temp, temp);
2274 OMP_CLAUSE_DECL (c) = temp;
2275 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2276 gimple_omp_task_set_clauses (stmt, c);
2277 }
2278 }
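
/* As an illustrative example: for a combined construct along the lines of

     #pragma omp parallel for collapse (2) lastprivate (x)

   with a non-constant inner bound, the code above adds _looptemp_ clauses
   for the istart/iend pair, one per additional collapsed dimension, and
   one more for the total iteration count used by lastprivate handling.  */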
2279
2280 /* Scan an OpenMP parallel directive. */
2281
2282 static void
2283 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2284 {
2285 omp_context *ctx;
2286 tree name;
2287 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2288
2289 /* Ignore parallel directives with empty bodies, unless there
2290 are copyin clauses. */
2291 if (optimize > 0
2292 && empty_body_p (gimple_omp_body (stmt))
2293 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2294 OMP_CLAUSE_COPYIN) == NULL)
2295 {
2296 gsi_replace (gsi, gimple_build_nop (), false);
2297 return;
2298 }
2299
2300 if (gimple_omp_parallel_combined_p (stmt))
2301 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2302 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2303 OMP_CLAUSE_REDUCTION);
2304 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2305 if (OMP_CLAUSE_REDUCTION_TASK (c))
2306 {
2307 tree type = build_pointer_type (pointer_sized_int_node);
2308 tree temp = create_tmp_var (type);
2309 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2310 if (outer_ctx)
2311 insert_decl_map (&outer_ctx->cb, temp, temp);
2312 OMP_CLAUSE_DECL (c) = temp;
2313 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2314 gimple_omp_parallel_set_clauses (stmt, c);
2315 break;
2316 }
2317 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2318 break;
2319
2320 ctx = new_omp_context (stmt, outer_ctx);
2321 taskreg_contexts.safe_push (ctx);
2322 if (taskreg_nesting_level > 1)
2323 ctx->is_nested = true;
2324 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2325 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2326 name = create_tmp_var_name (".omp_data_s");
2327 name = build_decl (gimple_location (stmt),
2328 TYPE_DECL, name, ctx->record_type);
2329 DECL_ARTIFICIAL (name) = 1;
2330 DECL_NAMELESS (name) = 1;
2331 TYPE_NAME (ctx->record_type) = name;
2332 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2333 create_omp_child_function (ctx, false);
2334 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2335
2336 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2337 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2338
2339 if (TYPE_FIELDS (ctx->record_type) == NULL)
2340 ctx->record_type = ctx->receiver_decl = NULL;
2341 }
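
/* Roughly speaking, the parent thread will later fill in an instance of
   the .omp_data_s record created above and pass its address to the child
   function, where it arrives as the .omp_data_i argument; ctx->field_map
   ties each communicated variable to its slot in that record.  */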
2342
2343 /* Scan an OpenMP task directive. */
2344
2345 static void
2346 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2347 {
2348 omp_context *ctx;
2349 tree name, t;
2350 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2351
2352 /* Ignore task directives with empty bodies, unless they have a depend
2353 clause. */
2354 if (optimize > 0
2355 && gimple_omp_body (stmt)
2356 && empty_body_p (gimple_omp_body (stmt))
2357 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2358 {
2359 gsi_replace (gsi, gimple_build_nop (), false);
2360 return;
2361 }
2362
2363 if (gimple_omp_task_taskloop_p (stmt))
2364 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2365
2366 ctx = new_omp_context (stmt, outer_ctx);
2367
2368 if (gimple_omp_task_taskwait_p (stmt))
2369 {
2370 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2371 return;
2372 }
2373
2374 taskreg_contexts.safe_push (ctx);
2375 if (taskreg_nesting_level > 1)
2376 ctx->is_nested = true;
2377 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2378 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2379 name = create_tmp_var_name (".omp_data_s");
2380 name = build_decl (gimple_location (stmt),
2381 TYPE_DECL, name, ctx->record_type);
2382 DECL_ARTIFICIAL (name) = 1;
2383 DECL_NAMELESS (name) = 1;
2384 TYPE_NAME (ctx->record_type) = name;
2385 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2386 create_omp_child_function (ctx, false);
2387 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2388
2389 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2390
2391 if (ctx->srecord_type)
2392 {
2393 name = create_tmp_var_name (".omp_data_a");
2394 name = build_decl (gimple_location (stmt),
2395 TYPE_DECL, name, ctx->srecord_type);
2396 DECL_ARTIFICIAL (name) = 1;
2397 DECL_NAMELESS (name) = 1;
2398 TYPE_NAME (ctx->srecord_type) = name;
2399 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2400 create_omp_child_function (ctx, true);
2401 }
2402
2403 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2404
2405 if (TYPE_FIELDS (ctx->record_type) == NULL)
2406 {
2407 ctx->record_type = ctx->receiver_decl = NULL;
2408 t = build_int_cst (long_integer_type_node, 0);
2409 gimple_omp_task_set_arg_size (stmt, t);
2410 t = build_int_cst (long_integer_type_node, 1);
2411 gimple_omp_task_set_arg_align (stmt, t);
2412 }
2413 }
2414
2415 /* Helper function for finish_taskreg_scan, called through walk_tree.
2416 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2417 variable, replace it in the expression. */
2418
2419 static tree
2420 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2421 {
2422 if (VAR_P (*tp))
2423 {
2424 omp_context *ctx = (omp_context *) data;
2425 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2426 if (t != *tp)
2427 {
2428 if (DECL_HAS_VALUE_EXPR_P (t))
2429 t = unshare_expr (DECL_VALUE_EXPR (t));
2430 *tp = t;
2431 }
2432 *walk_subtrees = 0;
2433 }
2434 else if (IS_TYPE_OR_DECL_P (*tp))
2435 *walk_subtrees = 0;
2436 return NULL_TREE;
2437 }
2438
2439 /* If any decls have been made addressable during scan_omp,
2440 adjust their fields if needed, and lay out the record types
2441 of parallel/task constructs. */
2442
2443 static void
2444 finish_taskreg_scan (omp_context *ctx)
2445 {
2446 if (ctx->record_type == NULL_TREE)
2447 return;
2448
2449 /* If any make_addressable_vars were needed, verify for all
2450 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2451 statements whether use_pointer_for_field has changed
2452 because of that. If it has, update the field types now. */
2453 if (make_addressable_vars)
2454 {
2455 tree c;
2456
2457 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2458 c; c = OMP_CLAUSE_CHAIN (c))
2459 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2460 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2461 {
2462 tree decl = OMP_CLAUSE_DECL (c);
2463
2464 /* Global variables don't need to be copied,
2465 the receiver side will use them directly. */
2466 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2467 continue;
2468 if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2469 || !use_pointer_for_field (decl, ctx))
2470 continue;
2471 tree field = lookup_field (decl, ctx);
2472 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2473 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2474 continue;
2475 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2476 TREE_THIS_VOLATILE (field) = 0;
2477 DECL_USER_ALIGN (field) = 0;
2478 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2479 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2480 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2481 if (ctx->srecord_type)
2482 {
2483 tree sfield = lookup_sfield (decl, ctx);
2484 TREE_TYPE (sfield) = TREE_TYPE (field);
2485 TREE_THIS_VOLATILE (sfield) = 0;
2486 DECL_USER_ALIGN (sfield) = 0;
2487 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2488 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2489 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2490 }
2491 }
2492 }
2493
2494 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2495 {
2496 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2497 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2498 if (c)
2499 {
2500 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2501 expects to find it at the start of data. */
2502 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2503 tree *p = &TYPE_FIELDS (ctx->record_type);
2504 while (*p)
2505 if (*p == f)
2506 {
2507 *p = DECL_CHAIN (*p);
2508 break;
2509 }
2510 else
2511 p = &DECL_CHAIN (*p);
2512 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2513 TYPE_FIELDS (ctx->record_type) = f;
2514 }
2515 layout_type (ctx->record_type);
2516 fixup_child_record_type (ctx);
2517 }
2518 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2519 {
2520 layout_type (ctx->record_type);
2521 fixup_child_record_type (ctx);
2522 }
2523 else
2524 {
2525 location_t loc = gimple_location (ctx->stmt);
2526 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2527 tree detach_clause
2528 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2529 OMP_CLAUSE_DETACH);
2530 /* Move VLA fields to the end. */
2531 p = &TYPE_FIELDS (ctx->record_type);
2532 while (*p)
2533 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2534 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2535 {
2536 *q = *p;
2537 *p = TREE_CHAIN (*p);
2538 TREE_CHAIN (*q) = NULL_TREE;
2539 q = &TREE_CHAIN (*q);
2540 }
2541 else
2542 p = &DECL_CHAIN (*p);
2543 *p = vla_fields;
2544 if (gimple_omp_task_taskloop_p (ctx->stmt))
2545 {
2546 /* Move the fields corresponding to the first and second _looptemp_
2547 clauses first. These are filled by GOMP_taskloop
2548 and thus need to be in specific positions. */
2549 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2550 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2551 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2552 OMP_CLAUSE__LOOPTEMP_);
2553 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2554 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2555 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2556 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2557 p = &TYPE_FIELDS (ctx->record_type);
2558 while (*p)
2559 if (*p == f1 || *p == f2 || *p == f3)
2560 *p = DECL_CHAIN (*p);
2561 else
2562 p = &DECL_CHAIN (*p);
2563 DECL_CHAIN (f1) = f2;
2564 if (c3)
2565 {
2566 DECL_CHAIN (f2) = f3;
2567 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2568 }
2569 else
2570 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2571 TYPE_FIELDS (ctx->record_type) = f1;
2572 if (ctx->srecord_type)
2573 {
2574 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2575 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2576 if (c3)
2577 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2578 p = &TYPE_FIELDS (ctx->srecord_type);
2579 while (*p)
2580 if (*p == f1 || *p == f2 || *p == f3)
2581 *p = DECL_CHAIN (*p);
2582 else
2583 p = &DECL_CHAIN (*p);
2584 DECL_CHAIN (f1) = f2;
2585 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2586 if (c3)
2587 {
2588 DECL_CHAIN (f2) = f3;
2589 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2590 }
2591 else
2592 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2593 TYPE_FIELDS (ctx->srecord_type) = f1;
2594 }
2595 }
2596 if (detach_clause)
2597 {
2598 tree c, field;
2599
2600 /* Look for a firstprivate clause with the detach event handle. */
2601 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2602 c; c = OMP_CLAUSE_CHAIN (c))
2603 {
2604 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2605 continue;
2606 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2607 == OMP_CLAUSE_DECL (detach_clause))
2608 break;
2609 }
2610
2611 gcc_assert (c);
2612 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2613
2614 /* Move the field corresponding to the detach clause first.
2615 This is filled by GOMP_task and needs to be in a
2616 specific position. */
2617 p = &TYPE_FIELDS (ctx->record_type);
2618 while (*p)
2619 if (*p == field)
2620 *p = DECL_CHAIN (*p);
2621 else
2622 p = &DECL_CHAIN (*p);
2623 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2624 TYPE_FIELDS (ctx->record_type) = field;
2625 if (ctx->srecord_type)
2626 {
2627 field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2628 p = &TYPE_FIELDS (ctx->srecord_type);
2629 while (*p)
2630 if (*p == field)
2631 *p = DECL_CHAIN (*p);
2632 else
2633 p = &DECL_CHAIN (*p);
2634 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2635 TYPE_FIELDS (ctx->srecord_type) = field;
2636 }
2637 }
2638 layout_type (ctx->record_type);
2639 fixup_child_record_type (ctx);
2640 if (ctx->srecord_type)
2641 layout_type (ctx->srecord_type);
2642 tree t = fold_convert_loc (loc, long_integer_type_node,
2643 TYPE_SIZE_UNIT (ctx->record_type));
2644 if (TREE_CODE (t) != INTEGER_CST)
2645 {
2646 t = unshare_expr (t);
2647 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2648 }
2649 gimple_omp_task_set_arg_size (ctx->stmt, t);
2650 t = build_int_cst (long_integer_type_node,
2651 TYPE_ALIGN_UNIT (ctx->record_type));
2652 gimple_omp_task_set_arg_align (ctx->stmt, t);
2653 }
2654 }
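
/* To illustrate the reordering above: for a taskloop with a task
   reduction, the task record ends up laid out roughly as

     { _looptemp_1; _looptemp_2; _reductemp_; ...; variable-sized fields }

   (with the detach event handle field, when present, moved to the very
   front), matching the positions GOMP_taskloop and GOMP_task expect.  */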
2655
2656 /* Find the enclosing offload context. */
2657
2658 static omp_context *
2659 enclosing_target_ctx (omp_context *ctx)
2660 {
2661 for (; ctx; ctx = ctx->outer)
2662 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2663 break;
2664
2665 return ctx;
2666 }
2667
2668 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2669 construct.
2670 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2671
2672 static bool
2673 ctx_in_oacc_kernels_region (omp_context *ctx)
2674 {
2675 for (; ctx != NULL; ctx = ctx->outer)
2676 {
2677 gimple *stmt = ctx->stmt;
2678 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2679 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2680 return true;
2681 }
2682
2683 return false;
2684 }
2685
2686 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2687 (This doesn't include OpenACC 'kernels' decomposed parts.)
2688 Until kernels handling moves to use the same loop indirection
2689 scheme as parallel, we need to do this checking early. */
2690
2691 static unsigned
2692 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2693 {
2694 bool checking = true;
2695 unsigned outer_mask = 0;
2696 unsigned this_mask = 0;
2697 bool has_seq = false, has_auto = false;
2698
2699 if (ctx->outer)
2700 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2701 if (!stmt)
2702 {
2703 checking = false;
2704 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2705 return outer_mask;
2706 stmt = as_a <gomp_for *> (ctx->stmt);
2707 }
2708
2709 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2710 {
2711 switch (OMP_CLAUSE_CODE (c))
2712 {
2713 case OMP_CLAUSE_GANG:
2714 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2715 break;
2716 case OMP_CLAUSE_WORKER:
2717 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2718 break;
2719 case OMP_CLAUSE_VECTOR:
2720 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2721 break;
2722 case OMP_CLAUSE_SEQ:
2723 has_seq = true;
2724 break;
2725 case OMP_CLAUSE_AUTO:
2726 has_auto = true;
2727 break;
2728 default:
2729 break;
2730 }
2731 }
2732
2733 if (checking)
2734 {
2735 if (has_seq && (this_mask || has_auto))
2736 error_at (gimple_location (stmt), "%<seq%> overrides other"
2737 " OpenACC loop specifiers");
2738 else if (has_auto && this_mask)
2739 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2740 " OpenACC loop specifiers");
2741
2742 if (this_mask & outer_mask)
2743 error_at (gimple_location (stmt), "inner loop uses same"
2744 " OpenACC parallelism as containing loop");
2745 }
2746
2747 return outer_mask | this_mask;
2748 }
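
/* For example, an '#pragma acc loop gang' nested directly inside another
   gang loop makes this_mask and outer_mask overlap on GOMP_DIM_GANG,
   triggering the "inner loop uses same OpenACC parallelism" error
   above.  */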
2749
2750 /* Scan a GIMPLE_OMP_FOR. */
2751
2752 static omp_context *
2753 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2754 {
2755 omp_context *ctx;
2756 size_t i;
2757 tree clauses = gimple_omp_for_clauses (stmt);
2758
2759 ctx = new_omp_context (stmt, outer_ctx);
2760
2761 if (is_gimple_omp_oacc (stmt))
2762 {
2763 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2764
2765 if (!(tgt && is_oacc_kernels (tgt)))
2766 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2767 {
2768 tree c_op0;
2769 switch (OMP_CLAUSE_CODE (c))
2770 {
2771 case OMP_CLAUSE_GANG:
2772 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2773 break;
2774
2775 case OMP_CLAUSE_WORKER:
2776 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2777 break;
2778
2779 case OMP_CLAUSE_VECTOR:
2780 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2781 break;
2782
2783 default:
2784 continue;
2785 }
2786
2787 if (c_op0)
2788 {
2789 /* By construction, this is impossible for OpenACC 'kernels'
2790 decomposed parts. */
2791 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2792
2793 error_at (OMP_CLAUSE_LOCATION (c),
2794 "argument not permitted on %qs clause",
2795 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2796 if (tgt)
2797 inform (gimple_location (tgt->stmt),
2798 "enclosing parent compute construct");
2799 else if (oacc_get_fn_attrib (current_function_decl))
2800 inform (DECL_SOURCE_LOCATION (current_function_decl),
2801 "enclosing routine");
2802 else
2803 gcc_unreachable ();
2804 }
2805 }
2806
2807 if (tgt && is_oacc_kernels (tgt))
2808 check_oacc_kernel_gwv (stmt, ctx);
2809
2810 /* Collect all variables named in reductions on this loop. Ensure
2811 that, if this loop has a reduction on some variable v, and there is
2812 a reduction on v somewhere in an outer context, then there is a
2813 reduction on v on all intervening loops as well. */
2814 tree local_reduction_clauses = NULL;
2815 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2816 {
2817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2818 local_reduction_clauses
2819 = tree_cons (NULL, c, local_reduction_clauses);
2820 }
2821 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2822 ctx->outer_reduction_clauses
2823 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2824 ctx->outer->outer_reduction_clauses);
2825 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2826 tree local_iter = local_reduction_clauses;
2827 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2828 {
2829 tree local_clause = TREE_VALUE (local_iter);
2830 tree local_var = OMP_CLAUSE_DECL (local_clause);
2831 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2832 bool have_outer_reduction = false;
2833 tree ctx_iter = outer_reduction_clauses;
2834 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2835 {
2836 tree outer_clause = TREE_VALUE (ctx_iter);
2837 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2838 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2839 if (outer_var == local_var && outer_op != local_op)
2840 {
2841 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2842 "conflicting reduction operations for %qE",
2843 local_var);
2844 inform (OMP_CLAUSE_LOCATION (outer_clause),
2845 "location of the previous reduction for %qE",
2846 outer_var);
2847 }
2848 if (outer_var == local_var)
2849 {
2850 have_outer_reduction = true;
2851 break;
2852 }
2853 }
2854 if (have_outer_reduction)
2855 {
2856 /* There is a reduction on outer_var both on this loop and on
2857 some enclosing loop. Walk up the context tree until such a
2858 loop with a reduction on outer_var is found, and complain
2859 about all intervening loops that do not have such a
2860 reduction. */
2861 struct omp_context *curr_loop = ctx->outer;
2862 bool found = false;
2863 while (curr_loop != NULL)
2864 {
2865 tree curr_iter = curr_loop->local_reduction_clauses;
2866 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2867 {
2868 tree curr_clause = TREE_VALUE (curr_iter);
2869 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2870 if (curr_var == local_var)
2871 {
2872 found = true;
2873 break;
2874 }
2875 }
2876 if (!found)
2877 warning_at (gimple_location (curr_loop->stmt), 0,
2878 "nested loop in reduction needs "
2879 "reduction clause for %qE",
2880 local_var);
2881 else
2882 break;
2883 curr_loop = curr_loop->outer;
2884 }
2885 }
2886 }
2887 ctx->local_reduction_clauses = local_reduction_clauses;
2888 ctx->outer_reduction_clauses
2889 = chainon (unshare_expr (ctx->local_reduction_clauses),
2890 ctx->outer_reduction_clauses);
2891
2892 if (tgt && is_oacc_kernels (tgt))
2893 {
2894 /* Strip out reductions, as they are not handled yet. */
2895 tree *prev_ptr = &clauses;
2896
2897 while (tree probe = *prev_ptr)
2898 {
2899 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2900
2901 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2902 *prev_ptr = *next_ptr;
2903 else
2904 prev_ptr = next_ptr;
2905 }
2906
2907 gimple_omp_for_set_clauses (stmt, clauses);
2908 }
2909 }
2910
2911 scan_sharing_clauses (clauses, ctx);
2912
2913 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2914 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2915 {
2916 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2917 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2918 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2919 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2920 }
2921 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2922 return ctx;
2923 }
2924
2925 /* Duplicate #pragma omp simd, one copy for SIMT, another for SIMD. */
2926
2927 static void
2928 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2929 omp_context *outer_ctx)
2930 {
2931 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2932 gsi_replace (gsi, bind, false);
2933 gimple_seq seq = NULL;
2934 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2935 tree cond = create_tmp_var_raw (integer_type_node);
2936 DECL_CONTEXT (cond) = current_function_decl;
2937 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2938 gimple_bind_set_vars (bind, cond);
2939 gimple_call_set_lhs (g, cond);
2940 gimple_seq_add_stmt (&seq, g);
2941 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2942 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2943 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2944 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2945 gimple_seq_add_stmt (&seq, g);
2946 g = gimple_build_label (lab1);
2947 gimple_seq_add_stmt (&seq, g);
2948 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2949 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2950 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2951 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2952 gimple_omp_for_set_clauses (new_stmt, clause);
2953 gimple_seq_add_stmt (&seq, new_stmt);
2954 g = gimple_build_goto (lab3);
2955 gimple_seq_add_stmt (&seq, g);
2956 g = gimple_build_label (lab2);
2957 gimple_seq_add_stmt (&seq, g);
2958 gimple_seq_add_stmt (&seq, stmt);
2959 g = gimple_build_label (lab3);
2960 gimple_seq_add_stmt (&seq, g);
2961 gimple_bind_set_body (bind, seq);
2962 update_stmt (bind);
2963 scan_omp_for (new_stmt, outer_ctx);
2964 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2965 }
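
/* The bind built above amounts to, schematically:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop with a _simt_ clause>; goto lab3;
     lab2: <original simd loop>;
     lab3: ;

   so that the SIMT and plain SIMD variants can be chosen between later.  */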
2966
2967 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2968 struct walk_stmt_info *);
2969 static omp_context *maybe_lookup_ctx (gimple *);
2970
2971 /* Duplicate #pragma omp simd, one copy for the scan input phase loop
2972 and one for the scan phase loop. */
2973
2974 static void
2975 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2976 omp_context *outer_ctx)
2977 {
2978 /* The only change between inclusive and exclusive scan will be
2979 within the first simd loop, so just use inclusive in the
2980 worksharing loop. */
2981 outer_ctx->scan_inclusive = true;
2982 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2983 OMP_CLAUSE_DECL (c) = integer_zero_node;
2984
2985 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2986 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2987 gsi_replace (gsi, input_stmt, false);
2988 gimple_seq input_body = NULL;
2989 gimple_seq_add_stmt (&input_body, stmt);
2990 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2991
2992 gimple_stmt_iterator input1_gsi = gsi_none ();
2993 struct walk_stmt_info wi;
2994 memset (&wi, 0, sizeof (wi));
2995 wi.val_only = true;
2996 wi.info = (void *) &input1_gsi;
2997 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2998 gcc_assert (!gsi_end_p (input1_gsi));
2999
3000 gimple *input_stmt1 = gsi_stmt (input1_gsi);
3001 gsi_next (&input1_gsi);
3002 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
3003 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
3004 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
3005 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3006 std::swap (input_stmt1, scan_stmt1);
3007
3008 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
3009 gimple_omp_set_body (input_stmt1, NULL);
3010
3011 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
3012 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
3013
3014 gimple_omp_set_body (input_stmt1, input_body1);
3015 gimple_omp_set_body (scan_stmt1, NULL);
3016
3017 gimple_stmt_iterator input2_gsi = gsi_none ();
3018 memset (&wi, 0, sizeof (wi));
3019 wi.val_only = true;
3020 wi.info = (void *) &input2_gsi;
3021 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
3022 NULL, &wi);
3023 gcc_assert (!gsi_end_p (input2_gsi));
3024
3025 gimple *input_stmt2 = gsi_stmt (input2_gsi);
3026 gsi_next (&input2_gsi);
3027 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
3028 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
3029 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
3030 std::swap (input_stmt2, scan_stmt2);
3031
3032 gimple_omp_set_body (input_stmt2, NULL);
3033
3034 gimple_omp_set_body (input_stmt, input_body);
3035 gimple_omp_set_body (scan_stmt, scan_body);
3036
3037 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
3038 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
3039
3040 ctx = new_omp_context (scan_stmt, outer_ctx);
3041 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
3042
3043 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
3044 }
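
/* In effect the simd loop is duplicated: the original statement, wrapped
   in a clause-less GIMPLE_OMP_SCAN, becomes the input phase loop, while
   the copy, wrapped in a GIMPLE_OMP_SCAN with an 'inclusive' clause,
   becomes the scan phase loop; in each copy the non-matching half of the
   inner scan body is cleared.  */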
3045
3046 /* Scan an OpenMP sections directive. */
3047
3048 static void
3049 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3050 {
3051 omp_context *ctx;
3052
3053 ctx = new_omp_context (stmt, outer_ctx);
3054 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3055 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3056 }
3057
3058 /* Scan an OpenMP single directive. */
3059
3060 static void
3061 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3062 {
3063 omp_context *ctx;
3064 tree name;
3065
3066 ctx = new_omp_context (stmt, outer_ctx);
3067 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3068 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3069 name = create_tmp_var_name (".omp_copy_s");
3070 name = build_decl (gimple_location (stmt),
3071 TYPE_DECL, name, ctx->record_type);
3072 TYPE_NAME (ctx->record_type) = name;
3073
3074 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3075 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3076
3077 if (TYPE_FIELDS (ctx->record_type) == NULL)
3078 ctx->record_type = NULL;
3079 else
3080 layout_type (ctx->record_type);
3081 }
3082
3083 /* Scan a GIMPLE_OMP_TARGET. */
3084
3085 static void
3086 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
3087 {
3088 omp_context *ctx;
3089 tree name;
3090 bool offloaded = is_gimple_omp_offloaded (stmt);
3091 tree clauses = gimple_omp_target_clauses (stmt);
3092
3093 ctx = new_omp_context (stmt, outer_ctx);
3094 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3095 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3096 name = create_tmp_var_name (".omp_data_t");
3097 name = build_decl (gimple_location (stmt),
3098 TYPE_DECL, name, ctx->record_type);
3099 DECL_ARTIFICIAL (name) = 1;
3100 DECL_NAMELESS (name) = 1;
3101 TYPE_NAME (ctx->record_type) = name;
3102 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3103
3104 if (offloaded)
3105 {
3106 create_omp_child_function (ctx, false);
3107 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
3108 }
3109
3110 scan_sharing_clauses (clauses, ctx);
3111 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3112
3113 if (TYPE_FIELDS (ctx->record_type) == NULL)
3114 ctx->record_type = ctx->receiver_decl = NULL;
3115 else
3116 {
3117 TYPE_FIELDS (ctx->record_type)
3118 = nreverse (TYPE_FIELDS (ctx->record_type));
3119 if (flag_checking)
3120 {
3121 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
3122 for (tree field = TYPE_FIELDS (ctx->record_type);
3123 field;
3124 field = DECL_CHAIN (field))
3125 gcc_assert (DECL_ALIGN (field) == align);
3126 }
3127 layout_type (ctx->record_type);
3128 if (offloaded)
3129 fixup_child_record_type (ctx);
3130 }
3131
3132 if (ctx->teams_nested_p && ctx->nonteams_nested_p)
3133 {
3134 error_at (gimple_location (stmt),
3135 "%<target%> construct with nested %<teams%> construct "
3136 "contains directives outside of the %<teams%> construct");
3137 gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
3138 }
3139 }
3140
3141 /* Scan an OpenMP teams directive. */
3142
3143 static void
3144 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
3145 {
3146 omp_context *ctx = new_omp_context (stmt, outer_ctx);
3147
3148 if (!gimple_omp_teams_host (stmt))
3149 {
3150 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3151 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3152 return;
3153 }
3154 taskreg_contexts.safe_push (ctx);
3155 gcc_assert (taskreg_nesting_level == 1);
3156 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3157 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3158 tree name = create_tmp_var_name (".omp_data_s");
3159 name = build_decl (gimple_location (stmt),
3160 TYPE_DECL, name, ctx->record_type);
3161 DECL_ARTIFICIAL (name) = 1;
3162 DECL_NAMELESS (name) = 1;
3163 TYPE_NAME (ctx->record_type) = name;
3164 TYPE_ARTIFICIAL (ctx->record_type) = 1;
3165 create_omp_child_function (ctx, false);
3166 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
3167
3168 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
3169 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3170
3171 if (TYPE_FIELDS (ctx->record_type) == NULL)
3172 ctx->record_type = ctx->receiver_decl = NULL;
3173 }
3174
3175 /* Check nesting restrictions. */
3176 static bool
3177 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3178 {
3179 tree c;
3180
3181 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3182 inside an OpenACC CTX. */
3183 if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3184 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3185 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3186 ;
3187 else if (!(is_gimple_omp (stmt)
3188 && is_gimple_omp_oacc (stmt)))
3189 {
3190 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3191 {
3192 error_at (gimple_location (stmt),
3193 "non-OpenACC construct inside of OpenACC routine");
3194 return false;
3195 }
3196 else
3197 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3198 if (is_gimple_omp (octx->stmt)
3199 && is_gimple_omp_oacc (octx->stmt))
3200 {
3201 error_at (gimple_location (stmt),
3202 "non-OpenACC construct inside of OpenACC region");
3203 return false;
3204 }
3205 }
3206
3207 if (ctx != NULL)
3208 {
3209 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3210 && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3211 {
3212 c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3213 OMP_CLAUSE_DEVICE);
3214 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3215 {
3216 error_at (gimple_location (stmt),
3217 "OpenMP constructs are not allowed in target region "
3218 "with %<ancestor%>");
3219 return false;
3220 }
3221
3222 if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3223 ctx->teams_nested_p = true;
3224 else
3225 ctx->nonteams_nested_p = true;
3226 }
3227 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3228 && ctx->outer
3229 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3230 ctx = ctx->outer;
3231 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3232 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3233 && !ctx->loop_p)
3234 {
3235 c = NULL_TREE;
3236 if (ctx->order_concurrent
3237 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3238 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3239 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3240 {
3241 error_at (gimple_location (stmt),
3242 "OpenMP constructs other than %<parallel%>, %<loop%>"
3243 " or %<simd%> may not be nested inside a region with"
3244 " the %<order(concurrent)%> clause");
3245 return false;
3246 }
3247 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3248 {
3249 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3250 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3251 {
3252 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3253 && (ctx->outer == NULL
3254 || !gimple_omp_for_combined_into_p (ctx->stmt)
3255 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3256 || (gimple_omp_for_kind (ctx->outer->stmt)
3257 != GF_OMP_FOR_KIND_FOR)
3258 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3259 {
3260 error_at (gimple_location (stmt),
3261 "%<ordered simd threads%> must be closely "
3262 "nested inside of %<%s simd%> region",
3263 lang_GNU_Fortran () ? "do" : "for");
3264 return false;
3265 }
3266 return true;
3267 }
3268 }
3269 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3270 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3271 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3272 return true;
3273 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3274 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3275 return true;
3276 error_at (gimple_location (stmt),
3277 "OpenMP constructs other than "
3278 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3279 "not be nested inside %<simd%> region");
3280 return false;
3281 }
3282 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3283 {
3284 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3285 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3286 && omp_find_clause (gimple_omp_for_clauses (stmt),
3287 OMP_CLAUSE_BIND) == NULL_TREE))
3288 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3289 {
3290 error_at (gimple_location (stmt),
3291 "only %<distribute%>, %<parallel%> or %<loop%> "
3292 "regions are allowed to be strictly nested inside "
3293 "%<teams%> region");
3294 return false;
3295 }
3296 }
3297 else if (ctx->order_concurrent
3298 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3299 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3300 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3301 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3302 {
3303 if (ctx->loop_p)
3304 error_at (gimple_location (stmt),
3305 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3306 "%<simd%> may not be nested inside a %<loop%> region");
3307 else
3308 error_at (gimple_location (stmt),
3309 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3310 "%<simd%> may not be nested inside a region with "
3311 "the %<order(concurrent)%> clause");
3312 return false;
3313 }
3314 }
3315 switch (gimple_code (stmt))
3316 {
3317 case GIMPLE_OMP_FOR:
3318 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3319 return true;
3320 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3321 {
3322 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3323 {
3324 error_at (gimple_location (stmt),
3325 "%<distribute%> region must be strictly nested "
3326 "inside %<teams%> construct");
3327 return false;
3328 }
3329 return true;
3330 }
3331 /* We split a taskloop into a task with a nested taskloop in it. */
3332 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3333 return true;
3334 /* For now, hope this will change and loop bind(parallel) will not
3335 be allowed in lots of contexts. */
3336 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3337 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3338 return true;
3339 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3340 {
3341 bool ok = false;
3342
3343 if (ctx)
3344 switch (gimple_code (ctx->stmt))
3345 {
3346 case GIMPLE_OMP_FOR:
3347 ok = (gimple_omp_for_kind (ctx->stmt)
3348 == GF_OMP_FOR_KIND_OACC_LOOP);
3349 break;
3350
3351 case GIMPLE_OMP_TARGET:
3352 switch (gimple_omp_target_kind (ctx->stmt))
3353 {
3354 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3355 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3356 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3357 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3359 ok = true;
3360 break;
3361
3362 default:
3363 break;
3364 }
3365
3366 default:
3367 break;
3368 }
3369 else if (oacc_get_fn_attrib (current_function_decl))
3370 ok = true;
3371 if (!ok)
3372 {
3373 error_at (gimple_location (stmt),
3374 "OpenACC loop directive must be associated with"
3375 " an OpenACC compute region");
3376 return false;
3377 }
3378 }
3379 /* FALLTHRU */
3380 case GIMPLE_CALL:
3381 if (is_gimple_call (stmt)
3382 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3383 == BUILT_IN_GOMP_CANCEL
3384 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3385 == BUILT_IN_GOMP_CANCELLATION_POINT))
3386 {
3387 const char *bad = NULL;
3388 const char *kind = NULL;
3389 const char *construct
3390 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3391 == BUILT_IN_GOMP_CANCEL)
3392 ? "cancel"
3393 : "cancellation point";
3394 if (ctx == NULL)
3395 {
3396 error_at (gimple_location (stmt), "orphaned %qs construct",
3397 construct);
3398 return false;
3399 }
3400 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3401 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3402 : 0)
3403 {
3404 case 1:
3405 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3406 bad = "parallel";
3407 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3408 == BUILT_IN_GOMP_CANCEL
3409 && !integer_zerop (gimple_call_arg (stmt, 1)))
3410 ctx->cancellable = true;
3411 kind = "parallel";
3412 break;
3413 case 2:
3414 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3415 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3416 bad = "for";
3417 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3418 == BUILT_IN_GOMP_CANCEL
3419 && !integer_zerop (gimple_call_arg (stmt, 1)))
3420 {
3421 ctx->cancellable = true;
3422 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3423 OMP_CLAUSE_NOWAIT))
3424 warning_at (gimple_location (stmt), 0,
3425 "%<cancel for%> inside "
3426 "%<nowait%> for construct");
3427 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3428 OMP_CLAUSE_ORDERED))
3429 warning_at (gimple_location (stmt), 0,
3430 "%<cancel for%> inside "
3431 "%<ordered%> for construct");
3432 }
3433 kind = "for";
3434 break;
3435 case 4:
3436 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3437 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3438 bad = "sections";
3439 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3440 == BUILT_IN_GOMP_CANCEL
3441 && !integer_zerop (gimple_call_arg (stmt, 1)))
3442 {
3443 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3444 {
3445 ctx->cancellable = true;
3446 if (omp_find_clause (gimple_omp_sections_clauses
3447 (ctx->stmt),
3448 OMP_CLAUSE_NOWAIT))
3449 warning_at (gimple_location (stmt), 0,
3450 "%<cancel sections%> inside "
3451 "%<nowait%> sections construct");
3452 }
3453 else
3454 {
3455 gcc_assert (ctx->outer
3456 && gimple_code (ctx->outer->stmt)
3457 == GIMPLE_OMP_SECTIONS);
3458 ctx->outer->cancellable = true;
3459 if (omp_find_clause (gimple_omp_sections_clauses
3460 (ctx->outer->stmt),
3461 OMP_CLAUSE_NOWAIT))
3462 warning_at (gimple_location (stmt), 0,
3463 "%<cancel sections%> inside "
3464 "%<nowait%> sections construct");
3465 }
3466 }
3467 kind = "sections";
3468 break;
3469 case 8:
3470 if (!is_task_ctx (ctx)
3471 && (!is_taskloop_ctx (ctx)
3472 || ctx->outer == NULL
3473 || !is_task_ctx (ctx->outer)))
3474 bad = "task";
3475 else
3476 {
3477 for (omp_context *octx = ctx->outer;
3478 octx; octx = octx->outer)
3479 {
3480 switch (gimple_code (octx->stmt))
3481 {
3482 case GIMPLE_OMP_TASKGROUP:
3483 break;
3484 case GIMPLE_OMP_TARGET:
3485 if (gimple_omp_target_kind (octx->stmt)
3486 != GF_OMP_TARGET_KIND_REGION)
3487 continue;
3488 /* FALLTHRU */
3489 case GIMPLE_OMP_PARALLEL:
3490 case GIMPLE_OMP_TEAMS:
3491 error_at (gimple_location (stmt),
3492 "%<%s taskgroup%> construct not closely "
3493 "nested inside of %<taskgroup%> region",
3494 construct);
3495 return false;
3496 case GIMPLE_OMP_TASK:
3497 if (gimple_omp_task_taskloop_p (octx->stmt)
3498 && octx->outer
3499 && is_taskloop_ctx (octx->outer))
3500 {
3501 tree clauses
3502 = gimple_omp_for_clauses (octx->outer->stmt);
3503 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3504 break;
3505 }
3506 continue;
3507 default:
3508 continue;
3509 }
3510 break;
3511 }
3512 ctx->cancellable = true;
3513 }
3514 kind = "taskgroup";
3515 break;
3516 default:
3517 error_at (gimple_location (stmt), "invalid arguments");
3518 return false;
3519 }
3520 if (bad)
3521 {
3522 error_at (gimple_location (stmt),
3523 "%<%s %s%> construct not closely nested inside of %qs",
3524 construct, kind, bad);
3525 return false;
3526 }
3527 }
3528 /* FALLTHRU */
3529 case GIMPLE_OMP_SECTIONS:
3530 case GIMPLE_OMP_SINGLE:
3531 for (; ctx != NULL; ctx = ctx->outer)
3532 switch (gimple_code (ctx->stmt))
3533 {
3534 case GIMPLE_OMP_FOR:
3535 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3536 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3537 break;
3538 /* FALLTHRU */
3539 case GIMPLE_OMP_SECTIONS:
3540 case GIMPLE_OMP_SINGLE:
3541 case GIMPLE_OMP_ORDERED:
3542 case GIMPLE_OMP_MASTER:
3543 case GIMPLE_OMP_MASKED:
3544 case GIMPLE_OMP_TASK:
3545 case GIMPLE_OMP_CRITICAL:
3546 if (is_gimple_call (stmt))
3547 {
3548 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3549 != BUILT_IN_GOMP_BARRIER)
3550 return true;
3551 error_at (gimple_location (stmt),
3552 "barrier region may not be closely nested inside "
3553 "of work-sharing, %<loop%>, %<critical%>, "
3554 "%<ordered%>, %<master%>, %<masked%>, explicit "
3555 "%<task%> or %<taskloop%> region");
3556 return false;
3557 }
3558 error_at (gimple_location (stmt),
3559 "work-sharing region may not be closely nested inside "
3560 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3561 "%<master%>, %<masked%>, explicit %<task%> or "
3562 "%<taskloop%> region");
3563 return false;
3564 case GIMPLE_OMP_PARALLEL:
3565 case GIMPLE_OMP_TEAMS:
3566 return true;
3567 case GIMPLE_OMP_TARGET:
3568 if (gimple_omp_target_kind (ctx->stmt)
3569 == GF_OMP_TARGET_KIND_REGION)
3570 return true;
3571 break;
3572 default:
3573 break;
3574 }
3575 break;
3576 case GIMPLE_OMP_MASTER:
3577 case GIMPLE_OMP_MASKED:
3578 for (; ctx != NULL; ctx = ctx->outer)
3579 switch (gimple_code (ctx->stmt))
3580 {
3581 case GIMPLE_OMP_FOR:
3582 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3583 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3584 break;
3585 /* FALLTHRU */
3586 case GIMPLE_OMP_SECTIONS:
3587 case GIMPLE_OMP_SINGLE:
3588 case GIMPLE_OMP_TASK:
3589 error_at (gimple_location (stmt),
3590 "%qs region may not be closely nested inside "
3591 "of work-sharing, %<loop%>, explicit %<task%> or "
3592 "%<taskloop%> region",
3593 gimple_code (stmt) == GIMPLE_OMP_MASTER
3594 ? "master" : "masked");
3595 return false;
3596 case GIMPLE_OMP_PARALLEL:
3597 case GIMPLE_OMP_TEAMS:
3598 return true;
3599 case GIMPLE_OMP_TARGET:
3600 if (gimple_omp_target_kind (ctx->stmt)
3601 == GF_OMP_TARGET_KIND_REGION)
3602 return true;
3603 break;
3604 default:
3605 break;
3606 }
3607 break;
3608 case GIMPLE_OMP_SCOPE:
3609 for (; ctx != NULL; ctx = ctx->outer)
3610 switch (gimple_code (ctx->stmt))
3611 {
3612 case GIMPLE_OMP_FOR:
3613 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3614 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3615 break;
3616 /* FALLTHRU */
3617 case GIMPLE_OMP_SECTIONS:
3618 case GIMPLE_OMP_SINGLE:
3619 case GIMPLE_OMP_TASK:
3620 case GIMPLE_OMP_CRITICAL:
3621 case GIMPLE_OMP_ORDERED:
3622 case GIMPLE_OMP_MASTER:
3623 case GIMPLE_OMP_MASKED:
3624 error_at (gimple_location (stmt),
3625 "%<scope%> region may not be closely nested inside "
3626 "of work-sharing, %<loop%>, explicit %<task%>, "
3627 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3628 "or %<masked%> region");
3629 return false;
3630 case GIMPLE_OMP_PARALLEL:
3631 case GIMPLE_OMP_TEAMS:
3632 return true;
3633 case GIMPLE_OMP_TARGET:
3634 if (gimple_omp_target_kind (ctx->stmt)
3635 == GF_OMP_TARGET_KIND_REGION)
3636 return true;
3637 break;
3638 default:
3639 break;
3640 }
3641 break;
3642 case GIMPLE_OMP_TASK:
3643 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3645 {
3646 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3647 error_at (OMP_CLAUSE_LOCATION (c),
3648 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3649 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
3650 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3651 return false;
3652 }
3653 break;
3654 case GIMPLE_OMP_ORDERED:
3655 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3656 c; c = OMP_CLAUSE_CHAIN (c))
3657 {
3658 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
3659 {
3660 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
3661 {
3662 error_at (OMP_CLAUSE_LOCATION (c),
3663 "invalid depend kind in omp %<ordered%> %<depend%>");
3664 return false;
3665 }
3666 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3667 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3668 continue;
3669 }
3670
3671 tree oclause;
3672 /* Look for containing ordered(N) loop. */
3673 if (ctx == NULL
3674 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3675 || (oclause
3676 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3677 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3678 {
3679 error_at (OMP_CLAUSE_LOCATION (c),
3680 "%<ordered%> construct with %<depend%> clause "
3681 "must be closely nested inside an %<ordered%> loop");
3682 return false;
3683 }
3684 }
3685 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3686 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3687 {
3688 /* ordered simd must be closely nested inside of simd region,
3689 and simd region must not encounter constructs other than
3690 ordered simd, therefore ordered simd may be either orphaned,
3691 or ctx->stmt must be simd. The latter case has already been
3692 handled earlier. */
3693 if (ctx != NULL)
3694 {
3695 error_at (gimple_location (stmt),
3696 "%<ordered%> %<simd%> must be closely nested inside "
3697 "%<simd%> region");
3698 return false;
3699 }
3700 }
3701 for (; ctx != NULL; ctx = ctx->outer)
3702 switch (gimple_code (ctx->stmt))
3703 {
3704 case GIMPLE_OMP_CRITICAL:
3705 case GIMPLE_OMP_TASK:
3706 case GIMPLE_OMP_ORDERED:
3707 ordered_in_taskloop:
3708 error_at (gimple_location (stmt),
3709 "%<ordered%> region may not be closely nested inside "
3710 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3711 "%<taskloop%> region");
3712 return false;
3713 case GIMPLE_OMP_FOR:
3714 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3715 goto ordered_in_taskloop;
3716 tree o;
3717 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3718 OMP_CLAUSE_ORDERED);
3719 if (o == NULL)
3720 {
3721 error_at (gimple_location (stmt),
3722 "%<ordered%> region must be closely nested inside "
3723 "a loop region with an %<ordered%> clause");
3724 return false;
3725 }
3726 if (!gimple_omp_ordered_standalone_p (stmt))
3727 {
3728 if (OMP_CLAUSE_ORDERED_DOACROSS (o))
3729 {
3730 error_at (gimple_location (stmt),
3731 "%<ordered%> construct without %<doacross%> or "
3732 "%<depend%> clauses must not have the same "
3733 "binding region as %<ordered%> construct with "
3734 "those clauses");
3735 return false;
3736 }
3737 else if (OMP_CLAUSE_ORDERED_EXPR (o))
3738 {
3739 tree co
3740 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3741 OMP_CLAUSE_COLLAPSE);
3742 HOST_WIDE_INT
3743 o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
3744 HOST_WIDE_INT c_n = 1;
3745 if (co)
3746 c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
3747 if (o_n != c_n)
3748 {
3749 error_at (gimple_location (stmt),
3750 "%<ordered%> construct without %<doacross%> "
3751 "or %<depend%> clauses binds to loop where "
3752 "%<collapse%> argument %wd is different from "
3753 "%<ordered%> argument %wd", c_n, o_n);
3754 return false;
3755 }
3756 }
3757 }
3758 return true;
3759 case GIMPLE_OMP_TARGET:
3760 if (gimple_omp_target_kind (ctx->stmt)
3761 != GF_OMP_TARGET_KIND_REGION)
3762 break;
3763 /* FALLTHRU */
3764 case GIMPLE_OMP_PARALLEL:
3765 case GIMPLE_OMP_TEAMS:
3766 error_at (gimple_location (stmt),
3767 "%<ordered%> region must be closely nested inside "
3768 "a loop region with an %<ordered%> clause");
3769 return false;
3770 default:
3771 break;
3772 }
3773 break;
3774 case GIMPLE_OMP_CRITICAL:
3775 {
3776 tree this_stmt_name
3777 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3778 for (; ctx != NULL; ctx = ctx->outer)
3779 if (gomp_critical *other_crit
3780 = dyn_cast <gomp_critical *> (ctx->stmt))
3781 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3782 {
3783 error_at (gimple_location (stmt),
3784 "%<critical%> region may not be nested inside "
3785 "a %<critical%> region with the same name");
3786 return false;
3787 }
3788 }
3789 break;
3790 case GIMPLE_OMP_TEAMS:
3791 if (ctx == NULL)
3792 break;
3793 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3794 || (gimple_omp_target_kind (ctx->stmt)
3795 != GF_OMP_TARGET_KIND_REGION))
3796 {
3797 /* Teams construct can appear either strictly nested inside of
3798 target construct with no intervening stmts, or can be encountered
3799 only by initial task (so must not appear inside any OpenMP
3800 construct). */
3801 error_at (gimple_location (stmt),
3802 "%<teams%> construct must be closely nested inside of "
3803 "%<target%> construct or not nested in any OpenMP "
3804 "construct");
3805 return false;
3806 }
3807 break;
3808 case GIMPLE_OMP_TARGET:
3809 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3810 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
3811 {
3812 enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
3813 error_at (OMP_CLAUSE_LOCATION (c),
3814 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3815 kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
3816 return false;
3817 }
3818 if (is_gimple_omp_offloaded (stmt)
3819 && oacc_get_fn_attrib (cfun->decl) != NULL)
3820 {
3821 error_at (gimple_location (stmt),
3822 "OpenACC region inside of OpenACC routine, nested "
3823 "parallelism not supported yet");
3824 return false;
3825 }
3826 for (; ctx != NULL; ctx = ctx->outer)
3827 {
3828 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3829 {
3830 if (is_gimple_omp (stmt)
3831 && is_gimple_omp_oacc (stmt)
3832 && is_gimple_omp (ctx->stmt))
3833 {
3834 error_at (gimple_location (stmt),
3835 "OpenACC construct inside of non-OpenACC region");
3836 return false;
3837 }
3838 continue;
3839 }
3840
3841 const char *stmt_name, *ctx_stmt_name;
3842 switch (gimple_omp_target_kind (stmt))
3843 {
3844 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3845 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3846 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3847 case GF_OMP_TARGET_KIND_ENTER_DATA:
3848 stmt_name = "target enter data"; break;
3849 case GF_OMP_TARGET_KIND_EXIT_DATA:
3850 stmt_name = "target exit data"; break;
3851 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3852 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3853 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3854 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3855 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3856 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3857 stmt_name = "enter data"; break;
3858 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3859 stmt_name = "exit data"; break;
3860 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3861 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3862 break;
3863 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3865 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3866 /* OpenACC 'kernels' decomposed parts. */
3867 stmt_name = "kernels"; break;
3868 default: gcc_unreachable ();
3869 }
3870 switch (gimple_omp_target_kind (ctx->stmt))
3871 {
3872 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3873 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3874 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3875 ctx_stmt_name = "parallel"; break;
3876 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3877 ctx_stmt_name = "kernels"; break;
3878 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3879 ctx_stmt_name = "serial"; break;
3880 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3881 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3882 ctx_stmt_name = "host_data"; break;
3883 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3885 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3886 /* OpenACC 'kernels' decomposed parts. */
3887 ctx_stmt_name = "kernels"; break;
3888 default: gcc_unreachable ();
3889 }
3890
3891 /* OpenACC/OpenMP mismatch? */
3892 if (is_gimple_omp_oacc (stmt)
3893 != is_gimple_omp_oacc (ctx->stmt))
3894 {
3895 error_at (gimple_location (stmt),
3896 "%s %qs construct inside of %s %qs region",
3897 (is_gimple_omp_oacc (stmt)
3898 ? "OpenACC" : "OpenMP"), stmt_name,
3899 (is_gimple_omp_oacc (ctx->stmt)
3900 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3901 return false;
3902 }
3903 if (is_gimple_omp_offloaded (ctx->stmt))
3904 {
3905 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3906 if (is_gimple_omp_oacc (ctx->stmt))
3907 {
3908 error_at (gimple_location (stmt),
3909 "%qs construct inside of %qs region",
3910 stmt_name, ctx_stmt_name);
3911 return false;
3912 }
3913 else
3914 {
3915 if ((gimple_omp_target_kind (ctx->stmt)
3916 == GF_OMP_TARGET_KIND_REGION)
3917 && (gimple_omp_target_kind (stmt)
3918 == GF_OMP_TARGET_KIND_REGION))
3919 {
3920 c = omp_find_clause (gimple_omp_target_clauses (stmt),
3921 OMP_CLAUSE_DEVICE);
3922 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3923 break;
3924 }
3925 warning_at (gimple_location (stmt), 0,
3926 "%qs construct inside of %qs region",
3927 stmt_name, ctx_stmt_name);
3928 }
3929 }
3930 }
3931 break;
3932 default:
3933 break;
3934 }
3935 return true;
3936 }
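/* As an illustration of the nesting checks above: a stray barrier such
   as

     #pragma omp critical
     {
       #pragma omp barrier
     }

   is diagnosed by the GIMPLE_OMP_CRITICAL case ("barrier region may
   not be closely nested inside of ... %<critical%> ... region"),
   whereas a barrier directly inside a parallel region reaches the
   GIMPLE_OMP_PARALLEL case and is accepted. */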
3937
3938
3939 /* Helper function for scan_omp.
3940
3941 Callback for walk_tree or operands in walk_gimple_stmt used to
3942 scan for OMP directives in TP. */
3943
3944 static tree
3945 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3946 {
3947 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3948 omp_context *ctx = (omp_context *) wi->info;
3949 tree t = *tp;
3950
3951 switch (TREE_CODE (t))
3952 {
3953 case VAR_DECL:
3954 case PARM_DECL:
3955 case LABEL_DECL:
3956 case RESULT_DECL:
3957 if (ctx)
3958 {
3959 tree repl = remap_decl (t, &ctx->cb);
3960 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3961 *tp = repl;
3962 }
3963 break;
3964
3965 default:
3966 if (ctx && TYPE_P (t))
3967 *tp = remap_type (t, &ctx->cb);
3968 else if (!DECL_P (t))
3969 {
3970 *walk_subtrees = 1;
3971 if (ctx)
3972 {
3973 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3974 if (tem != TREE_TYPE (t))
3975 {
3976 if (TREE_CODE (t) == INTEGER_CST)
3977 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3978 else
3979 TREE_TYPE (t) = tem;
3980 }
3981 }
3982 }
3983 break;
3984 }
3985
3986 return NULL_TREE;
3987 }
3988
3989 /* Return true if FNDECL is a setjmp or a longjmp. */
3990
3991 static bool
3992 setjmp_or_longjmp_p (const_tree fndecl)
3993 {
3994 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3995 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3996 return true;
3997
3998 tree declname = DECL_NAME (fndecl);
3999 if (!declname
4000 || (DECL_CONTEXT (fndecl) != NULL_TREE
4001 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4002 || !TREE_PUBLIC (fndecl))
4003 return false;
4004
4005 const char *name = IDENTIFIER_POINTER (declname);
4006 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
4007 }
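/* Note that besides the two recognized builtins, this also matches a
   user-declared global "setjmp"/"longjmp" (e.g. an extern declaration
   at file scope that was not registered as a builtin), since the
   fallback compares the public, translation-unit-scope DECL_NAME
   textually. */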
4008
4009 /* Return true if FNDECL is an omp_* runtime API call. */
4010
4011 static bool
4012 omp_runtime_api_call (const_tree fndecl)
4013 {
4014 tree declname = DECL_NAME (fndecl);
4015 if (!declname
4016 || (DECL_CONTEXT (fndecl) != NULL_TREE
4017 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
4018 || !TREE_PUBLIC (fndecl))
4019 return false;
4020
4021 const char *name = IDENTIFIER_POINTER (declname);
4022 if (!startswith (name, "omp_"))
4023 return false;
4024
4025 static const char *omp_runtime_apis[] =
4026 {
4027 /* This array has 3 sections. First, omp_* calls that don't
4028 have any suffixes. */
4029 "aligned_alloc",
4030 "aligned_calloc",
4031 "alloc",
4032 "calloc",
4033 "free",
4034 "get_mapped_ptr",
4035 "realloc",
4036 "target_alloc",
4037 "target_associate_ptr",
4038 "target_disassociate_ptr",
4039 "target_free",
4040 "target_is_accessible",
4041 "target_is_present",
4042 "target_memcpy",
4043 "target_memcpy_async",
4044 "target_memcpy_rect",
4045 "target_memcpy_rect_async",
4046 NULL,
4047 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4048 DECL_NAME is always omp_* without trailing underscore. */
4049 "capture_affinity",
4050 "destroy_allocator",
4051 "destroy_lock",
4052 "destroy_nest_lock",
4053 "display_affinity",
4054 "fulfill_event",
4055 "get_active_level",
4056 "get_affinity_format",
4057 "get_cancellation",
4058 "get_default_allocator",
4059 "get_default_device",
4060 "get_device_num",
4061 "get_dynamic",
4062 "get_initial_device",
4063 "get_level",
4064 "get_max_active_levels",
4065 "get_max_task_priority",
4066 "get_max_teams",
4067 "get_max_threads",
4068 "get_nested",
4069 "get_num_devices",
4070 "get_num_places",
4071 "get_num_procs",
4072 "get_num_teams",
4073 "get_num_threads",
4074 "get_partition_num_places",
4075 "get_place_num",
4076 "get_proc_bind",
4077 "get_supported_active_levels",
4078 "get_team_num",
4079 "get_teams_thread_limit",
4080 "get_thread_limit",
4081 "get_thread_num",
4082 "get_wtick",
4083 "get_wtime",
4084 "in_final",
4085 "in_parallel",
4086 "init_lock",
4087 "init_nest_lock",
4088 "is_initial_device",
4089 "pause_resource",
4090 "pause_resource_all",
4091 "set_affinity_format",
4092 "set_default_allocator",
4093 "set_lock",
4094 "set_nest_lock",
4095 "test_lock",
4096 "test_nest_lock",
4097 "unset_lock",
4098 "unset_nest_lock",
4099 NULL,
4100 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4101 only omp_* and omp_*_8 appear as DECL_NAME. */
4102 "display_env",
4103 "get_ancestor_thread_num",
4104 "init_allocator",
4105 "get_partition_place_nums",
4106 "get_place_num_procs",
4107 "get_place_proc_ids",
4108 "get_schedule",
4109 "get_team_size",
4110 "set_default_device",
4111 "set_dynamic",
4112 "set_max_active_levels",
4113 "set_nested",
4114 "set_num_teams",
4115 "set_num_threads",
4116 "set_schedule",
4117 "set_teams_thread_limit"
4118 };
4119
4120 int mode = 0;
4121 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4122 {
4123 if (omp_runtime_apis[i] == NULL)
4124 {
4125 mode++;
4126 continue;
4127 }
4128 size_t len = strlen (omp_runtime_apis[i]);
4129 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4130 && (name[4 + len] == '\0'
4131 || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4132 return true;
4133 }
4134 return false;
4135 }
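/* Matching example: for DECL_NAME "omp_set_num_threads_8" the scan
   reaches the third section (mode 2), where the "set_num_threads"
   entry matches because the "_8" suffix is accepted there; plain
   "omp_set_num_threads" matches through the name[4 + len] == '\0'
   test instead. "omp_target_alloc_8" is rejected, as "target_alloc"
   sits in the suffix-less first section (mode 0). */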
4136
4137 /* Helper function for scan_omp.
4138
4139 Callback for walk_gimple_stmt used to scan for OMP directives in
4140 the current statement in GSI. */
4141
4142 static tree
4143 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4144 struct walk_stmt_info *wi)
4145 {
4146 gimple *stmt = gsi_stmt (*gsi);
4147 omp_context *ctx = (omp_context *) wi->info;
4148
4149 if (gimple_has_location (stmt))
4150 input_location = gimple_location (stmt);
4151
4152 /* Check the nesting restrictions. */
4153 bool remove = false;
4154 if (is_gimple_omp (stmt))
4155 remove = !check_omp_nesting_restrictions (stmt, ctx);
4156 else if (is_gimple_call (stmt))
4157 {
4158 tree fndecl = gimple_call_fndecl (stmt);
4159 if (fndecl)
4160 {
4161 if (ctx
4162 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4163 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
4164 && setjmp_or_longjmp_p (fndecl)
4165 && !ctx->loop_p)
4166 {
4167 remove = true;
4168 error_at (gimple_location (stmt),
4169 "setjmp/longjmp inside %<simd%> construct");
4170 }
4171 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4172 switch (DECL_FUNCTION_CODE (fndecl))
4173 {
4174 case BUILT_IN_GOMP_BARRIER:
4175 case BUILT_IN_GOMP_CANCEL:
4176 case BUILT_IN_GOMP_CANCELLATION_POINT:
4177 case BUILT_IN_GOMP_TASKYIELD:
4178 case BUILT_IN_GOMP_TASKWAIT:
4179 case BUILT_IN_GOMP_TASKGROUP_START:
4180 case BUILT_IN_GOMP_TASKGROUP_END:
4181 remove = !check_omp_nesting_restrictions (stmt, ctx);
4182 break;
4183 default:
4184 break;
4185 }
4186 else if (ctx)
4187 {
4188 omp_context *octx = ctx;
4189 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
4190 octx = ctx->outer;
4191 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
4192 {
4193 remove = true;
4194 error_at (gimple_location (stmt),
4195 "OpenMP runtime API call %qD in a region with "
4196 "%<order(concurrent)%> clause", fndecl);
4197 }
4198 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4199 && omp_runtime_api_call (fndecl)
4200 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4201 != strlen ("omp_get_num_teams"))
4202 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4203 "omp_get_num_teams") != 0)
4204 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
4205 != strlen ("omp_get_team_num"))
4206 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
4207 "omp_get_team_num") != 0))
4208 {
4209 remove = true;
4210 error_at (gimple_location (stmt),
4211 "OpenMP runtime API call %qD strictly nested in a "
4212 "%<teams%> region", fndecl);
4213 }
4214 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
4215 && (gimple_omp_target_kind (ctx->stmt)
4216 == GF_OMP_TARGET_KIND_REGION)
4217 && omp_runtime_api_call (fndecl))
4218 {
4219 tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
4220 tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
4221 if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
4222 error_at (gimple_location (stmt),
4223 "OpenMP runtime API call %qD in a region with "
4224 "%<device(ancestor)%> clause", fndecl);
4225 }
4226 }
4227 }
4228 }
4229 if (remove)
4230 {
4231 stmt = gimple_build_nop ();
4232 gsi_replace (gsi, stmt, false);
4233 }
4234
4235 *handled_ops_p = true;
4236
4237 switch (gimple_code (stmt))
4238 {
4239 case GIMPLE_OMP_PARALLEL:
4240 taskreg_nesting_level++;
4241 scan_omp_parallel (gsi, ctx);
4242 taskreg_nesting_level--;
4243 break;
4244
4245 case GIMPLE_OMP_TASK:
4246 taskreg_nesting_level++;
4247 scan_omp_task (gsi, ctx);
4248 taskreg_nesting_level--;
4249 break;
4250
4251 case GIMPLE_OMP_FOR:
4252 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4253 == GF_OMP_FOR_KIND_SIMD)
4254 && gimple_omp_for_combined_into_p (stmt)
4255 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
4256 {
4257 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
4258 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
4259 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
4260 {
4261 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
4262 break;
4263 }
4264 }
4265 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
4266 == GF_OMP_FOR_KIND_SIMD)
4267 && omp_maybe_offloaded_ctx (ctx)
4268 && omp_max_simt_vf ()
4269 && gimple_omp_for_collapse (stmt) == 1)
4270 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
4271 else
4272 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
4273 break;
4274
4275 case GIMPLE_OMP_SCOPE:
4276 ctx = new_omp_context (stmt, ctx);
4277 scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
4278 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4279 break;
4280
4281 case GIMPLE_OMP_SECTIONS:
4282 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
4283 break;
4284
4285 case GIMPLE_OMP_SINGLE:
4286 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
4287 break;
4288
4289 case GIMPLE_OMP_SCAN:
4290 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
4291 {
4292 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
4293 ctx->scan_inclusive = true;
4294 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
4295 ctx->scan_exclusive = true;
4296 }
4297 /* FALLTHRU */
4298 case GIMPLE_OMP_SECTION:
4299 case GIMPLE_OMP_MASTER:
4300 case GIMPLE_OMP_ORDERED:
4301 case GIMPLE_OMP_CRITICAL:
4302 ctx = new_omp_context (stmt, ctx);
4303 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4304 break;
4305
4306 case GIMPLE_OMP_MASKED:
4307 ctx = new_omp_context (stmt, ctx);
4308 scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
4309 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4310 break;
4311
4312 case GIMPLE_OMP_TASKGROUP:
4313 ctx = new_omp_context (stmt, ctx);
4314 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
4315 scan_omp (gimple_omp_body_ptr (stmt), ctx);
4316 break;
4317
4318 case GIMPLE_OMP_TARGET:
4319 if (is_gimple_omp_offloaded (stmt))
4320 {
4321 taskreg_nesting_level++;
4322 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4323 taskreg_nesting_level--;
4324 }
4325 else
4326 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4327 break;
4328
4329 case GIMPLE_OMP_TEAMS:
4330 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4331 {
4332 taskreg_nesting_level++;
4333 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4334 taskreg_nesting_level--;
4335 }
4336 else
4337 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4338 break;
4339
4340 case GIMPLE_BIND:
4341 {
4342 tree var;
4343
4344 *handled_ops_p = false;
4345 if (ctx)
4346 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4347 var;
4348 var = DECL_CHAIN (var))
4349 insert_decl_map (&ctx->cb, var, var);
4350 }
4351 break;
4352 default:
4353 *handled_ops_p = false;
4354 break;
4355 }
4356
4357 return NULL_TREE;
4358 }
4359
4360
4361 /* Scan all the statements starting at the current statement. CTX
4362 contains context information about the OMP directives and
4363 clauses found during the scan. */
4364
4365 static void
4366 scan_omp (gimple_seq *body_p, omp_context *ctx)
4367 {
4368 location_t saved_location;
4369 struct walk_stmt_info wi;
4370
4371 memset (&wi, 0, sizeof (wi));
4372 wi.info = ctx;
4373 wi.want_locations = true;
4374
4375 saved_location = input_location;
4376 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4377 input_location = saved_location;
4378 }
4379 \f
4380 /* Re-gimplification and code generation routines. */
4381
4382 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4383 of BIND if in a method. */
4384
4385 static void
4386 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4387 {
4388 if (DECL_ARGUMENTS (current_function_decl)
4389 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4390 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4391 == POINTER_TYPE))
4392 {
4393 tree vars = gimple_bind_vars (bind);
4394 for (tree *pvar = &vars; *pvar; )
4395 if (omp_member_access_dummy_var (*pvar))
4396 *pvar = DECL_CHAIN (*pvar);
4397 else
4398 pvar = &DECL_CHAIN (*pvar);
4399 gimple_bind_set_vars (bind, vars);
4400 }
4401 }
4402
4403 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4404 block and its subblocks. */
4405
4406 static void
4407 remove_member_access_dummy_vars (tree block)
4408 {
4409 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4410 if (omp_member_access_dummy_var (*pvar))
4411 *pvar = DECL_CHAIN (*pvar);
4412 else
4413 pvar = &DECL_CHAIN (*pvar);
4414
4415 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4416 remove_member_access_dummy_vars (block);
4417 }
4418
4419 /* If a context was created for STMT when it was scanned, return it. */
4420
4421 static omp_context *
4422 maybe_lookup_ctx (gimple *stmt)
4423 {
4424 splay_tree_node n;
4425 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4426 return n ? (omp_context *) n->value : NULL;
4427 }
4428
4429
4430 /* Find the mapping for DECL in CTX or the immediately enclosing
4431 context that has a mapping for DECL.
4432
4433 If CTX is a nested parallel directive, we may have to use the decl
4434 mappings created in CTX's parent context. Suppose that we have the
4435 following parallel nesting (variable UIDs shown for clarity):
4436
4437 iD.1562 = 0;
4438 #omp parallel shared(iD.1562) -> outer parallel
4439 iD.1562 = iD.1562 + 1;
4440
4441 #omp parallel shared (iD.1562) -> inner parallel
4442 iD.1562 = iD.1562 - 1;
4443
4444 Each parallel structure will create a distinct .omp_data_s structure
4445 for copying iD.1562 in/out of the directive:
4446
4447 outer parallel .omp_data_s.1.i -> iD.1562
4448 inner parallel .omp_data_s.2.i -> iD.1562
4449
4450 A shared variable mapping will produce a copy-out operation before
4451 the parallel directive and a copy-in operation after it. So, in
4452 this case we would have:
4453
4454 iD.1562 = 0;
4455 .omp_data_o.1.i = iD.1562;
4456 #omp parallel shared(iD.1562) -> outer parallel
4457 .omp_data_i.1 = &.omp_data_o.1;
4458 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4459
4460 .omp_data_o.2.i = iD.1562; -> **
4461 #omp parallel shared(iD.1562) -> inner parallel
4462 .omp_data_i.2 = &.omp_data_o.2;
4463 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4464
4465
4466 ** This is a problem. The symbol iD.1562 cannot be referenced
4467 inside the body of the outer parallel region. But since we are
4468 emitting this copy operation while expanding the inner parallel
4469 directive, we need to access the CTX structure of the outer
4470 parallel directive to get the correct mapping:
4471
4472 .omp_data_o.2.i = .omp_data_i.1->i
4473
4474 Since there may be other workshare or parallel directives enclosing
4475 the parallel directive, it may be necessary to walk up the context
4476 parent chain. This is not a problem in general because nested
4477 parallelism happens only rarely. */
4478
4479 static tree
4480 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4481 {
4482 tree t;
4483 omp_context *up;
4484
4485 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4486 t = maybe_lookup_decl (decl, up);
4487
4488 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4489
4490 return t ? t : decl;
4491 }
4492
4493
4494 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4495 in outer contexts. */
4496
4497 static tree
4498 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4499 {
4500 tree t = NULL;
4501 omp_context *up;
4502
4503 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4504 t = maybe_lookup_decl (decl, up);
4505
4506 return t ? t : decl;
4507 }
4508
4509
4510 /* Construct the initialization value for reduction operation OP. */
4511
4512 tree
4513 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4514 {
4515 switch (op)
4516 {
4517 case PLUS_EXPR:
4518 case MINUS_EXPR:
4519 case BIT_IOR_EXPR:
4520 case BIT_XOR_EXPR:
4521 case TRUTH_OR_EXPR:
4522 case TRUTH_ORIF_EXPR:
4523 case TRUTH_XOR_EXPR:
4524 case NE_EXPR:
4525 return build_zero_cst (type);
4526
4527 case MULT_EXPR:
4528 case TRUTH_AND_EXPR:
4529 case TRUTH_ANDIF_EXPR:
4530 case EQ_EXPR:
4531 return fold_convert_loc (loc, type, integer_one_node);
4532
4533 case BIT_AND_EXPR:
4534 return fold_convert_loc (loc, type, integer_minus_one_node);
4535
4536 case MAX_EXPR:
4537 if (SCALAR_FLOAT_TYPE_P (type))
4538 {
4539 REAL_VALUE_TYPE min;
4540 if (HONOR_INFINITIES (type))
4541 real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
4542 else
4543 real_maxval (&min, 1, TYPE_MODE (type));
4544 return build_real (type, min);
4545 }
4546 else if (POINTER_TYPE_P (type))
4547 {
4548 wide_int min
4549 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4550 return wide_int_to_tree (type, min);
4551 }
4552 else
4553 {
4554 gcc_assert (INTEGRAL_TYPE_P (type));
4555 return TYPE_MIN_VALUE (type);
4556 }
4557
4558 case MIN_EXPR:
4559 if (SCALAR_FLOAT_TYPE_P (type))
4560 {
4561 REAL_VALUE_TYPE max;
4562 if (HONOR_INFINITIES (type))
4563 max = dconstinf;
4564 else
4565 real_maxval (&max, 0, TYPE_MODE (type));
4566 return build_real (type, max);
4567 }
4568 else if (POINTER_TYPE_P (type))
4569 {
4570 wide_int max
4571 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4572 return wide_int_to_tree (type, max);
4573 }
4574 else
4575 {
4576 gcc_assert (INTEGRAL_TYPE_P (type));
4577 return TYPE_MAX_VALUE (type);
4578 }
4579
4580 default:
4581 gcc_unreachable ();
4582 }
4583 }
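/* Concretely: reduction(+:int) initializes to 0, reduction(*:double)
   to 1.0, reduction(&:int) to -1 (all bits set), and
   reduction(max:double) to -inf when infinities are honored, else to
   the most negative finite value of the type. */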
4584
4585 /* Construct the initialization value for reduction CLAUSE. */
4586
4587 tree
4588 omp_reduction_init (tree clause, tree type)
4589 {
4590 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4591 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4592 }
4593
4594 /* Return alignment to be assumed for var in CLAUSE, which should be
4595 OMP_CLAUSE_ALIGNED. */
4596
4597 static tree
4598 omp_clause_aligned_alignment (tree clause)
4599 {
4600 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4601 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4602
4603 /* Otherwise return implementation defined alignment. */
4604 unsigned int al = 1;
4605 opt_scalar_mode mode_iter;
4606 auto_vector_modes modes;
4607 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4608 static enum mode_class classes[]
4609 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4610 for (int i = 0; i < 4; i += 2)
4611 /* The for loop above dictates that we only walk through scalar classes. */
4612 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4613 {
4614 scalar_mode mode = mode_iter.require ();
4615 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4616 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4617 continue;
4618 machine_mode alt_vmode;
4619 for (unsigned int j = 0; j < modes.length (); ++j)
4620 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4621 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4622 vmode = alt_vmode;
4623
4624 tree type = lang_hooks.types.type_for_mode (mode, 1);
4625 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4626 continue;
4627 type = build_vector_type_for_mode (type, vmode);
4628 if (TYPE_MODE (type) != vmode)
4629 continue;
4630 if (TYPE_ALIGN_UNIT (type) > al)
4631 al = TYPE_ALIGN_UNIT (type);
4632 }
4633 return build_int_cst (integer_type_node, al);
4634 }
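/* For example, on an x86_64 target with 512-bit vectors enabled the
   loop above would settle on 64-byte vector types and thus return 64;
   the exact value is entirely target dependent. */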
4635
4636
4637 /* This structure is part of the interface between lower_rec_simd_input_clauses
4638 and lower_rec_input_clauses. */
4639
4640 class omplow_simd_context {
4641 public:
4642 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4643 tree idx;
4644 tree lane;
4645 tree lastlane;
4646 vec<tree, va_heap> simt_eargs;
4647 gimple_seq simt_dlist;
4648 poly_uint64_pod max_vf;
4649 bool is_simt;
4650 };
4651
4652 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4653 privatization. */
4654
4655 static bool
4656 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4657 omplow_simd_context *sctx, tree &ivar,
4658 tree &lvar, tree *rvar = NULL,
4659 tree *rvar2 = NULL)
4660 {
4661 if (known_eq (sctx->max_vf, 0U))
4662 {
4663 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4664 if (maybe_gt (sctx->max_vf, 1U))
4665 {
4666 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4667 OMP_CLAUSE_SAFELEN);
4668 if (c)
4669 {
4670 poly_uint64 safe_len;
4671 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4672 || maybe_lt (safe_len, 1U))
4673 sctx->max_vf = 1;
4674 else
4675 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4676 }
4677 }
4678 if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
4679 {
4680 for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
4681 c = OMP_CLAUSE_CHAIN (c))
4682 {
4683 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4684 continue;
4685
4686 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4687 {
4688 /* UDR reductions are not supported yet for SIMT, disable
4689 SIMT. */
4690 sctx->max_vf = 1;
4691 break;
4692 }
4693
4694 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
4695 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
4696 {
4697 /* Doing boolean operations on non-integral types is
4698 for conformance only, it's not worth supporting this
4699 for SIMT. */
4700 sctx->max_vf = 1;
4701 break;
4702 }
4703 }
4704 }
4705 if (maybe_gt (sctx->max_vf, 1U))
4706 {
4707 sctx->idx = create_tmp_var (unsigned_type_node);
4708 sctx->lane = create_tmp_var (unsigned_type_node);
4709 }
4710 }
4711 if (known_eq (sctx->max_vf, 1U))
4712 return false;
4713
4714 if (sctx->is_simt)
4715 {
4716 if (is_gimple_reg (new_var))
4717 {
4718 ivar = lvar = new_var;
4719 return true;
4720 }
4721 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4722 ivar = lvar = create_tmp_var (type);
4723 TREE_ADDRESSABLE (ivar) = 1;
4724 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4725 NULL, DECL_ATTRIBUTES (ivar));
4726 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4727 tree clobber = build_clobber (type);
4728 gimple *g = gimple_build_assign (ivar, clobber);
4729 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4730 }
4731 else
4732 {
4733 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4734 tree avar = create_tmp_var_raw (atype);
4735 if (TREE_ADDRESSABLE (new_var))
4736 TREE_ADDRESSABLE (avar) = 1;
4737 DECL_ATTRIBUTES (avar)
4738 = tree_cons (get_identifier ("omp simd array"), NULL,
4739 DECL_ATTRIBUTES (avar));
4740 gimple_add_tmp_var (avar);
4741 tree iavar = avar;
4742 if (rvar && !ctx->for_simd_scan_phase)
4743 {
4744 /* For inscan reductions, create another array temporary,
4745 which will hold the reduced value. */
4746 iavar = create_tmp_var_raw (atype);
4747 if (TREE_ADDRESSABLE (new_var))
4748 TREE_ADDRESSABLE (iavar) = 1;
4749 DECL_ATTRIBUTES (iavar)
4750 = tree_cons (get_identifier ("omp simd array"), NULL,
4751 tree_cons (get_identifier ("omp simd inscan"), NULL,
4752 DECL_ATTRIBUTES (iavar)));
4753 gimple_add_tmp_var (iavar);
4754 ctx->cb.decl_map->put (avar, iavar);
4755 if (sctx->lastlane == NULL_TREE)
4756 sctx->lastlane = create_tmp_var (unsigned_type_node);
4757 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4758 sctx->lastlane, NULL_TREE, NULL_TREE);
4759 TREE_THIS_NOTRAP (*rvar) = 1;
4760
4761 if (ctx->scan_exclusive)
4762 {
4763 /* And for exclusive scan yet another one, which will
4764 hold the value during the scan phase. */
4765 tree savar = create_tmp_var_raw (atype);
4766 if (TREE_ADDRESSABLE (new_var))
4767 TREE_ADDRESSABLE (savar) = 1;
4768 DECL_ATTRIBUTES (savar)
4769 = tree_cons (get_identifier ("omp simd array"), NULL,
4770 tree_cons (get_identifier ("omp simd inscan "
4771 "exclusive"), NULL,
4772 DECL_ATTRIBUTES (savar)));
4773 gimple_add_tmp_var (savar);
4774 ctx->cb.decl_map->put (iavar, savar);
4775 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4776 sctx->idx, NULL_TREE, NULL_TREE);
4777 TREE_THIS_NOTRAP (*rvar2) = 1;
4778 }
4779 }
4780 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4781 NULL_TREE, NULL_TREE);
4782 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4783 NULL_TREE, NULL_TREE);
4784 TREE_THIS_NOTRAP (ivar) = 1;
4785 TREE_THIS_NOTRAP (lvar) = 1;
4786 }
4787 if (DECL_P (new_var))
4788 {
4789 SET_DECL_VALUE_EXPR (new_var, lvar);
4790 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4791 }
4792 return true;
4793 }
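/* Sketch of the non-SIMT effect of the above (variable names
   illustrative): a lane-private scalar D gets an "omp simd array"
   temporary D_arr[max_vf]; uses of D are redirected through its
   DECL_VALUE_EXPR to D_arr[lane_], and D_arr[idx_] is handed back as
   IVAR for per-iteration accesses, with lane_/idx_ later tied to the
   real SIMD lane by the vectorizer. */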
4794
4795 /* Helper function of lower_rec_input_clauses. For a reference
4796 in simd reduction, add an underlying variable it will reference. */
4797
4798 static void
4799 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4800 {
4801 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4802 if (TREE_CONSTANT (z))
4803 {
4804 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4805 get_name (new_vard));
4806 gimple_add_tmp_var (z);
4807 TREE_ADDRESSABLE (z) = 1;
4808 z = build_fold_addr_expr_loc (loc, z);
4809 gimplify_assign (new_vard, z, ilist);
4810 }
4811 }
4812
4813 /* Helper function for lower_rec_input_clauses. Emit into the ILIST
4814 sequence code that computes (type) (tskred_temp[idx]). */
4815
4816 static tree
4817 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4818 unsigned idx)
4819 {
4820 unsigned HOST_WIDE_INT sz
4821 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4822 tree r = build2 (MEM_REF, pointer_sized_int_node,
4823 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4824 idx * sz));
4825 tree v = create_tmp_var (pointer_sized_int_node);
4826 gimple *g = gimple_build_assign (v, r);
4827 gimple_seq_add_stmt (ilist, g);
4828 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4829 {
4830 v = create_tmp_var (type);
4831 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4832 gimple_seq_add_stmt (ilist, g);
4833 }
4834 return v;
4835 }
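/* Rough shape of the sequence appended above for TYPE sizetype and
   IDX 1 (temporary names illustrative):
     v_1 = MEM[(pointer-sized int *) tskred_temp + 1 * sizeof (v_1)];
     v_2 = (sizetype) v_1;
   with v_2 (or v_1 directly, if no conversion is needed) returned. */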
4836
4837 /* Lower early initialization of privatized variable NEW_VAR
4838 if it needs an allocator (has an allocate clause). */
4839
4840 static bool
4841 lower_private_allocate (tree var, tree new_var, tree &allocator,
4842 tree &allocate_ptr, gimple_seq *ilist,
4843 omp_context *ctx, bool is_ref, tree size)
4844 {
4845 if (allocator)
4846 return false;
4847 gcc_assert (allocate_ptr == NULL_TREE);
4848 if (ctx->allocate_map
4849 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4850 if (tree *allocatorp = ctx->allocate_map->get (var))
4851 allocator = *allocatorp;
4852 if (allocator == NULL_TREE)
4853 return false;
4854 if (!is_ref && omp_privatize_by_reference (var))
4855 {
4856 allocator = NULL_TREE;
4857 return false;
4858 }
4859
4860 unsigned HOST_WIDE_INT ialign = 0;
4861 if (TREE_CODE (allocator) == TREE_LIST)
4862 {
4863 ialign = tree_to_uhwi (TREE_VALUE (allocator));
4864 allocator = TREE_PURPOSE (allocator);
4865 }
4866 if (TREE_CODE (allocator) != INTEGER_CST)
4867 allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
4868 allocator = fold_convert (pointer_sized_int_node, allocator);
4869 if (TREE_CODE (allocator) != INTEGER_CST)
4870 {
4871 tree var = create_tmp_var (TREE_TYPE (allocator));
4872 gimplify_assign (var, allocator, ilist);
4873 allocator = var;
4874 }
4875
4876 tree ptr_type, align, sz = size;
4877 if (TYPE_P (new_var))
4878 {
4879 ptr_type = build_pointer_type (new_var);
4880 ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
4881 }
4882 else if (is_ref)
4883 {
4884 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4885 ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4886 }
4887 else
4888 {
4889 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4890 ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
4891 if (sz == NULL_TREE)
4892 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4893 }
4894 align = build_int_cst (size_type_node, ialign);
4895 if (TREE_CODE (sz) != INTEGER_CST)
4896 {
4897 tree szvar = create_tmp_var (size_type_node);
4898 gimplify_assign (szvar, sz, ilist);
4899 sz = szvar;
4900 }
4901 allocate_ptr = create_tmp_var (ptr_type);
4902 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4903 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4904 gimple_call_set_lhs (g, allocate_ptr);
4905 gimple_seq_add_stmt (ilist, g);
4906 if (!is_ref)
4907 {
4908 tree x = build_simple_mem_ref (allocate_ptr);
4909 TREE_THIS_NOTRAP (x) = 1;
4910 SET_DECL_VALUE_EXPR (new_var, x);
4911 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4912 }
4913 return true;
4914 }
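/* Sketch of what the above emits for, say, "private (x) allocate (al : x)"
   with a non-constant allocator (names illustrative):
     al_ = <reference to al in the outer context>;
     x_ptr = __builtin_GOMP_alloc (align, sizeof (x), al_);
   after which uses of x go through *x_ptr via DECL_VALUE_EXPR. */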
4915
4916 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4917 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4918 private variables. Initialization statements go in ILIST, while calls
4919 to destructors go in DLIST. */
4920
4921 static void
4922 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4923 omp_context *ctx, struct omp_for_data *fd)
4924 {
4925 tree c, copyin_seq, x, ptr;
4926 bool copyin_by_ref = false;
4927 bool lastprivate_firstprivate = false;
4928 bool reduction_omp_orig_ref = false;
4929 int pass;
4930 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4931 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4932 omplow_simd_context sctx = omplow_simd_context ();
4933 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4934 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4935 gimple_seq llist[4] = { };
4936 tree nonconst_simd_if = NULL_TREE;
4937
4938 copyin_seq = NULL;
4939 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4940
4941 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4942 with data sharing clauses referencing variable sized vars. That
4943 is unnecessarily hard to support and very unlikely to result in
4944 vectorized code anyway. */
4945 if (is_simd)
4946 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4947 switch (OMP_CLAUSE_CODE (c))
4948 {
4949 case OMP_CLAUSE_LINEAR:
4950 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4951 sctx.max_vf = 1;
4952 /* FALLTHRU */
4953 case OMP_CLAUSE_PRIVATE:
4954 case OMP_CLAUSE_FIRSTPRIVATE:
4955 case OMP_CLAUSE_LASTPRIVATE:
4956 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4957 sctx.max_vf = 1;
4958 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4959 {
4960 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4961 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4962 sctx.max_vf = 1;
4963 }
4964 break;
4965 case OMP_CLAUSE_REDUCTION:
4966 case OMP_CLAUSE_IN_REDUCTION:
4967 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4968 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4969 sctx.max_vf = 1;
4970 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4971 {
4972 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4973 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4974 sctx.max_vf = 1;
4975 }
4976 break;
4977 case OMP_CLAUSE_IF:
4978 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4979 sctx.max_vf = 1;
4980 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4981 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4982 break;
4983 case OMP_CLAUSE_SIMDLEN:
4984 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4985 sctx.max_vf = 1;
4986 break;
4987 case OMP_CLAUSE__CONDTEMP_:
4988 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4989 if (sctx.is_simt)
4990 sctx.max_vf = 1;
4991 break;
4992 default:
4993 continue;
4994 }
4995
4996 /* Add a placeholder for simduid. */
4997 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4998 sctx.simt_eargs.safe_push (NULL_TREE);
4999
5000 unsigned task_reduction_cnt = 0;
5001 unsigned task_reduction_cntorig = 0;
5002 unsigned task_reduction_cnt_full = 0;
5003 unsigned task_reduction_cntorig_full = 0;
5004 unsigned task_reduction_other_cnt = 0;
5005 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
5006 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
5007 /* Do all the fixed sized types in the first pass, and the variable sized
5008 types in the second pass. This makes sure that the scalar arguments to
5009 the variable sized types are processed before we use them in the
5010 variable sized operations. For task reductions we use 4 passes, in the
5011 first two we ignore them, in the third one gather arguments for
5012 GOMP_task_reduction_remap call and in the last pass actually handle
5013 the task reductions. */
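/* In other words (passes counted from 0): pass 0 handles fixed size
   privatization, pass 1 variable sized privatization, and when task
   reductions are present pass 2 collects the pointers for the
   GOMP_task_reduction_remap call emitted at the start of pass 3,
   which then performs the actual task reduction handling. */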
5014 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
5015 ? 4 : 2); ++pass)
5016 {
5017 if (pass == 2 && task_reduction_cnt)
5018 {
5019 tskred_atype
5020 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
5021 + task_reduction_cntorig);
5022 tskred_avar = create_tmp_var_raw (tskred_atype);
5023 gimple_add_tmp_var (tskred_avar);
5024 TREE_ADDRESSABLE (tskred_avar) = 1;
5025 task_reduction_cnt_full = task_reduction_cnt;
5026 task_reduction_cntorig_full = task_reduction_cntorig;
5027 }
5028 else if (pass == 3 && task_reduction_cnt)
5029 {
5030 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
5031 gimple *g
5032 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
5033 size_int (task_reduction_cntorig),
5034 build_fold_addr_expr (tskred_avar));
5035 gimple_seq_add_stmt (ilist, g);
5036 }
5037 if (pass == 3 && task_reduction_other_cnt)
5038 {
5039 /* For reduction clauses, build
5040 tskred_base = (void *) tskred_temp[2]
5041 + omp_get_thread_num () * tskred_temp[1]
5042 or if tskred_temp[1] is known to be constant, that constant
5043 directly. This is the start of the private reduction copy block
5044 for the current thread. */
5045 tree v = create_tmp_var (integer_type_node);
5046 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5047 gimple *g = gimple_build_call (x, 0);
5048 gimple_call_set_lhs (g, v);
5049 gimple_seq_add_stmt (ilist, g);
5050 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5051 tskred_temp = OMP_CLAUSE_DECL (c);
5052 if (is_taskreg_ctx (ctx))
5053 tskred_temp = lookup_decl (tskred_temp, ctx);
5054 tree v2 = create_tmp_var (sizetype);
5055 g = gimple_build_assign (v2, NOP_EXPR, v);
5056 gimple_seq_add_stmt (ilist, g);
5057 if (ctx->task_reductions[0])
5058 v = fold_convert (sizetype, ctx->task_reductions[0]);
5059 else
5060 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5061 tree v3 = create_tmp_var (sizetype);
5062 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5063 gimple_seq_add_stmt (ilist, g);
5064 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5065 tskred_base = create_tmp_var (ptr_type_node);
5066 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5067 gimple_seq_add_stmt (ilist, g);
5068 }
5069 task_reduction_cnt = 0;
5070 task_reduction_cntorig = 0;
5071 task_reduction_other_cnt = 0;
5072 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5073 {
5074 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5075 tree var, new_var;
5076 bool by_ref;
5077 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5078 bool task_reduction_p = false;
5079 bool task_reduction_needs_orig_p = false;
5080 tree cond = NULL_TREE;
5081 tree allocator, allocate_ptr;
5082
5083 switch (c_kind)
5084 {
5085 case OMP_CLAUSE_PRIVATE:
5086 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5087 continue;
5088 break;
5089 case OMP_CLAUSE_SHARED:
5090 /* Ignore shared directives in teams construct inside
5091 of target construct. */
5092 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5093 && !is_host_teams_ctx (ctx))
5094 continue;
5095 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5096 {
5097 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5098 || is_global_var (OMP_CLAUSE_DECL (c)));
5099 continue;
5100 }
/* FALLTHRU */
5101 case OMP_CLAUSE_FIRSTPRIVATE:
5102 case OMP_CLAUSE_COPYIN:
5103 break;
5104 case OMP_CLAUSE_LINEAR:
5105 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5106 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5107 lastprivate_firstprivate = true;
5108 break;
5109 case OMP_CLAUSE_REDUCTION:
5110 case OMP_CLAUSE_IN_REDUCTION:
5111 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5112 || is_task_ctx (ctx)
5113 || OMP_CLAUSE_REDUCTION_TASK (c))
5114 {
5115 task_reduction_p = true;
5116 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5117 {
5118 task_reduction_other_cnt++;
5119 if (pass == 2)
5120 continue;
5121 }
5122 else
5123 task_reduction_cnt++;
5124 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5125 {
5126 var = OMP_CLAUSE_DECL (c);
5127 /* If var is a global variable that isn't privatized
5128 in outer contexts, we don't need to look up the
5129 original address; it is always the address of the
5130 global variable itself. */
5131 if (!DECL_P (var)
5132 || omp_privatize_by_reference (var)
5133 || !is_global_var
5134 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5135 {
5136 task_reduction_needs_orig_p = true;
5137 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5138 task_reduction_cntorig++;
5139 }
5140 }
5141 }
5142 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5143 reduction_omp_orig_ref = true;
5144 break;
5145 case OMP_CLAUSE__REDUCTEMP_:
5146 if (!is_taskreg_ctx (ctx))
5147 continue;
5148 /* FALLTHRU */
5149 case OMP_CLAUSE__LOOPTEMP_:
5150 /* Handle _looptemp_/_reductemp_ clauses only on
5151 parallel/task. */
5152 if (fd)
5153 continue;
5154 break;
5155 case OMP_CLAUSE_LASTPRIVATE:
5156 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5157 {
5158 lastprivate_firstprivate = true;
5159 if (pass != 0 || is_taskloop_ctx (ctx))
5160 continue;
5161 }
5162 /* Even without corresponding firstprivate, if
5163 decl is Fortran allocatable, it needs an outer var
5164 reference. */
5165 else if (pass == 0
5166 && lang_hooks.decls.omp_private_outer_ref
5167 (OMP_CLAUSE_DECL (c)))
5168 lastprivate_firstprivate = true;
5169 break;
5170 case OMP_CLAUSE_ALIGNED:
5171 if (pass != 1)
5172 continue;
5173 var = OMP_CLAUSE_DECL (c);
5174 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5175 && !is_global_var (var))
5176 {
5177 new_var = maybe_lookup_decl (var, ctx);
5178 if (new_var == NULL_TREE)
5179 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5180 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5181 tree alarg = omp_clause_aligned_alignment (c);
5182 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5183 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5184 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5185 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5186 gimplify_and_add (x, ilist);
5187 }
5188 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5189 && is_global_var (var))
5190 {
5191 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5192 new_var = lookup_decl (var, ctx);
5193 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5194 t = build_fold_addr_expr_loc (clause_loc, t);
5195 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5196 tree alarg = omp_clause_aligned_alignment (c);
5197 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5198 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5199 t = fold_convert_loc (clause_loc, ptype, t);
5200 x = create_tmp_var (ptype);
5201 t = build2 (MODIFY_EXPR, ptype, x, t);
5202 gimplify_and_add (t, ilist);
5203 t = build_simple_mem_ref_loc (clause_loc, x);
5204 SET_DECL_VALUE_EXPR (new_var, t);
5205 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5206 }
5207 continue;
5208 case OMP_CLAUSE__CONDTEMP_:
5209 if (is_parallel_ctx (ctx)
5210 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5211 break;
5212 continue;
5213 default:
5214 continue;
5215 }
5216
5217 if (task_reduction_p != (pass >= 2))
5218 continue;
5219
5220 allocator = NULL_TREE;
5221 allocate_ptr = NULL_TREE;
5222 new_var = var = OMP_CLAUSE_DECL (c);
5223 if ((c_kind == OMP_CLAUSE_REDUCTION
5224 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5225 && TREE_CODE (var) == MEM_REF)
5226 {
5227 var = TREE_OPERAND (var, 0);
5228 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5229 var = TREE_OPERAND (var, 0);
5230 if (TREE_CODE (var) == INDIRECT_REF
5231 || TREE_CODE (var) == ADDR_EXPR)
5232 var = TREE_OPERAND (var, 0);
5233 if (is_variable_sized (var))
5234 {
5235 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5236 var = DECL_VALUE_EXPR (var);
5237 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5238 var = TREE_OPERAND (var, 0);
5239 gcc_assert (DECL_P (var));
5240 }
5241 new_var = var;
5242 }
5243 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5244 {
5245 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5246 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5247 }
5248 else if (c_kind != OMP_CLAUSE_COPYIN)
5249 new_var = lookup_decl (var, ctx);
5250
5251 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5252 {
5253 if (pass != 0)
5254 continue;
5255 }
5256 /* C/C++ array section reductions. */
5257 else if ((c_kind == OMP_CLAUSE_REDUCTION
5258 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5259 && var != OMP_CLAUSE_DECL (c))
5260 {
5261 if (pass == 0)
5262 continue;
5263
5264 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5265 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5266
5267 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5268 {
5269 tree b = TREE_OPERAND (orig_var, 1);
5270 if (is_omp_target (ctx->stmt))
5271 b = NULL_TREE;
5272 else
5273 b = maybe_lookup_decl (b, ctx);
5274 if (b == NULL)
5275 {
5276 b = TREE_OPERAND (orig_var, 1);
5277 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5278 }
5279 if (integer_zerop (bias))
5280 bias = b;
5281 else
5282 {
5283 bias = fold_convert_loc (clause_loc,
5284 TREE_TYPE (b), bias);
5285 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5286 TREE_TYPE (b), b, bias);
5287 }
5288 orig_var = TREE_OPERAND (orig_var, 0);
5289 }
5290 if (pass == 2)
5291 {
5292 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5293 if (is_global_var (out)
5294 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5295 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5296 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5297 != POINTER_TYPE)))
5298 x = var;
5299 else if (is_omp_target (ctx->stmt))
5300 x = out;
5301 else
5302 {
5303 bool by_ref = use_pointer_for_field (var, NULL);
5304 x = build_receiver_ref (var, by_ref, ctx);
5305 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5306 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5307 == POINTER_TYPE))
5308 x = build_fold_addr_expr (x);
5309 }
5310 if (TREE_CODE (orig_var) == INDIRECT_REF)
5311 x = build_simple_mem_ref (x);
5312 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5313 {
5314 if (var == TREE_OPERAND (orig_var, 0))
5315 x = build_fold_addr_expr (x);
5316 }
5317 bias = fold_convert (sizetype, bias);
5318 x = fold_convert (ptr_type_node, x);
5319 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5320 TREE_TYPE (x), x, bias);
5321 unsigned cnt = task_reduction_cnt - 1;
5322 if (!task_reduction_needs_orig_p)
5323 cnt += (task_reduction_cntorig_full
5324 - task_reduction_cntorig);
5325 else
5326 cnt = task_reduction_cntorig - 1;
5327 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5328 size_int (cnt), NULL_TREE, NULL_TREE);
5329 gimplify_assign (r, x, ilist);
5330 continue;
5331 }
5332
5333 if (TREE_CODE (orig_var) == INDIRECT_REF
5334 || TREE_CODE (orig_var) == ADDR_EXPR)
5335 orig_var = TREE_OPERAND (orig_var, 0);
5336 tree d = OMP_CLAUSE_DECL (c);
5337 tree type = TREE_TYPE (d);
5338 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5339 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5340 tree sz = v;
5341 const char *name = get_name (orig_var);
5342 if (pass != 3 && !TREE_CONSTANT (v))
5343 {
5344 tree t;
5345 if (is_omp_target (ctx->stmt))
5346 t = NULL_TREE;
5347 else
5348 t = maybe_lookup_decl (v, ctx);
5349 if (t)
5350 v = t;
5351 else
5352 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5353 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5354 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5355 TREE_TYPE (v), v,
5356 build_int_cst (TREE_TYPE (v), 1));
5357 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5358 TREE_TYPE (v), t,
5359 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5360 }
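/* Worked example for the size computed above: for a C array section
   'reduction (+ : a[0:n])' with non-constant n, the domain maximum V is
   n - 1, so SZ becomes (V + 1) * sizeof (element); the non-constant
   branch below then obtains the storage with __builtin_alloca_with_align
   using exactly this size.  */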
5361 if (pass == 3)
5362 {
5363 tree xv = create_tmp_var (ptr_type_node);
5364 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5365 {
5366 unsigned cnt = task_reduction_cnt - 1;
5367 if (!task_reduction_needs_orig_p)
5368 cnt += (task_reduction_cntorig_full
5369 - task_reduction_cntorig);
5370 else
5371 cnt = task_reduction_cntorig - 1;
5372 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5373 size_int (cnt), NULL_TREE, NULL_TREE);
5374
5375 gimple *g = gimple_build_assign (xv, x);
5376 gimple_seq_add_stmt (ilist, g);
5377 }
5378 else
5379 {
5380 unsigned int idx = *ctx->task_reduction_map->get (c);
5381 tree off;
5382 if (ctx->task_reductions[1 + idx])
5383 off = fold_convert (sizetype,
5384 ctx->task_reductions[1 + idx]);
5385 else
5386 off = task_reduction_read (ilist, tskred_temp, sizetype,
5387 7 + 3 * idx + 1);
5388 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5389 tskred_base, off);
5390 gimple_seq_add_stmt (ilist, g);
5391 }
5392 x = fold_convert (build_pointer_type (boolean_type_node),
5393 xv);
5394 if (TREE_CONSTANT (v))
5395 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5396 TYPE_SIZE_UNIT (type));
5397 else
5398 {
5399 tree t;
5400 if (is_omp_target (ctx->stmt))
5401 t = NULL_TREE;
5402 else
5403 t = maybe_lookup_decl (v, ctx);
5404 if (t)
5405 v = t;
5406 else
5407 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5408 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5409 fb_rvalue);
5410 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5411 TREE_TYPE (v), v,
5412 build_int_cst (TREE_TYPE (v), 1));
5413 t = fold_build2_loc (clause_loc, MULT_EXPR,
5414 TREE_TYPE (v), t,
5415 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5416 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5417 }
5418 cond = create_tmp_var (TREE_TYPE (x));
5419 gimplify_assign (cond, x, ilist);
5420 x = xv;
5421 }
5422 else if (lower_private_allocate (var, type, allocator,
5423 allocate_ptr, ilist, ctx,
5424 true,
5425 TREE_CONSTANT (v)
5426 ? TYPE_SIZE_UNIT (type)
5427 : sz))
5428 x = allocate_ptr;
5429 else if (TREE_CONSTANT (v))
5430 {
5431 x = create_tmp_var_raw (type, name);
5432 gimple_add_tmp_var (x);
5433 TREE_ADDRESSABLE (x) = 1;
5434 x = build_fold_addr_expr_loc (clause_loc, x);
5435 }
5436 else
5437 {
5438 tree atmp
5439 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5440 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5441 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5442 }
5443
5444 tree ptype = build_pointer_type (TREE_TYPE (type));
5445 x = fold_convert_loc (clause_loc, ptype, x);
5446 tree y = create_tmp_var (ptype, name);
5447 gimplify_assign (y, x, ilist);
5448 x = y;
5449 tree yb = y;
5450
5451 if (!integer_zerop (bias))
5452 {
5453 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5454 bias);
5455 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5456 x);
5457 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5458 pointer_sized_int_node, yb, bias);
5459 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5460 yb = create_tmp_var (ptype, name);
5461 gimplify_assign (yb, x, ilist);
5462 x = yb;
5463 }
5464
5465 d = TREE_OPERAND (d, 0);
5466 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5467 d = TREE_OPERAND (d, 0);
5468 if (TREE_CODE (d) == ADDR_EXPR)
5469 {
5470 if (orig_var != var)
5471 {
5472 gcc_assert (is_variable_sized (orig_var));
5473 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5474 x);
5475 gimplify_assign (new_var, x, ilist);
5476 tree new_orig_var = lookup_decl (orig_var, ctx);
5477 tree t = build_fold_indirect_ref (new_var);
5478 DECL_IGNORED_P (new_var) = 0;
5479 TREE_THIS_NOTRAP (t) = 1;
5480 SET_DECL_VALUE_EXPR (new_orig_var, t);
5481 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5482 }
5483 else
5484 {
5485 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5486 build_int_cst (ptype, 0));
5487 SET_DECL_VALUE_EXPR (new_var, x);
5488 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5489 }
5490 }
5491 else
5492 {
5493 gcc_assert (orig_var == var);
5494 if (TREE_CODE (d) == INDIRECT_REF)
5495 {
5496 x = create_tmp_var (ptype, name);
5497 TREE_ADDRESSABLE (x) = 1;
5498 gimplify_assign (x, yb, ilist);
5499 x = build_fold_addr_expr_loc (clause_loc, x);
5500 }
5501 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5502 gimplify_assign (new_var, x, ilist);
5503 }
5504 /* GOMP_taskgroup_reduction_register memsets the whole
5505 array to zero. If the initializer is zero, we don't
5506 need to initialize it again; just mark it as ever
5507 used unconditionally, i.e. cond = true. */
5508 if (cond
5509 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5510 && initializer_zerop (omp_reduction_init (c,
5511 TREE_TYPE (type))))
5512 {
5513 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5514 boolean_true_node);
5515 gimple_seq_add_stmt (ilist, g);
5516 continue;
5517 }
5518 tree end = create_artificial_label (UNKNOWN_LOCATION);
5519 if (cond)
5520 {
5521 gimple *g;
5522 if (!is_parallel_ctx (ctx))
5523 {
5524 tree condv = create_tmp_var (boolean_type_node);
5525 g = gimple_build_assign (condv,
5526 build_simple_mem_ref (cond));
5527 gimple_seq_add_stmt (ilist, g);
5528 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5529 g = gimple_build_cond (NE_EXPR, condv,
5530 boolean_false_node, end, lab1);
5531 gimple_seq_add_stmt (ilist, g);
5532 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5533 }
5534 g = gimple_build_assign (build_simple_mem_ref (cond),
5535 boolean_true_node);
5536 gimple_seq_add_stmt (ilist, g);
5537 }
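/* Schematically, the guard just emitted is

     if (*cond != 0) goto end;   // branch omitted inside a parallel
     *cond = true;

   with the element-wise initialization following and the END label
   placed after it, so an already-initialized array is not
   reinitialized.  */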
5538
5539 tree y1 = create_tmp_var (ptype);
5540 gimplify_assign (y1, y, ilist);
5541 tree i2 = NULL_TREE, y2 = NULL_TREE;
5542 tree body2 = NULL_TREE, end2 = NULL_TREE;
5543 tree y3 = NULL_TREE, y4 = NULL_TREE;
5544 if (task_reduction_needs_orig_p)
5545 {
5546 y3 = create_tmp_var (ptype);
5547 tree ref;
5548 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5549 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5550 size_int (task_reduction_cnt_full
5551 + task_reduction_cntorig - 1),
5552 NULL_TREE, NULL_TREE);
5553 else
5554 {
5555 unsigned int idx = *ctx->task_reduction_map->get (c);
5556 ref = task_reduction_read (ilist, tskred_temp, ptype,
5557 7 + 3 * idx);
5558 }
5559 gimplify_assign (y3, ref, ilist);
5560 }
5561 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5562 {
5563 if (pass != 3)
5564 {
5565 y2 = create_tmp_var (ptype);
5566 gimplify_assign (y2, y, ilist);
5567 }
5568 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5569 {
5570 tree ref = build_outer_var_ref (var, ctx);
5571 /* For references, build_outer_var_ref already performs this. */
5572 if (TREE_CODE (d) == INDIRECT_REF)
5573 gcc_assert (omp_privatize_by_reference (var));
5574 else if (TREE_CODE (d) == ADDR_EXPR)
5575 ref = build_fold_addr_expr (ref);
5576 else if (omp_privatize_by_reference (var))
5577 ref = build_fold_addr_expr (ref);
5578 ref = fold_convert_loc (clause_loc, ptype, ref);
5579 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5580 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5581 {
5582 y3 = create_tmp_var (ptype);
5583 gimplify_assign (y3, unshare_expr (ref), ilist);
5584 }
5585 if (is_simd)
5586 {
5587 y4 = create_tmp_var (ptype);
5588 gimplify_assign (y4, ref, dlist);
5589 }
5590 }
5591 }
5592 tree i = create_tmp_var (TREE_TYPE (v));
5593 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5594 tree body = create_artificial_label (UNKNOWN_LOCATION);
5595 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5596 if (y2)
5597 {
5598 i2 = create_tmp_var (TREE_TYPE (v));
5599 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5600 body2 = create_artificial_label (UNKNOWN_LOCATION);
5601 end2 = create_artificial_label (UNKNOWN_LOCATION);
5602 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5603 }
5604 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5605 {
5606 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5607 tree decl_placeholder
5608 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5609 SET_DECL_VALUE_EXPR (decl_placeholder,
5610 build_simple_mem_ref (y1));
5611 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5612 SET_DECL_VALUE_EXPR (placeholder,
5613 y3 ? build_simple_mem_ref (y3)
5614 : error_mark_node);
5615 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5616 x = lang_hooks.decls.omp_clause_default_ctor
5617 (c, build_simple_mem_ref (y1),
5618 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5619 if (x)
5620 gimplify_and_add (x, ilist);
5621 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5622 {
5623 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5624 lower_omp (&tseq, ctx);
5625 gimple_seq_add_seq (ilist, tseq);
5626 }
5627 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5628 if (is_simd)
5629 {
5630 SET_DECL_VALUE_EXPR (decl_placeholder,
5631 build_simple_mem_ref (y2));
5632 SET_DECL_VALUE_EXPR (placeholder,
5633 build_simple_mem_ref (y4));
5634 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5635 lower_omp (&tseq, ctx);
5636 gimple_seq_add_seq (dlist, tseq);
5637 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5638 }
5639 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5640 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5641 if (y2)
5642 {
5643 x = lang_hooks.decls.omp_clause_dtor
5644 (c, build_simple_mem_ref (y2));
5645 if (x)
5646 gimplify_and_add (x, dlist);
5647 }
5648 }
5649 else
5650 {
5651 x = omp_reduction_init (c, TREE_TYPE (type));
5652 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5653
5654 /* reduction(-:var) sums up the partial results, so it
5655 acts identically to reduction(+:var). */
5656 if (code == MINUS_EXPR)
5657 code = PLUS_EXPR;
5658
5659 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5660 if (is_simd)
5661 {
5662 x = build2 (code, TREE_TYPE (type),
5663 build_simple_mem_ref (y4),
5664 build_simple_mem_ref (y2));
5665 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5666 }
5667 }
5668 gimple *g
5669 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5670 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5671 gimple_seq_add_stmt (ilist, g);
5672 if (y3)
5673 {
5674 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5675 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5676 gimple_seq_add_stmt (ilist, g);
5677 }
5678 g = gimple_build_assign (i, PLUS_EXPR, i,
5679 build_int_cst (TREE_TYPE (i), 1));
5680 gimple_seq_add_stmt (ilist, g);
5681 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5682 gimple_seq_add_stmt (ilist, g);
5683 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5684 if (y2)
5685 {
5686 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5687 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5688 gimple_seq_add_stmt (dlist, g);
5689 if (y4)
5690 {
5691 g = gimple_build_assign
5692 (y4, POINTER_PLUS_EXPR, y4,
5693 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5694 gimple_seq_add_stmt (dlist, g);
5695 }
5696 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5697 build_int_cst (TREE_TYPE (i2), 1));
5698 gimple_seq_add_stmt (dlist, g);
5699 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5700 gimple_seq_add_stmt (dlist, g);
5701 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5702 }
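/* Both element loops built above share one shape (Y1/I in ILIST for
   initialization, Y2/I2 in DLIST for the merge/destruction part):

     i = 0;
   body:
     ... process *y1 (and *y2 / *y3 / *y4 where present) ...
     y1 = y1 + sizeof (element);
     i = i + 1;
     if (i <= v) goto body;
   end:  */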
5703 if (allocator)
5704 {
5705 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5706 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5707 gimple_seq_add_stmt (dlist, g);
5708 }
5709 continue;
5710 }
5711 else if (pass == 2)
5712 {
5713 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5714 if (is_global_var (out))
5715 x = var;
5716 else if (is_omp_target (ctx->stmt))
5717 x = out;
5718 else
5719 {
5720 bool by_ref = use_pointer_for_field (var, ctx);
5721 x = build_receiver_ref (var, by_ref, ctx);
5722 }
5723 if (!omp_privatize_by_reference (var))
5724 x = build_fold_addr_expr (x);
5725 x = fold_convert (ptr_type_node, x);
5726 unsigned cnt = task_reduction_cnt - 1;
5727 if (!task_reduction_needs_orig_p)
5728 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5729 else
5730 cnt = task_reduction_cntorig - 1;
5731 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5732 size_int (cnt), NULL_TREE, NULL_TREE);
5733 gimplify_assign (r, x, ilist);
5734 continue;
5735 }
5736 else if (pass == 3)
5737 {
5738 tree type = TREE_TYPE (new_var);
5739 if (!omp_privatize_by_reference (var))
5740 type = build_pointer_type (type);
5741 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5742 {
5743 unsigned cnt = task_reduction_cnt - 1;
5744 if (!task_reduction_needs_orig_p)
5745 cnt += (task_reduction_cntorig_full
5746 - task_reduction_cntorig);
5747 else
5748 cnt = task_reduction_cntorig - 1;
5749 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5750 size_int (cnt), NULL_TREE, NULL_TREE);
5751 }
5752 else
5753 {
5754 unsigned int idx = *ctx->task_reduction_map->get (c);
5755 tree off;
5756 if (ctx->task_reductions[1 + idx])
5757 off = fold_convert (sizetype,
5758 ctx->task_reductions[1 + idx]);
5759 else
5760 off = task_reduction_read (ilist, tskred_temp, sizetype,
5761 7 + 3 * idx + 1);
5762 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5763 tskred_base, off);
5764 }
5765 x = fold_convert (type, x);
5766 tree t;
5767 if (omp_privatize_by_reference (var))
5768 {
5769 gimplify_assign (new_var, x, ilist);
5770 t = new_var;
5771 new_var = build_simple_mem_ref (new_var);
5772 }
5773 else
5774 {
5775 t = create_tmp_var (type);
5776 gimplify_assign (t, x, ilist);
5777 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5778 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5779 }
5780 t = fold_convert (build_pointer_type (boolean_type_node), t);
5781 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5782 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5783 cond = create_tmp_var (TREE_TYPE (t));
5784 gimplify_assign (cond, t, ilist);
5785 }
5786 else if (is_variable_sized (var))
5787 {
5788 /* For variable-sized types, we need to allocate the
5789 actual storage here. Call alloca and store the
5790 result in the pointer decl that we created elsewhere. */
5791 if (pass == 0)
5792 continue;
5793
5794 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5795 {
5796 tree tmp;
5797
5798 ptr = DECL_VALUE_EXPR (new_var);
5799 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5800 ptr = TREE_OPERAND (ptr, 0);
5801 gcc_assert (DECL_P (ptr));
5802 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5803
5804 if (lower_private_allocate (var, new_var, allocator,
5805 allocate_ptr, ilist, ctx,
5806 false, x))
5807 tmp = allocate_ptr;
5808 else
5809 {
5810 /* void *tmp = __builtin_alloca_with_align (size, align); */
5811 tree atmp
5812 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5813 gcall *stmt
5814 = gimple_build_call (atmp, 2, x,
5815 size_int (DECL_ALIGN (var)));
5816 cfun->calls_alloca = 1;
5817 tmp = create_tmp_var_raw (ptr_type_node);
5818 gimple_add_tmp_var (tmp);
5819 gimple_call_set_lhs (stmt, tmp);
5820
5821 gimple_seq_add_stmt (ilist, stmt);
5822 }
5823
5824 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5825 gimplify_assign (ptr, x, ilist);
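/* So, unless an allocate clause supplied the storage, this amounts to

     void *tmp = __builtin_alloca_with_align (size, DECL_ALIGN (var));
     ptr = (typeof (ptr)) tmp;

   where PTR is the pointer decl behind the variable-sized variable's
   DECL_VALUE_EXPR.  */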
5826 }
5827 }
5828 else if (omp_privatize_by_reference (var)
5829 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5830 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5831 {
5832 /* For references that are being privatized for Fortran,
5833 allocate new backing storage for the new pointer
5834 variable. This allows us to avoid changing all the
5835 code that expects a pointer to something that expects
5836 a direct variable. */
5837 if (pass == 0)
5838 continue;
5839
5840 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5841 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5842 {
5843 x = build_receiver_ref (var, false, ctx);
5844 if (ctx->allocate_map)
5845 if (tree *allocatep = ctx->allocate_map->get (var))
5846 {
5847 allocator = *allocatep;
5848 if (TREE_CODE (allocator) == TREE_LIST)
5849 allocator = TREE_PURPOSE (allocator);
5850 if (TREE_CODE (allocator) != INTEGER_CST)
5851 allocator = build_outer_var_ref (allocator, ctx);
5852 allocator = fold_convert (pointer_sized_int_node,
5853 allocator);
5854 allocate_ptr = unshare_expr (x);
5855 }
5856 if (allocator == NULL_TREE)
5857 x = build_fold_addr_expr_loc (clause_loc, x);
5858 }
5859 else if (lower_private_allocate (var, new_var, allocator,
5860 allocate_ptr,
5861 ilist, ctx, true, x))
5862 x = allocate_ptr;
5863 else if (TREE_CONSTANT (x))
5864 {
5865 /* For a reduction in a SIMD loop, defer adding the
5866 initialization of the reference, because if we decide to
5867 use a SIMD array for it, the initialization could cause
5868 an expansion ICE. Ditto for other privatization clauses. */
5869 if (is_simd)
5870 x = NULL_TREE;
5871 else
5872 {
5873 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5874 get_name (var));
5875 gimple_add_tmp_var (x);
5876 TREE_ADDRESSABLE (x) = 1;
5877 x = build_fold_addr_expr_loc (clause_loc, x);
5878 }
5879 }
5880 else
5881 {
5882 tree atmp
5883 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5884 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5885 tree al = size_int (TYPE_ALIGN (rtype));
5886 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5887 }
5888
5889 if (x)
5890 {
5891 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5892 gimplify_assign (new_var, x, ilist);
5893 }
5894
5895 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
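/* Net effect for such a privatized reference, roughly:

     new_var = &<fresh backing storage>;   // tmp var, alloca, or allocator
     ... = *new_var;                       // later code works on the deref

   so code that expects a pointer keeps working while the data itself is
   private.  */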
5896 }
5897 else if ((c_kind == OMP_CLAUSE_REDUCTION
5898 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5899 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5900 {
5901 if (pass == 0)
5902 continue;
5903 }
5904 else if (pass != 0)
5905 continue;
5906
5907 switch (OMP_CLAUSE_CODE (c))
5908 {
5909 case OMP_CLAUSE_SHARED:
5910 /* Ignore shared directives in a teams construct inside
5911 a target construct. */
5912 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5913 && !is_host_teams_ctx (ctx))
5914 continue;
5915 /* Shared global vars are just accessed directly. */
5916 if (is_global_var (new_var))
5917 break;
5918 /* For taskloop firstprivate/lastprivate, represented
5919 as firstprivate and shared clause on the task, new_var
5920 is the firstprivate var. */
5921 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5922 break;
5923 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5924 needs to be delayed until after fixup_child_record_type so
5925 that we get the correct type during the dereference. */
5926 by_ref = use_pointer_for_field (var, ctx);
5927 x = build_receiver_ref (var, by_ref, ctx);
5928 SET_DECL_VALUE_EXPR (new_var, x);
5929 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5930
5931 /* ??? If VAR is not passed by reference, and the variable
5932 hasn't been initialized yet, then we'll get a warning for
5933 the store into the omp_data_s structure. Ideally, we'd be
5934 able to notice this and not store anything at all, but
5935 we're generating code too early. Suppress the warning. */
5936 if (!by_ref)
5937 suppress_warning (var, OPT_Wuninitialized);
5938 break;
5939
5940 case OMP_CLAUSE__CONDTEMP_:
5941 if (is_parallel_ctx (ctx))
5942 {
5943 x = build_receiver_ref (var, false, ctx);
5944 SET_DECL_VALUE_EXPR (new_var, x);
5945 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5946 }
5947 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5948 {
5949 x = build_zero_cst (TREE_TYPE (var));
5950 goto do_private;
5951 }
5952 break;
5953
5954 case OMP_CLAUSE_LASTPRIVATE:
5955 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5956 break;
5957 /* FALLTHRU */
5958
5959 case OMP_CLAUSE_PRIVATE:
5960 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5961 x = build_outer_var_ref (var, ctx);
5962 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5963 {
5964 if (is_task_ctx (ctx))
5965 x = build_receiver_ref (var, false, ctx);
5966 else
5967 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5968 }
5969 else
5970 x = NULL;
5971 do_private:
5972 tree nx;
5973 bool copy_ctor;
5974 copy_ctor = false;
5975 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5976 ilist, ctx, false, NULL_TREE);
5977 nx = unshare_expr (new_var);
5978 if (is_simd
5979 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5980 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5981 copy_ctor = true;
5982 if (copy_ctor)
5983 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5984 else
5985 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5986 if (is_simd)
5987 {
5988 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5989 if ((TREE_ADDRESSABLE (new_var) || nx || y
5990 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5991 && (gimple_omp_for_collapse (ctx->stmt) != 1
5992 || (gimple_omp_for_index (ctx->stmt, 0)
5993 != new_var)))
5994 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5995 || omp_privatize_by_reference (var))
5996 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5997 ivar, lvar))
5998 {
5999 if (omp_privatize_by_reference (var))
6000 {
6001 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6002 tree new_vard = TREE_OPERAND (new_var, 0);
6003 gcc_assert (DECL_P (new_vard));
6004 SET_DECL_VALUE_EXPR (new_vard,
6005 build_fold_addr_expr (lvar));
6006 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6007 }
6008
6009 if (nx)
6010 {
6011 tree iv = unshare_expr (ivar);
6012 if (copy_ctor)
6013 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
6014 x);
6015 else
6016 x = lang_hooks.decls.omp_clause_default_ctor (c,
6017 iv,
6018 x);
6019 }
6020 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
6021 {
6022 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
6023 unshare_expr (ivar), x);
6024 nx = x;
6025 }
6026 if (nx && x)
6027 gimplify_and_add (x, &llist[0]);
6028 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6029 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6030 {
6031 tree v = new_var;
6032 if (!DECL_P (v))
6033 {
6034 gcc_assert (TREE_CODE (v) == MEM_REF);
6035 v = TREE_OPERAND (v, 0);
6036 gcc_assert (DECL_P (v));
6037 }
6038 v = *ctx->lastprivate_conditional_map->get (v);
6039 tree t = create_tmp_var (TREE_TYPE (v));
6040 tree z = build_zero_cst (TREE_TYPE (v));
6041 tree orig_v
6042 = build_outer_var_ref (var, ctx,
6043 OMP_CLAUSE_LASTPRIVATE);
6044 gimple_seq_add_stmt (dlist,
6045 gimple_build_assign (t, z));
6046 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
6047 tree civar = DECL_VALUE_EXPR (v);
6048 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
6049 civar = unshare_expr (civar);
6050 TREE_OPERAND (civar, 1) = sctx.idx;
6051 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6052 unshare_expr (civar));
6053 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6054 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6055 orig_v, unshare_expr (ivar)));
6056 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6057 civar);
6058 x = build3 (COND_EXPR, void_type_node, cond, x,
6059 void_node);
6060 gimple_seq tseq = NULL;
6061 gimplify_and_add (x, &tseq);
6062 if (ctx->outer)
6063 lower_omp (&tseq, ctx->outer);
6064 gimple_seq_add_seq (&llist[1], tseq);
6065 }
6066 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6067 && ctx->for_simd_scan_phase)
6068 {
6069 x = unshare_expr (ivar);
6070 tree orig_v
6071 = build_outer_var_ref (var, ctx,
6072 OMP_CLAUSE_LASTPRIVATE);
6073 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6074 orig_v);
6075 gimplify_and_add (x, &llist[0]);
6076 }
6077 if (y)
6078 {
6079 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6080 if (y)
6081 gimplify_and_add (y, &llist[1]);
6082 }
6083 break;
6084 }
6085 if (omp_privatize_by_reference (var))
6086 {
6087 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6088 tree new_vard = TREE_OPERAND (new_var, 0);
6089 gcc_assert (DECL_P (new_vard));
6090 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6091 x = TYPE_SIZE_UNIT (type);
6092 if (TREE_CONSTANT (x))
6093 {
6094 x = create_tmp_var_raw (type, get_name (var));
6095 gimple_add_tmp_var (x);
6096 TREE_ADDRESSABLE (x) = 1;
6097 x = build_fold_addr_expr_loc (clause_loc, x);
6098 x = fold_convert_loc (clause_loc,
6099 TREE_TYPE (new_vard), x);
6100 gimplify_assign (new_vard, x, ilist);
6101 }
6102 }
6103 }
6104 if (nx)
6105 gimplify_and_add (nx, ilist);
6106 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6107 && is_simd
6108 && ctx->for_simd_scan_phase)
6109 {
6110 tree orig_v = build_outer_var_ref (var, ctx,
6111 OMP_CLAUSE_LASTPRIVATE);
6112 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6113 orig_v);
6114 gimplify_and_add (x, ilist);
6115 }
6116 /* FALLTHRU */
6117
6118 do_dtor:
6119 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6120 if (x)
6121 gimplify_and_add (x, dlist);
6122 if (allocator)
6123 {
6124 if (!is_gimple_val (allocator))
6125 {
6126 tree avar = create_tmp_var (TREE_TYPE (allocator));
6127 gimplify_assign (avar, allocator, dlist);
6128 allocator = avar;
6129 }
6130 if (!is_gimple_val (allocate_ptr))
6131 {
6132 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6133 gimplify_assign (apvar, allocate_ptr, dlist);
6134 allocate_ptr = apvar;
6135 }
6136 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6137 gimple *g
6138 = gimple_build_call (f, 2, allocate_ptr, allocator);
6139 gimple_seq_add_stmt (dlist, g);
6140 }
6141 break;
6142
6143 case OMP_CLAUSE_LINEAR:
6144 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6145 goto do_firstprivate;
6146 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6147 x = NULL;
6148 else
6149 x = build_outer_var_ref (var, ctx);
6150 goto do_private;
6151
6152 case OMP_CLAUSE_FIRSTPRIVATE:
6153 if (is_task_ctx (ctx))
6154 {
6155 if ((omp_privatize_by_reference (var)
6156 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6157 || is_variable_sized (var))
6158 goto do_dtor;
6159 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6160 ctx))
6161 || use_pointer_for_field (var, NULL))
6162 {
6163 x = build_receiver_ref (var, false, ctx);
6164 if (ctx->allocate_map)
6165 if (tree *allocatep = ctx->allocate_map->get (var))
6166 {
6167 allocator = *allocatep;
6168 if (TREE_CODE (allocator) == TREE_LIST)
6169 allocator = TREE_PURPOSE (allocator);
6170 if (TREE_CODE (allocator) != INTEGER_CST)
6171 allocator = build_outer_var_ref (allocator, ctx);
6172 allocator = fold_convert (pointer_sized_int_node,
6173 allocator);
6174 allocate_ptr = unshare_expr (x);
6175 x = build_simple_mem_ref (x);
6176 TREE_THIS_NOTRAP (x) = 1;
6177 }
6178 SET_DECL_VALUE_EXPR (new_var, x);
6179 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6180 goto do_dtor;
6181 }
6182 }
6183 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6184 && omp_privatize_by_reference (var))
6185 {
6186 x = build_outer_var_ref (var, ctx);
6187 gcc_assert (TREE_CODE (x) == MEM_REF
6188 && integer_zerop (TREE_OPERAND (x, 1)));
6189 x = TREE_OPERAND (x, 0);
6190 x = lang_hooks.decls.omp_clause_copy_ctor
6191 (c, unshare_expr (new_var), x);
6192 gimplify_and_add (x, ilist);
6193 goto do_dtor;
6194 }
6195 do_firstprivate:
6196 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6197 ilist, ctx, false, NULL_TREE);
6198 x = build_outer_var_ref (var, ctx);
6199 if (is_simd)
6200 {
6201 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6202 && gimple_omp_for_combined_into_p (ctx->stmt))
6203 {
6204 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6205 if (DECL_P (t))
6206 t = build_outer_var_ref (t, ctx);
6207 tree stept = TREE_TYPE (t);
6208 tree ct = omp_find_clause (clauses,
6209 OMP_CLAUSE__LOOPTEMP_);
6210 gcc_assert (ct);
6211 tree l = OMP_CLAUSE_DECL (ct);
6212 tree n1 = fd->loop.n1;
6213 tree step = fd->loop.step;
6214 tree itype = TREE_TYPE (l);
6215 if (POINTER_TYPE_P (itype))
6216 itype = signed_type_for (itype);
6217 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6218 if (TYPE_UNSIGNED (itype)
6219 && fd->loop.cond_code == GT_EXPR)
6220 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6221 fold_build1 (NEGATE_EXPR, itype, l),
6222 fold_build1 (NEGATE_EXPR,
6223 itype, step));
6224 else
6225 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6226 t = fold_build2 (MULT_EXPR, stept,
6227 fold_convert (stept, l), t);
6228
6229 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6230 {
6231 if (omp_privatize_by_reference (var))
6232 {
6233 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6234 tree new_vard = TREE_OPERAND (new_var, 0);
6235 gcc_assert (DECL_P (new_vard));
6236 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6237 nx = TYPE_SIZE_UNIT (type);
6238 if (TREE_CONSTANT (nx))
6239 {
6240 nx = create_tmp_var_raw (type,
6241 get_name (var));
6242 gimple_add_tmp_var (nx);
6243 TREE_ADDRESSABLE (nx) = 1;
6244 nx = build_fold_addr_expr_loc (clause_loc,
6245 nx);
6246 nx = fold_convert_loc (clause_loc,
6247 TREE_TYPE (new_vard),
6248 nx);
6249 gimplify_assign (new_vard, nx, ilist);
6250 }
6251 }
6252
6253 x = lang_hooks.decls.omp_clause_linear_ctor
6254 (c, new_var, x, t);
6255 gimplify_and_add (x, ilist);
6256 goto do_dtor;
6257 }
6258
6259 if (POINTER_TYPE_P (TREE_TYPE (x)))
6260 x = fold_build_pointer_plus (x, t);
6261 else
6262 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6263 fold_convert (TREE_TYPE (x), t));
6264 }
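/* Worked example: for 'linear (v : step)' on a loop combined into an
   outer construct, L is the _looptemp_ carrying this chunk's starting
   iterator value, so the adjustment computed above is

     v' = outer_v + ((l - n1) / loop_step) * step

   i.e. the outer value plus the number of logical iterations preceding
   this chunk times the linear step.  */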
6265
6266 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6267 || TREE_ADDRESSABLE (new_var)
6268 || omp_privatize_by_reference (var))
6269 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6270 ivar, lvar))
6271 {
6272 if (omp_privatize_by_reference (var))
6273 {
6274 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6275 tree new_vard = TREE_OPERAND (new_var, 0);
6276 gcc_assert (DECL_P (new_vard));
6277 SET_DECL_VALUE_EXPR (new_vard,
6278 build_fold_addr_expr (lvar));
6279 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6280 }
6281 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6282 {
6283 tree iv = create_tmp_var (TREE_TYPE (new_var));
6284 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6285 gimplify_and_add (x, ilist);
6286 gimple_stmt_iterator gsi
6287 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6288 gassign *g
6289 = gimple_build_assign (unshare_expr (lvar), iv);
6290 gsi_insert_before_without_update (&gsi, g,
6291 GSI_SAME_STMT);
6292 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6293 enum tree_code code = PLUS_EXPR;
6294 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6295 code = POINTER_PLUS_EXPR;
6296 g = gimple_build_assign (iv, code, iv, t);
6297 gsi_insert_before_without_update (&gsi, g,
6298 GSI_SAME_STMT);
6299 break;
6300 }
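/* The OMP_CLAUSE_LINEAR path just taken seeds each iteration at the top
   of the loop body, roughly:

     iv = <outer value + adjustment>;   // once, in ILIST
   body:
     lvar = iv;                         // this iteration's private copy
     iv = iv + step;  */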
6301 x = lang_hooks.decls.omp_clause_copy_ctor
6302 (c, unshare_expr (ivar), x);
6303 gimplify_and_add (x, &llist[0]);
6304 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6305 if (x)
6306 gimplify_and_add (x, &llist[1]);
6307 break;
6308 }
6309 if (omp_privatize_by_reference (var))
6310 {
6311 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6312 tree new_vard = TREE_OPERAND (new_var, 0);
6313 gcc_assert (DECL_P (new_vard));
6314 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6315 nx = TYPE_SIZE_UNIT (type);
6316 if (TREE_CONSTANT (nx))
6317 {
6318 nx = create_tmp_var_raw (type, get_name (var));
6319 gimple_add_tmp_var (nx);
6320 TREE_ADDRESSABLE (nx) = 1;
6321 nx = build_fold_addr_expr_loc (clause_loc, nx);
6322 nx = fold_convert_loc (clause_loc,
6323 TREE_TYPE (new_vard), nx);
6324 gimplify_assign (new_vard, nx, ilist);
6325 }
6326 }
6327 }
6328 x = lang_hooks.decls.omp_clause_copy_ctor
6329 (c, unshare_expr (new_var), x);
6330 gimplify_and_add (x, ilist);
6331 goto do_dtor;
6332
6333 case OMP_CLAUSE__LOOPTEMP_:
6334 case OMP_CLAUSE__REDUCTEMP_:
6335 gcc_assert (is_taskreg_ctx (ctx));
6336 x = build_outer_var_ref (var, ctx);
6337 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6338 gimplify_and_add (x, ilist);
6339 break;
6340
6341 case OMP_CLAUSE_COPYIN:
6342 by_ref = use_pointer_for_field (var, NULL);
6343 x = build_receiver_ref (var, by_ref, ctx);
6344 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6345 append_to_statement_list (x, &copyin_seq);
6346 copyin_by_ref |= by_ref;
6347 break;
6348
6349 case OMP_CLAUSE_REDUCTION:
6350 case OMP_CLAUSE_IN_REDUCTION:
6351 /* OpenACC reductions are initialized using the
6352 GOACC_REDUCTION internal function. */
6353 if (is_gimple_omp_oacc (ctx->stmt))
6354 break;
6355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6356 {
6357 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6358 gimple *tseq;
6359 tree ptype = TREE_TYPE (placeholder);
6360 if (cond)
6361 {
6362 x = error_mark_node;
6363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6364 && !task_reduction_needs_orig_p)
6365 x = var;
6366 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6367 {
6368 tree pptype = build_pointer_type (ptype);
6369 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6370 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6371 size_int (task_reduction_cnt_full
6372 + task_reduction_cntorig - 1),
6373 NULL_TREE, NULL_TREE);
6374 else
6375 {
6376 unsigned int idx
6377 = *ctx->task_reduction_map->get (c);
6378 x = task_reduction_read (ilist, tskred_temp,
6379 pptype, 7 + 3 * idx);
6380 }
6381 x = fold_convert (pptype, x);
6382 x = build_simple_mem_ref (x);
6383 }
6384 }
6385 else
6386 {
6387 lower_private_allocate (var, new_var, allocator,
6388 allocate_ptr, ilist, ctx, false,
6389 NULL_TREE);
6390 x = build_outer_var_ref (var, ctx);
6391
6392 if (omp_privatize_by_reference (var)
6393 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6394 x = build_fold_addr_expr_loc (clause_loc, x);
6395 }
6396 SET_DECL_VALUE_EXPR (placeholder, x);
6397 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6398 tree new_vard = new_var;
6399 if (omp_privatize_by_reference (var))
6400 {
6401 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6402 new_vard = TREE_OPERAND (new_var, 0);
6403 gcc_assert (DECL_P (new_vard));
6404 }
6405 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6406 if (is_simd
6407 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6408 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6409 rvarp = &rvar;
6410 if (is_simd
6411 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6412 ivar, lvar, rvarp,
6413 &rvar2))
6414 {
6415 if (new_vard == new_var)
6416 {
6417 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6418 SET_DECL_VALUE_EXPR (new_var, ivar);
6419 }
6420 else
6421 {
6422 SET_DECL_VALUE_EXPR (new_vard,
6423 build_fold_addr_expr (ivar));
6424 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6425 }
6426 x = lang_hooks.decls.omp_clause_default_ctor
6427 (c, unshare_expr (ivar),
6428 build_outer_var_ref (var, ctx));
6429 if (rvarp && ctx->for_simd_scan_phase)
6430 {
6431 if (x)
6432 gimplify_and_add (x, &llist[0]);
6433 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6434 if (x)
6435 gimplify_and_add (x, &llist[1]);
6436 break;
6437 }
6438 else if (rvarp)
6439 {
6440 if (x)
6441 {
6442 gimplify_and_add (x, &llist[0]);
6443
6444 tree ivar2 = unshare_expr (lvar);
6445 TREE_OPERAND (ivar2, 1) = sctx.idx;
6446 x = lang_hooks.decls.omp_clause_default_ctor
6447 (c, ivar2, build_outer_var_ref (var, ctx));
6448 gimplify_and_add (x, &llist[0]);
6449
6450 if (rvar2)
6451 {
6452 x = lang_hooks.decls.omp_clause_default_ctor
6453 (c, unshare_expr (rvar2),
6454 build_outer_var_ref (var, ctx));
6455 gimplify_and_add (x, &llist[0]);
6456 }
6457
6458 /* For types that need construction, add another
6459 private var which will be default constructed
6460 and optionally initialized with
6461 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
6462 loop we then assign this value instead of
6463 constructing and destructing it in each
6464 iteration. */
6465 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6466 gimple_add_tmp_var (nv);
6467 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6468 ? rvar2
6469 : ivar, 0),
6470 nv);
6471 x = lang_hooks.decls.omp_clause_default_ctor
6472 (c, nv, build_outer_var_ref (var, ctx));
6473 gimplify_and_add (x, ilist);
6474
6475 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6476 {
6477 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6478 x = DECL_VALUE_EXPR (new_vard);
6479 tree vexpr = nv;
6480 if (new_vard != new_var)
6481 vexpr = build_fold_addr_expr (nv);
6482 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6483 lower_omp (&tseq, ctx);
6484 SET_DECL_VALUE_EXPR (new_vard, x);
6485 gimple_seq_add_seq (ilist, tseq);
6486 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6487 }
6488
6489 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6490 if (x)
6491 gimplify_and_add (x, dlist);
6492 }
6493
6494 tree ref = build_outer_var_ref (var, ctx);
6495 x = unshare_expr (ivar);
6496 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6497 ref);
6498 gimplify_and_add (x, &llist[0]);
6499
6500 ref = build_outer_var_ref (var, ctx);
6501 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6502 rvar);
6503 gimplify_and_add (x, &llist[3]);
6504
6505 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6506 if (new_vard == new_var)
6507 SET_DECL_VALUE_EXPR (new_var, lvar);
6508 else
6509 SET_DECL_VALUE_EXPR (new_vard,
6510 build_fold_addr_expr (lvar));
6511
6512 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6513 if (x)
6514 gimplify_and_add (x, &llist[1]);
6515
6516 tree ivar2 = unshare_expr (lvar);
6517 TREE_OPERAND (ivar2, 1) = sctx.idx;
6518 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6519 if (x)
6520 gimplify_and_add (x, &llist[1]);
6521
6522 if (rvar2)
6523 {
6524 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6525 if (x)
6526 gimplify_and_add (x, &llist[1]);
6527 }
6528 break;
6529 }
6530 if (x)
6531 gimplify_and_add (x, &llist[0]);
6532 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6533 {
6534 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6535 lower_omp (&tseq, ctx);
6536 gimple_seq_add_seq (&llist[0], tseq);
6537 }
6538 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6539 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6540 lower_omp (&tseq, ctx);
6541 gimple_seq_add_seq (&llist[1], tseq);
6542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6543 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6544 if (new_vard == new_var)
6545 SET_DECL_VALUE_EXPR (new_var, lvar);
6546 else
6547 SET_DECL_VALUE_EXPR (new_vard,
6548 build_fold_addr_expr (lvar));
6549 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6550 if (x)
6551 gimplify_and_add (x, &llist[1]);
6552 break;
6553 }
6554 /* If this is a reference to a constant-size reduction var
6555 with a placeholder, we haven't emitted the initializer
6556 for it because it is undesirable if SIMD arrays are used.
6557 But if they aren't used, we need to emit the deferred
6558 initialization now. */
6559 else if (omp_privatize_by_reference (var) && is_simd)
6560 handle_simd_reference (clause_loc, new_vard, ilist);
6561
6562 tree lab2 = NULL_TREE;
6563 if (cond)
6564 {
6565 gimple *g;
6566 if (!is_parallel_ctx (ctx))
6567 {
6568 tree condv = create_tmp_var (boolean_type_node);
6569 tree m = build_simple_mem_ref (cond);
6570 g = gimple_build_assign (condv, m);
6571 gimple_seq_add_stmt (ilist, g);
6572 tree lab1
6573 = create_artificial_label (UNKNOWN_LOCATION);
6574 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6575 g = gimple_build_cond (NE_EXPR, condv,
6576 boolean_false_node,
6577 lab2, lab1);
6578 gimple_seq_add_stmt (ilist, g);
6579 gimple_seq_add_stmt (ilist,
6580 gimple_build_label (lab1));
6581 }
6582 g = gimple_build_assign (build_simple_mem_ref (cond),
6583 boolean_true_node);
6584 gimple_seq_add_stmt (ilist, g);
6585 }
6586 x = lang_hooks.decls.omp_clause_default_ctor
6587 (c, unshare_expr (new_var),
6588 cond ? NULL_TREE
6589 : build_outer_var_ref (var, ctx));
6590 if (x)
6591 gimplify_and_add (x, ilist);
6592
6593 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6594 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6595 {
6596 if (ctx->for_simd_scan_phase)
6597 goto do_dtor;
6598 if (x || (!is_simd
6599 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6600 {
6601 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6602 gimple_add_tmp_var (nv);
6603 ctx->cb.decl_map->put (new_vard, nv);
6604 x = lang_hooks.decls.omp_clause_default_ctor
6605 (c, nv, build_outer_var_ref (var, ctx));
6606 if (x)
6607 gimplify_and_add (x, ilist);
6608 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6609 {
6610 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6611 tree vexpr = nv;
6612 if (new_vard != new_var)
6613 vexpr = build_fold_addr_expr (nv);
6614 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6615 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6616 lower_omp (&tseq, ctx);
6617 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6618 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6619 gimple_seq_add_seq (ilist, tseq);
6620 }
6621 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6622 if (is_simd && ctx->scan_exclusive)
6623 {
6624 tree nv2
6625 = create_tmp_var_raw (TREE_TYPE (new_var));
6626 gimple_add_tmp_var (nv2);
6627 ctx->cb.decl_map->put (nv, nv2);
6628 x = lang_hooks.decls.omp_clause_default_ctor
6629 (c, nv2, build_outer_var_ref (var, ctx));
6630 gimplify_and_add (x, ilist);
6631 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6632 if (x)
6633 gimplify_and_add (x, dlist);
6634 }
6635 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6636 if (x)
6637 gimplify_and_add (x, dlist);
6638 }
6639 else if (is_simd
6640 && ctx->scan_exclusive
6641 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6642 {
6643 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6644 gimple_add_tmp_var (nv2);
6645 ctx->cb.decl_map->put (new_vard, nv2);
6646 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6647 if (x)
6648 gimplify_and_add (x, dlist);
6649 }
6650 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6651 goto do_dtor;
6652 }
6653
6654 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6655 {
6656 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6657 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6658 && is_omp_target (ctx->stmt))
6659 {
6660 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6661 tree oldv = NULL_TREE;
6662 gcc_assert (d);
6663 if (DECL_HAS_VALUE_EXPR_P (d))
6664 oldv = DECL_VALUE_EXPR (d);
6665 SET_DECL_VALUE_EXPR (d, new_vard);
6666 DECL_HAS_VALUE_EXPR_P (d) = 1;
6667 lower_omp (&tseq, ctx);
6668 if (oldv)
6669 SET_DECL_VALUE_EXPR (d, oldv);
6670 else
6671 {
6672 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6673 DECL_HAS_VALUE_EXPR_P (d) = 0;
6674 }
6675 }
6676 else
6677 lower_omp (&tseq, ctx);
6678 gimple_seq_add_seq (ilist, tseq);
6679 }
6680 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6681 if (is_simd)
6682 {
6683 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6684 lower_omp (&tseq, ctx);
6685 gimple_seq_add_seq (dlist, tseq);
6686 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6687 }
6688 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6689 if (cond)
6690 {
6691 if (lab2)
6692 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6693 break;
6694 }
6695 goto do_dtor;
6696 }
6697 else
6698 {
6699 x = omp_reduction_init (c, TREE_TYPE (new_var));
6700 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6701 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6702
6703 if (cond)
6704 {
6705 gimple *g;
6706 tree lab2 = NULL_TREE;
6707 /* GOMP_taskgroup_reduction_register memsets the whole
6708 array to zero. If the initializer is zero, we don't
6709 need to initialize it again; just mark it as ever
6710 used unconditionally, i.e. cond = true. */
6711 if (initializer_zerop (x))
6712 {
6713 g = gimple_build_assign (build_simple_mem_ref (cond),
6714 boolean_true_node);
6715 gimple_seq_add_stmt (ilist, g);
6716 break;
6717 }
6718
6719 /* Otherwise, emit
6720 if (!cond) { cond = true; new_var = x; } */
6721 if (!is_parallel_ctx (ctx))
6722 {
6723 tree condv = create_tmp_var (boolean_type_node);
6724 tree m = build_simple_mem_ref (cond);
6725 g = gimple_build_assign (condv, m);
6726 gimple_seq_add_stmt (ilist, g);
6727 tree lab1
6728 = create_artificial_label (UNKNOWN_LOCATION);
6729 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6730 g = gimple_build_cond (NE_EXPR, condv,
6731 boolean_false_node,
6732 lab2, lab1);
6733 gimple_seq_add_stmt (ilist, g);
6734 gimple_seq_add_stmt (ilist,
6735 gimple_build_label (lab1));
6736 }
6737 g = gimple_build_assign (build_simple_mem_ref (cond),
6738 boolean_true_node);
6739 gimple_seq_add_stmt (ilist, g);
6740 gimplify_assign (new_var, x, ilist);
6741 if (lab2)
6742 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6743 break;
6744 }
6745
6746 /* reduction(-:var) sums up the partial results, so it
6747 acts identically to reduction(+:var). */
6748 if (code == MINUS_EXPR)
6749 code = PLUS_EXPR;
6750
6751 bool is_truth_op
6752 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6753 tree new_vard = new_var;
6754 if (is_simd && omp_privatize_by_reference (var))
6755 {
6756 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6757 new_vard = TREE_OPERAND (new_var, 0);
6758 gcc_assert (DECL_P (new_vard));
6759 }
6760 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6761 if (is_simd
6762 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6763 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6764 rvarp = &rvar;
6765 if (is_simd
6766 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6767 ivar, lvar, rvarp,
6768 &rvar2))
6769 {
6770 if (new_vard != new_var)
6771 {
6772 SET_DECL_VALUE_EXPR (new_vard,
6773 build_fold_addr_expr (lvar));
6774 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6775 }
6776
6777 tree ref = build_outer_var_ref (var, ctx);
6778
6779 if (rvarp)
6780 {
6781 if (ctx->for_simd_scan_phase)
6782 break;
6783 gimplify_assign (ivar, ref, &llist[0]);
6784 ref = build_outer_var_ref (var, ctx);
6785 gimplify_assign (ref, rvar, &llist[3]);
6786 break;
6787 }
6788
6789 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6790
6791 if (sctx.is_simt)
6792 {
6793 if (!simt_lane)
6794 simt_lane = create_tmp_var (unsigned_type_node);
6795 x = build_call_expr_internal_loc
6796 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6797 TREE_TYPE (ivar), 2, ivar, simt_lane);
6798 /* Make sure x is evaluated unconditionally. */
6799 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6800 gimplify_assign (bfly_var, x, &llist[2]);
6801 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6802 gimplify_assign (ivar, x, &llist[2]);
6803 }
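/* On SIMT targets the LLIST[2] sequence built here is one butterfly
   step, schematically

     bfly = GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);  // partner lane's value
     ivar = ivar <op> bfly;

   which the loop emitted further below repeats for simt_lane
   = 1, 2, 4, ... up to the SIMT width.  */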
6804 tree ivar2 = ivar;
6805 tree ref2 = ref;
6806 if (is_truth_op)
6807 {
6808 tree zero = build_zero_cst (TREE_TYPE (ivar));
6809 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6810 boolean_type_node, ivar,
6811 zero);
6812 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6813 boolean_type_node, ref,
6814 zero);
6815 }
6816 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6817 if (is_truth_op)
6818 x = fold_convert (TREE_TYPE (ref), x);
6819 ref = build_outer_var_ref (var, ctx);
6820 gimplify_assign (ref, x, &llist[1]);
6821
6822 }
6823 else
6824 {
6825 lower_private_allocate (var, new_var, allocator,
6826 allocate_ptr, ilist, ctx,
6827 false, NULL_TREE);
6828 if (omp_privatize_by_reference (var) && is_simd)
6829 handle_simd_reference (clause_loc, new_vard, ilist);
6830 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6831 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6832 break;
6833 gimplify_assign (new_var, x, ilist);
6834 if (is_simd)
6835 {
6836 tree ref = build_outer_var_ref (var, ctx);
6837 tree new_var2 = new_var;
6838 tree ref2 = ref;
6839 if (is_truth_op)
6840 {
6841 tree zero = build_zero_cst (TREE_TYPE (new_var));
6842 new_var2
6843 = fold_build2_loc (clause_loc, NE_EXPR,
6844 boolean_type_node, new_var,
6845 zero);
6846 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6847 boolean_type_node, ref,
6848 zero);
6849 }
6850 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6851 if (is_truth_op)
6852 x = fold_convert (TREE_TYPE (new_var), x);
6853 ref = build_outer_var_ref (var, ctx);
6854 gimplify_assign (ref, x, dlist);
6855 }
6856 if (allocator)
6857 goto do_dtor;
6858 }
6859 }
6860 break;
6861
6862 default:
6863 gcc_unreachable ();
6864 }
6865 }
6866 }
6867 if (tskred_avar)
6868 {
6869 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6870 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6871 }
6872
6873 if (known_eq (sctx.max_vf, 1U))
6874 {
6875 sctx.is_simt = false;
6876 if (ctx->lastprivate_conditional_map)
6877 {
6878 if (gimple_omp_for_combined_into_p (ctx->stmt))
6879 {
6880 /* Signal to lower_omp_1 that it should use the parent context. */
6881 ctx->combined_into_simd_safelen1 = true;
6882 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6883 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6884 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6885 {
6886 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6887 omp_context *outer = ctx->outer;
6888 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6889 outer = outer->outer;
6890 tree *v = ctx->lastprivate_conditional_map->get (o);
6891 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6892 tree *pv = outer->lastprivate_conditional_map->get (po);
6893 *v = *pv;
6894 }
6895 }
6896 else
6897 {
6898 /* When not vectorized, treat lastprivate(conditional:) like
6899 normal lastprivate, as there will be just one simd lane
6900 writing the privatized variable. */
6901 delete ctx->lastprivate_conditional_map;
6902 ctx->lastprivate_conditional_map = NULL;
6903 }
6904 }
6905 }
6906
6907 if (nonconst_simd_if)
6908 {
6909 if (sctx.lane == NULL_TREE)
6910 {
6911 sctx.idx = create_tmp_var (unsigned_type_node);
6912 sctx.lane = create_tmp_var (unsigned_type_node);
6913 }
6914 /* FIXME: For now. */
6915 sctx.is_simt = false;
6916 }
6917
6918 if (sctx.lane || sctx.is_simt)
6919 {
6920 uid = create_tmp_var (ptr_type_node, "simduid");
6921 /* We don't want uninit warnings on simduid; it is always uninitialized,
6922 since we use it not for its value but only for its DECL_UID. */
6923 suppress_warning (uid, OPT_Wuninitialized);
6924 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6925 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6926 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6927 gimple_omp_for_set_clauses (ctx->stmt, c);
6928 }
6929 /* Emit calls denoting privatized variables and initializing a pointer to
6930 a structure that holds private variables as fields after the ompdevlow pass. */
6931 if (sctx.is_simt)
6932 {
6933 sctx.simt_eargs[0] = uid;
6934 gimple *g
6935 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6936 gimple_call_set_lhs (g, uid);
6937 gimple_seq_add_stmt (ilist, g);
6938 sctx.simt_eargs.release ();
6939
6940 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6941 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6942 gimple_call_set_lhs (g, simtrec);
6943 gimple_seq_add_stmt (ilist, g);
6944 }
6945 if (sctx.lane)
6946 {
6947 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6948 2 + (nonconst_simd_if != NULL),
6949 uid, integer_zero_node,
6950 nonconst_simd_if);
6951 gimple_call_set_lhs (g, sctx.lane);
6952 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6953 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6954 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6955 build_int_cst (unsigned_type_node, 0));
6956 gimple_seq_add_stmt (ilist, g);
6957 if (sctx.lastlane)
6958 {
6959 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6960 2, uid, sctx.lane);
6961 gimple_call_set_lhs (g, sctx.lastlane);
6962 gimple_seq_add_stmt (dlist, g);
6963 gimple_seq_add_seq (dlist, llist[3]);
6964 }
6965 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6966 if (llist[2])
6967 {
6968 tree simt_vf = create_tmp_var (unsigned_type_node);
6969 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6970 gimple_call_set_lhs (g, simt_vf);
6971 gimple_seq_add_stmt (dlist, g);
6972
6973 tree t = build_int_cst (unsigned_type_node, 1);
6974 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6975 gimple_seq_add_stmt (dlist, g);
6976
6977 t = build_int_cst (unsigned_type_node, 0);
6978 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6979 gimple_seq_add_stmt (dlist, g);
6980
6981 tree body = create_artificial_label (UNKNOWN_LOCATION);
6982 tree header = create_artificial_label (UNKNOWN_LOCATION);
6983 tree end = create_artificial_label (UNKNOWN_LOCATION);
6984 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6985 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6986
6987 gimple_seq_add_seq (dlist, llist[2]);
6988
6989 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6990 gimple_seq_add_stmt (dlist, g);
6991
6992 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6993 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6994 gimple_seq_add_stmt (dlist, g);
6995
6996 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6997 }
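/* Control flow just emitted around LLIST[2], schematically:

     simt_lane = 1;
     sctx.idx = 0;
     goto header;
   body:
     ... llist[2]: one butterfly reduction step ...
     simt_lane = simt_lane << 1;
   header:
     if (simt_lane < simt_vf) goto body;
   end:  */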
6998 for (int i = 0; i < 2; i++)
6999 if (llist[i])
7000 {
7001 tree vf = create_tmp_var (unsigned_type_node);
7002 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
7003 gimple_call_set_lhs (g, vf);
7004 gimple_seq *seq = i == 0 ? ilist : dlist;
7005 gimple_seq_add_stmt (seq, g);
7006 tree t = build_int_cst (unsigned_type_node, 0);
7007 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
7008 gimple_seq_add_stmt (seq, g);
7009 tree body = create_artificial_label (UNKNOWN_LOCATION);
7010 tree header = create_artificial_label (UNKNOWN_LOCATION);
7011 tree end = create_artificial_label (UNKNOWN_LOCATION);
7012 gimple_seq_add_stmt (seq, gimple_build_goto (header));
7013 gimple_seq_add_stmt (seq, gimple_build_label (body));
7014 gimple_seq_add_seq (seq, llist[i]);
7015 t = build_int_cst (unsigned_type_node, 1);
7016 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
7017 gimple_seq_add_stmt (seq, g);
7018 gimple_seq_add_stmt (seq, gimple_build_label (header));
7019 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
7020 gimple_seq_add_stmt (seq, g);
7021 gimple_seq_add_stmt (seq, gimple_build_label (end));
7022 }
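/* Likewise each non-empty LLIST[0] / LLIST[1] sequence is wrapped in a
   loop over the per-lane array, roughly

     for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
       ... llist[i] ...

   emitted into ILIST for i == 0 (constructors/initializers) and into
   DLIST for i == 1 (merges/destructors).  */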
7023 }
7024 if (sctx.is_simt)
7025 {
7026 gimple_seq_add_seq (dlist, sctx.simt_dlist);
7027 gimple *g
7028 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
7029 gimple_seq_add_stmt (dlist, g);
7030 }
7031
7032 /* The copyin sequence is not to be executed by the main thread, since
7033 that would result in self-copies. Such a self-copy may be invisible for
7034 scalars, but it certainly is visible to C++ operator=. */
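/* I.e. the sequence below is guarded roughly as

     if (__builtin_omp_get_thread_num () != 0)
       ... copyin_seq ...  */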
7035 if (copyin_seq)
7036 {
7037 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
7038 0);
7039 x = build2 (NE_EXPR, boolean_type_node, x,
7040 build_int_cst (TREE_TYPE (x), 0));
7041 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
7042 gimplify_and_add (x, ilist);
7043 }
7044
7045 /* If any copyin variable is passed by reference, we must ensure the
7046 master thread doesn't modify it before it is copied over in all
7047 threads. Similarly for variables in both firstprivate and
7048 lastprivate clauses we need to ensure the lastprivate copying
7049 happens after firstprivate copying in all threads. And similarly
7050 for UDRs if the initializer expression refers to omp_orig. */
7051 if (copyin_by_ref || lastprivate_firstprivate
7052 || (reduction_omp_orig_ref
7053 && !ctx->scan_inclusive
7054 && !ctx->scan_exclusive))
7055 {
7056 /* Don't add any barrier for #pragma omp simd or
7057 #pragma omp distribute. */
7058 if (!is_task_ctx (ctx)
7059 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7060 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7061 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7062 }
7063
7064 /* If max_vf is non-zero, then we can use only a vectorization factor
7065 up to the max_vf we chose. So stick it into the safelen clause. */
7066 if (maybe_ne (sctx.max_vf, 0U))
7067 {
7068 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7069 OMP_CLAUSE_SAFELEN);
7070 poly_uint64 safe_len;
7071 if (c == NULL_TREE
7072 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7073 && maybe_gt (safe_len, sctx.max_vf)))
7074 {
7075 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7076 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7077 sctx.max_vf);
7078 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7079 gimple_omp_for_set_clauses (ctx->stmt, c);
7080 }
7081 }
7082 }
7083
7084 /* Create temporary variables for lastprivate(conditional:) implementation
7085 in context CTX with CLAUSES. */
7086
7087 static void
7088 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
7089 {
7090 tree iter_type = NULL_TREE;
7091 tree cond_ptr = NULL_TREE;
7092 tree iter_var = NULL_TREE;
7093 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7094 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
7095 tree next = *clauses;
7096 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
7097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7098 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7099 {
7100 if (is_simd)
7101 {
7102 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
7103 gcc_assert (cc);
7104 if (iter_type == NULL_TREE)
7105 {
7106 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7107 iter_var = create_tmp_var_raw (iter_type);
7108 DECL_CONTEXT (iter_var) = current_function_decl;
7109 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7110 DECL_CHAIN (iter_var) = ctx->block_vars;
7111 ctx->block_vars = iter_var;
7112 tree c3
7113 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7114 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7115 OMP_CLAUSE_DECL (c3) = iter_var;
7116 OMP_CLAUSE_CHAIN (c3) = *clauses;
7117 *clauses = c3;
7118 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7119 }
7120 next = OMP_CLAUSE_CHAIN (cc);
7121 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7122 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7123 ctx->lastprivate_conditional_map->put (o, v);
7124 continue;
7125 }
7126 if (iter_type == NULL)
7127 {
7128 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7129 {
7130 struct omp_for_data fd;
7131 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7132 NULL);
7133 iter_type = unsigned_type_for (fd.iter_type);
7134 }
7135 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7136 iter_type = unsigned_type_node;
7137 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7138 if (c2)
7139 {
7140 cond_ptr
7141 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7142 OMP_CLAUSE_DECL (c2) = cond_ptr;
7143 }
7144 else
7145 {
7146 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7147 DECL_CONTEXT (cond_ptr) = current_function_decl;
7148 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7149 DECL_CHAIN (cond_ptr) = ctx->block_vars;
7150 ctx->block_vars = cond_ptr;
7151 c2 = build_omp_clause (UNKNOWN_LOCATION,
7152 OMP_CLAUSE__CONDTEMP_);
7153 OMP_CLAUSE_DECL (c2) = cond_ptr;
7154 OMP_CLAUSE_CHAIN (c2) = *clauses;
7155 *clauses = c2;
7156 }
7157 iter_var = create_tmp_var_raw (iter_type);
7158 DECL_CONTEXT (iter_var) = current_function_decl;
7159 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7160 DECL_CHAIN (iter_var) = ctx->block_vars;
7161 ctx->block_vars = iter_var;
7162 tree c3
7163 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7164 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7165 OMP_CLAUSE_DECL (c3) = iter_var;
7166 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7167 OMP_CLAUSE_CHAIN (c2) = c3;
7168 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7169 }
7170 tree v = create_tmp_var_raw (iter_type);
7171 DECL_CONTEXT (v) = current_function_decl;
7172 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7173 DECL_CHAIN (v) = ctx->block_vars;
7174 ctx->block_vars = v;
7175 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7176 ctx->lastprivate_conditional_map->put (o, v);
7177 }
7178 }
7179
7180
7181 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7182 both parallel and workshare constructs. PREDICATE may be NULL if it's
7183 always true. BODY_P is the sequence into which to insert early
7184 initialization if needed, STMT_LIST is where the non-conditional
7185 lastprivate handling goes, and CSTMT_LIST is a sequence that needs to
7186 be run in a critical section. */
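/* Illustrative sketch, with hypothetical names: for

	#pragma omp parallel for lastprivate (x)

   the sequence appended to STMT_LIST is, roughly,

	if (last_iteration_p)		// PREDICATE
	  x_orig = x_priv;		// omp_clause_assign_op

   where x_priv is the privatized copy looked up in CTX and x_orig the
   reference obtained from build_outer_var_ref.  */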
7187
7188 static void
7189 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
7190 gimple_seq *stmt_list, gimple_seq *cstmt_list,
7191 omp_context *ctx)
7192 {
7193 tree x, c, label = NULL, orig_clauses = clauses;
7194 bool par_clauses = false;
7195 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
7196 unsigned HOST_WIDE_INT conditional_off = 0;
7197 gimple_seq post_stmt_list = NULL;
7198
7199 /* Early exit if there are no lastprivate or linear clauses. */
7200 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
7201 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
7202 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
7203 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
7204 break;
7205 if (clauses == NULL)
7206 {
7207 /* If this was a workshare construct, see if it had been combined
7208 with its parallel. In that case, look for the clauses on the
7209 parallel statement itself. */
7210 if (is_parallel_ctx (ctx))
7211 return;
7212
7213 ctx = ctx->outer;
7214 if (ctx == NULL || !is_parallel_ctx (ctx))
7215 return;
7216
7217 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7218 OMP_CLAUSE_LASTPRIVATE);
7219 if (clauses == NULL)
7220 return;
7221 par_clauses = true;
7222 }
7223
7224 bool maybe_simt = false;
7225 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7226 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7227 {
7228 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
7229 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
7230 if (simduid)
7231 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
7232 }
7233
7234 if (predicate)
7235 {
7236 gcond *stmt;
7237 tree label_true, arm1, arm2;
7238 enum tree_code pred_code = TREE_CODE (predicate);
7239
7240 label = create_artificial_label (UNKNOWN_LOCATION);
7241 label_true = create_artificial_label (UNKNOWN_LOCATION);
7242 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
7243 {
7244 arm1 = TREE_OPERAND (predicate, 0);
7245 arm2 = TREE_OPERAND (predicate, 1);
7246 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7247 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
7248 }
7249 else
7250 {
7251 arm1 = predicate;
7252 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
7253 arm2 = boolean_false_node;
7254 pred_code = NE_EXPR;
7255 }
7256 if (maybe_simt)
7257 {
7258 c = build2 (pred_code, boolean_type_node, arm1, arm2);
7259 c = fold_convert (integer_type_node, c);
7260 simtcond = create_tmp_var (integer_type_node);
7261 gimplify_assign (simtcond, c, stmt_list);
7262 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
7263 1, simtcond);
7264 c = create_tmp_var (integer_type_node);
7265 gimple_call_set_lhs (g, c);
7266 gimple_seq_add_stmt (stmt_list, g);
7267 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
7268 label_true, label);
7269 }
7270 else
7271 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
7272 gimple_seq_add_stmt (stmt_list, stmt);
7273 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
7274 }
7275
7276 tree cond_ptr = NULL_TREE;
7277 for (c = clauses; c ;)
7278 {
7279 tree var, new_var;
7280 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7281 gimple_seq *this_stmt_list = stmt_list;
7282 tree lab2 = NULL_TREE;
7283
7284 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7285 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7286 && ctx->lastprivate_conditional_map
7287 && !ctx->combined_into_simd_safelen1)
7288 {
7289 gcc_assert (body_p);
7290 if (simduid)
7291 goto next;
7292 if (cond_ptr == NULL_TREE)
7293 {
7294 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
7295 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
7296 }
7297 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
7298 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7299 tree v = *ctx->lastprivate_conditional_map->get (o);
7300 gimplify_assign (v, build_zero_cst (type), body_p);
7301 this_stmt_list = cstmt_list;
7302 tree mem;
7303 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
7304 {
7305 mem = build2 (MEM_REF, type, cond_ptr,
7306 build_int_cst (TREE_TYPE (cond_ptr),
7307 conditional_off));
7308 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
7309 }
7310 else
7311 mem = build4 (ARRAY_REF, type, cond_ptr,
7312 size_int (conditional_off++), NULL_TREE, NULL_TREE);
7313 tree mem2 = copy_node (mem);
7314 gimple_seq seq = NULL;
7315 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
7316 gimple_seq_add_seq (this_stmt_list, seq);
7317 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7318 lab2 = create_artificial_label (UNKNOWN_LOCATION);
7319 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
7320 gimple_seq_add_stmt (this_stmt_list, g);
7321 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
7322 gimplify_assign (mem2, v, this_stmt_list);
7323 }
7324 else if (predicate
7325 && ctx->combined_into_simd_safelen1
7326 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7327 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
7328 && ctx->lastprivate_conditional_map)
7329 this_stmt_list = &post_stmt_list;
7330
7331 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7332 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7333 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7334 {
7335 var = OMP_CLAUSE_DECL (c);
7336 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7337 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7338 && is_taskloop_ctx (ctx))
7339 {
7340 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
7341 new_var = lookup_decl (var, ctx->outer);
7342 }
7343 else
7344 {
7345 new_var = lookup_decl (var, ctx);
7346 /* Avoid uninitialized warnings for lastprivate and
7347 for linear iterators. */
7348 if (predicate
7349 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7350 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
7351 suppress_warning (new_var, OPT_Wuninitialized);
7352 }
7353
7354 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
7355 {
7356 tree val = DECL_VALUE_EXPR (new_var);
7357 if (TREE_CODE (val) == ARRAY_REF
7358 && VAR_P (TREE_OPERAND (val, 0))
7359 && lookup_attribute ("omp simd array",
7360 DECL_ATTRIBUTES (TREE_OPERAND (val,
7361 0))))
7362 {
7363 if (lastlane == NULL)
7364 {
7365 lastlane = create_tmp_var (unsigned_type_node);
7366 gcall *g
7367 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
7368 2, simduid,
7369 TREE_OPERAND (val, 1));
7370 gimple_call_set_lhs (g, lastlane);
7371 gimple_seq_add_stmt (this_stmt_list, g);
7372 }
7373 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
7374 TREE_OPERAND (val, 0), lastlane,
7375 NULL_TREE, NULL_TREE);
7376 TREE_THIS_NOTRAP (new_var) = 1;
7377 }
7378 }
7379 else if (maybe_simt)
7380 {
7381 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7382 ? DECL_VALUE_EXPR (new_var)
7383 : new_var);
7384 if (simtlast == NULL)
7385 {
7386 simtlast = create_tmp_var (unsigned_type_node);
7387 gcall *g = gimple_build_call_internal
7388 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7389 gimple_call_set_lhs (g, simtlast);
7390 gimple_seq_add_stmt (this_stmt_list, g);
7391 }
7392 x = build_call_expr_internal_loc
7393 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7394 TREE_TYPE (val), 2, val, simtlast);
7395 new_var = unshare_expr (new_var);
7396 gimplify_assign (new_var, x, this_stmt_list);
7397 new_var = unshare_expr (new_var);
7398 }
7399
7400 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7401 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7402 {
7403 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7404 gimple_seq_add_seq (this_stmt_list,
7405 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7406 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7407 }
7408 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7409 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7410 {
7411 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7412 gimple_seq_add_seq (this_stmt_list,
7413 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7414 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7415 }
7416
7417 x = NULL_TREE;
7418 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7419 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7420 && is_taskloop_ctx (ctx))
7421 {
7422 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7423 ctx->outer->outer);
7424 if (is_global_var (ovar))
7425 x = ovar;
7426 }
7427 if (!x)
7428 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7429 if (omp_privatize_by_reference (var))
7430 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7431 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7432 gimplify_and_add (x, this_stmt_list);
7433
7434 if (lab2)
7435 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7436 }
7437
7438 next:
7439 c = OMP_CLAUSE_CHAIN (c);
7440 if (c == NULL && !par_clauses)
7441 {
7442 /* If this was a workshare construct, see if it had been combined
7443 with its parallel. In that case, continue looking for the
7444 clauses also on the parallel statement itself. */
7445 if (is_parallel_ctx (ctx))
7446 break;
7447
7448 ctx = ctx->outer;
7449 if (ctx == NULL || !is_parallel_ctx (ctx))
7450 break;
7451
7452 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7453 OMP_CLAUSE_LASTPRIVATE);
7454 par_clauses = true;
7455 }
7456 }
7457
7458 if (label)
7459 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7460 gimple_seq_add_seq (stmt_list, post_stmt_list);
7461 }
7462
7463 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7464 (which might be a placeholder). INNER is true if this is an inner
7465 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7466 join markers. Generate the before-loop forking sequence in
7467 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7468 general form of these sequences is
7469
7470 GOACC_REDUCTION_SETUP
7471 GOACC_FORK
7472 GOACC_REDUCTION_INIT
7473 ...
7474 GOACC_REDUCTION_FINI
7475 GOACC_JOIN
7476 GOACC_REDUCTION_TEARDOWN. */
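/* Illustrative sketch: for

	#pragma acc parallel loop reduction (+:sum)

   the calls built near the end of the per-clause loop below are,
   schematically,

	v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, +, off);
	GOACC_FORK
	v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, off);
	... loop body accumulates into the private copy ...
	v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, off);
	GOACC_JOIN
	outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, off);

   where v1/v2/v3 are the temporaries created below and off is the
   position in the reduction buffer.  */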
7477
7478 static void
7479 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7480 gcall *fork, gcall *private_marker, gcall *join,
7481 gimple_seq *fork_seq, gimple_seq *join_seq,
7482 omp_context *ctx)
7483 {
7484 gimple_seq before_fork = NULL;
7485 gimple_seq after_fork = NULL;
7486 gimple_seq before_join = NULL;
7487 gimple_seq after_join = NULL;
7488 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7489 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7490 unsigned offset = 0;
7491
7492 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7493 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7494 {
7495 /* No 'reduction' clauses on OpenACC 'kernels'. */
7496 gcc_checking_assert (!is_oacc_kernels (ctx));
7497 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7498 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7499
7500 tree orig = OMP_CLAUSE_DECL (c);
7501 tree var = maybe_lookup_decl (orig, ctx);
7502 tree ref_to_res = NULL_TREE;
7503 tree incoming, outgoing, v1, v2, v3;
7504 bool is_private = false;
7505
7506 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7507 if (rcode == MINUS_EXPR)
7508 rcode = PLUS_EXPR;
7509 else if (rcode == TRUTH_ANDIF_EXPR)
7510 rcode = BIT_AND_EXPR;
7511 else if (rcode == TRUTH_ORIF_EXPR)
7512 rcode = BIT_IOR_EXPR;
7513 tree op = build_int_cst (unsigned_type_node, rcode);
7514
7515 if (!var)
7516 var = orig;
7517
7518 incoming = outgoing = var;
7519
7520 if (!inner)
7521 {
7522 /* See if an outer construct also reduces this variable. */
7523 omp_context *outer = ctx;
7524
7525 while (omp_context *probe = outer->outer)
7526 {
7527 enum gimple_code type = gimple_code (probe->stmt);
7528 tree cls;
7529
7530 switch (type)
7531 {
7532 case GIMPLE_OMP_FOR:
7533 cls = gimple_omp_for_clauses (probe->stmt);
7534 break;
7535
7536 case GIMPLE_OMP_TARGET:
7537 /* No 'reduction' clauses inside OpenACC 'kernels'
7538 regions. */
7539 gcc_checking_assert (!is_oacc_kernels (probe));
7540
7541 if (!is_gimple_omp_offloaded (probe->stmt))
7542 goto do_lookup;
7543
7544 cls = gimple_omp_target_clauses (probe->stmt);
7545 break;
7546
7547 default:
7548 goto do_lookup;
7549 }
7550
7551 outer = probe;
7552 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7553 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7554 && orig == OMP_CLAUSE_DECL (cls))
7555 {
7556 incoming = outgoing = lookup_decl (orig, probe);
7557 goto has_outer_reduction;
7558 }
7559 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7560 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7561 && orig == OMP_CLAUSE_DECL (cls))
7562 {
7563 is_private = true;
7564 goto do_lookup;
7565 }
7566 }
7567
7568 do_lookup:
7569 /* This is the outermost construct with this reduction;
7570 see if there's a mapping for it. */
7571 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7572 && maybe_lookup_field (orig, outer) && !is_private)
7573 {
7574 ref_to_res = build_receiver_ref (orig, false, outer);
7575 if (omp_privatize_by_reference (orig))
7576 ref_to_res = build_simple_mem_ref (ref_to_res);
7577
7578 tree type = TREE_TYPE (var);
7579 if (POINTER_TYPE_P (type))
7580 type = TREE_TYPE (type);
7581
7582 outgoing = var;
7583 incoming = omp_reduction_init_op (loc, rcode, type);
7584 }
7585 else
7586 {
7587 /* Look in enclosing contexts for the reduction variable;
7588 use the original if no mapping is found. */
7589 tree t = NULL_TREE;
7590 omp_context *c = ctx->outer;
7591 while (c && !t)
7592 {
7593 t = maybe_lookup_decl (orig, c);
7594 c = c->outer;
7595 }
7596 incoming = outgoing = (t ? t : orig);
7597 }
7598
7599 has_outer_reduction:;
7600 }
7601
7602 if (!ref_to_res)
7603 ref_to_res = integer_zero_node;
7604
7605 if (omp_privatize_by_reference (orig))
7606 {
7607 tree type = TREE_TYPE (var);
7608 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7609
7610 if (!inner)
7611 {
7612 tree x = create_tmp_var (TREE_TYPE (type), id);
7613 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7614 }
7615
7616 v1 = create_tmp_var (type, id);
7617 v2 = create_tmp_var (type, id);
7618 v3 = create_tmp_var (type, id);
7619
7620 gimplify_assign (v1, var, fork_seq);
7621 gimplify_assign (v2, var, fork_seq);
7622 gimplify_assign (v3, var, fork_seq);
7623
7624 var = build_simple_mem_ref (var);
7625 v1 = build_simple_mem_ref (v1);
7626 v2 = build_simple_mem_ref (v2);
7627 v3 = build_simple_mem_ref (v3);
7628 outgoing = build_simple_mem_ref (outgoing);
7629
7630 if (!TREE_CONSTANT (incoming))
7631 incoming = build_simple_mem_ref (incoming);
7632 }
7633 else
7634 v1 = v2 = v3 = var;
7635
7636 /* Determine the position in the reduction buffer, which may be used
7637 by the target. The parser has ensured that this is not a
7638 variable-sized type. */
7639 fixed_size_mode mode
7640 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7641 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7642 offset = (offset + align - 1) & ~(align - 1);
7643 tree off = build_int_cst (sizetype, offset);
7644 offset += GET_MODE_SIZE (mode);
7645
7646 if (!init_code)
7647 {
7648 init_code = build_int_cst (integer_type_node,
7649 IFN_GOACC_REDUCTION_INIT);
7650 fini_code = build_int_cst (integer_type_node,
7651 IFN_GOACC_REDUCTION_FINI);
7652 setup_code = build_int_cst (integer_type_node,
7653 IFN_GOACC_REDUCTION_SETUP);
7654 teardown_code = build_int_cst (integer_type_node,
7655 IFN_GOACC_REDUCTION_TEARDOWN);
7656 }
7657
7658 tree setup_call
7659 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7660 TREE_TYPE (var), 6, setup_code,
7661 unshare_expr (ref_to_res),
7662 incoming, level, op, off);
7663 tree init_call
7664 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7665 TREE_TYPE (var), 6, init_code,
7666 unshare_expr (ref_to_res),
7667 v1, level, op, off);
7668 tree fini_call
7669 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7670 TREE_TYPE (var), 6, fini_code,
7671 unshare_expr (ref_to_res),
7672 v2, level, op, off);
7673 tree teardown_call
7674 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7675 TREE_TYPE (var), 6, teardown_code,
7676 ref_to_res, v3, level, op, off);
7677
7678 gimplify_assign (v1, setup_call, &before_fork);
7679 gimplify_assign (v2, init_call, &after_fork);
7680 gimplify_assign (v3, fini_call, &before_join);
7681 gimplify_assign (outgoing, teardown_call, &after_join);
7682 }
7683
7684 /* Now stitch things together. */
7685 gimple_seq_add_seq (fork_seq, before_fork);
7686 if (private_marker)
7687 gimple_seq_add_stmt (fork_seq, private_marker);
7688 if (fork)
7689 gimple_seq_add_stmt (fork_seq, fork);
7690 gimple_seq_add_seq (fork_seq, after_fork);
7691
7692 gimple_seq_add_seq (join_seq, before_join);
7693 if (join)
7694 gimple_seq_add_stmt (join_seq, join);
7695 gimple_seq_add_seq (join_seq, after_join);
7696 }
7697
7698 /* Generate code to implement the REDUCTION clauses, append it
7699 to STMT_SEQP. CLIST, if non-NULL, is a pointer to a sequence
7700 that should also be emitted inside the critical section; in that
7701 case clear *CLIST afterwards, otherwise leave it as is and let
7702 the caller emit it itself. */
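/* Illustrative sketch, with hypothetical names sum_priv and prod_priv
   for the privatized copies: a single scalar clause

	#pragma omp parallel for reduction (+:sum)

   is merged with a relaxed atomic update, roughly

	#pragma omp atomic relaxed
	sum = sum + sum_priv;

   whereas two or more clauses, or an array/UDR reduction, wrap all the
   merges in one lock:

	GOMP_atomic_start ();
	sum = sum + sum_priv;
	prod = prod * prod_priv;
	GOMP_atomic_end ();  */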
7703
7704 static void
7705 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7706 gimple_seq *clist, omp_context *ctx)
7707 {
7708 gimple_seq sub_seq = NULL;
7709 gimple *stmt;
7710 tree x, c;
7711 int count = 0;
7712
7713 /* OpenACC loop reductions are handled elsewhere. */
7714 if (is_gimple_omp_oacc (ctx->stmt))
7715 return;
7716
7717 /* SIMD reductions are handled in lower_rec_input_clauses. */
7718 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7719 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7720 return;
7721
7722 /* inscan reductions are handled elsewhere. */
7723 if (ctx->scan_inclusive || ctx->scan_exclusive)
7724 return;
7725
7726 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7727 update in that case, otherwise use a lock. */
7728 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7729 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7730 && !OMP_CLAUSE_REDUCTION_TASK (c))
7731 {
7732 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7733 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7734 {
7735 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7736 count = -1;
7737 break;
7738 }
7739 count++;
7740 }
7741
7742 if (count == 0)
7743 return;
7744
7745 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7746 {
7747 tree var, ref, new_var, orig_var;
7748 enum tree_code code;
7749 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7750
7751 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7752 || OMP_CLAUSE_REDUCTION_TASK (c))
7753 continue;
7754
7755 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7756 orig_var = var = OMP_CLAUSE_DECL (c);
7757 if (TREE_CODE (var) == MEM_REF)
7758 {
7759 var = TREE_OPERAND (var, 0);
7760 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7761 var = TREE_OPERAND (var, 0);
7762 if (TREE_CODE (var) == ADDR_EXPR)
7763 var = TREE_OPERAND (var, 0);
7764 else
7765 {
7766 /* If this is a pointer- or reference-based array
7767 section, the var could be private in the outer
7768 context, e.g. on an orphaned loop construct. Pretend
7769 this is a private variable's outer reference. */
7770 ccode = OMP_CLAUSE_PRIVATE;
7771 if (TREE_CODE (var) == INDIRECT_REF)
7772 var = TREE_OPERAND (var, 0);
7773 }
7774 orig_var = var;
7775 if (is_variable_sized (var))
7776 {
7777 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7778 var = DECL_VALUE_EXPR (var);
7779 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7780 var = TREE_OPERAND (var, 0);
7781 gcc_assert (DECL_P (var));
7782 }
7783 }
7784 new_var = lookup_decl (var, ctx);
7785 if (var == OMP_CLAUSE_DECL (c)
7786 && omp_privatize_by_reference (var))
7787 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7788 ref = build_outer_var_ref (var, ctx, ccode);
7789 code = OMP_CLAUSE_REDUCTION_CODE (c);
7790
7791 /* reduction(-:var) sums up the partial results, so it acts
7792 identically to reduction(+:var). */
7793 if (code == MINUS_EXPR)
7794 code = PLUS_EXPR;
7795
7796 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7797 if (count == 1)
7798 {
7799 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7800
7801 addr = save_expr (addr);
7802 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7803 tree new_var2 = new_var;
7804 tree ref2 = ref;
7805 if (is_truth_op)
7806 {
7807 tree zero = build_zero_cst (TREE_TYPE (new_var));
7808 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7809 boolean_type_node, new_var, zero);
7810 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7811 ref, zero);
7812 }
7813 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7814 new_var2);
7815 if (is_truth_op)
7816 x = fold_convert (TREE_TYPE (new_var), x);
7817 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7818 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7819 gimplify_and_add (x, stmt_seqp);
7820 return;
7821 }
7822 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7823 {
7824 tree d = OMP_CLAUSE_DECL (c);
7825 tree type = TREE_TYPE (d);
7826 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7827 tree i = create_tmp_var (TREE_TYPE (v));
7828 tree ptype = build_pointer_type (TREE_TYPE (type));
7829 tree bias = TREE_OPERAND (d, 1);
7830 d = TREE_OPERAND (d, 0);
7831 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7832 {
7833 tree b = TREE_OPERAND (d, 1);
7834 b = maybe_lookup_decl (b, ctx);
7835 if (b == NULL)
7836 {
7837 b = TREE_OPERAND (d, 1);
7838 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7839 }
7840 if (integer_zerop (bias))
7841 bias = b;
7842 else
7843 {
7844 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7845 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7846 TREE_TYPE (b), b, bias);
7847 }
7848 d = TREE_OPERAND (d, 0);
7849 }
7850 /* For "ref", build_outer_var_ref already performed the dereference,
7851 so only "new_var" needs one. */
7852 if (TREE_CODE (d) == INDIRECT_REF)
7853 {
7854 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7855 gcc_assert (omp_privatize_by_reference (var)
7856 && var == orig_var);
7857 }
7858 else if (TREE_CODE (d) == ADDR_EXPR)
7859 {
7860 if (orig_var == var)
7861 {
7862 new_var = build_fold_addr_expr (new_var);
7863 ref = build_fold_addr_expr (ref);
7864 }
7865 }
7866 else
7867 {
7868 gcc_assert (orig_var == var);
7869 if (omp_privatize_by_reference (var))
7870 ref = build_fold_addr_expr (ref);
7871 }
7872 if (DECL_P (v))
7873 {
7874 tree t = maybe_lookup_decl (v, ctx);
7875 if (t)
7876 v = t;
7877 else
7878 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7879 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7880 }
7881 if (!integer_zerop (bias))
7882 {
7883 bias = fold_convert_loc (clause_loc, sizetype, bias);
7884 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7885 TREE_TYPE (new_var), new_var,
7886 unshare_expr (bias));
7887 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7888 TREE_TYPE (ref), ref, bias);
7889 }
7890 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7891 ref = fold_convert_loc (clause_loc, ptype, ref);
7892 tree m = create_tmp_var (ptype);
7893 gimplify_assign (m, new_var, stmt_seqp);
7894 new_var = m;
7895 m = create_tmp_var (ptype);
7896 gimplify_assign (m, ref, stmt_seqp);
7897 ref = m;
7898 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7899 tree body = create_artificial_label (UNKNOWN_LOCATION);
7900 tree end = create_artificial_label (UNKNOWN_LOCATION);
7901 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7902 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7903 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7904 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7905 {
7906 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7907 tree decl_placeholder
7908 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7909 SET_DECL_VALUE_EXPR (placeholder, out);
7910 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7911 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7912 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7913 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7914 gimple_seq_add_seq (&sub_seq,
7915 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7916 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7917 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7918 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7919 }
7920 else
7921 {
7922 tree out2 = out;
7923 tree priv2 = priv;
7924 if (is_truth_op)
7925 {
7926 tree zero = build_zero_cst (TREE_TYPE (out));
7927 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7928 boolean_type_node, out, zero);
7929 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7930 boolean_type_node, priv, zero);
7931 }
7932 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7933 if (is_truth_op)
7934 x = fold_convert (TREE_TYPE (out), x);
7935 out = unshare_expr (out);
7936 gimplify_assign (out, x, &sub_seq);
7937 }
7938 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7939 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7940 gimple_seq_add_stmt (&sub_seq, g);
7941 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7942 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7943 gimple_seq_add_stmt (&sub_seq, g);
7944 g = gimple_build_assign (i, PLUS_EXPR, i,
7945 build_int_cst (TREE_TYPE (i), 1));
7946 gimple_seq_add_stmt (&sub_seq, g);
7947 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7948 gimple_seq_add_stmt (&sub_seq, g);
7949 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7950 }
7951 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7952 {
7953 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7954
7955 if (omp_privatize_by_reference (var)
7956 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7957 TREE_TYPE (ref)))
7958 ref = build_fold_addr_expr_loc (clause_loc, ref);
7959 SET_DECL_VALUE_EXPR (placeholder, ref);
7960 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7961 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7962 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7964 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7965 }
7966 else
7967 {
7968 tree new_var2 = new_var;
7969 tree ref2 = ref;
7970 if (is_truth_op)
7971 {
7972 tree zero = build_zero_cst (TREE_TYPE (new_var));
7973 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7974 boolean_type_node, new_var, zero);
7975 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7976 ref, zero);
7977 }
7978 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7979 if (is_truth_op)
7980 x = fold_convert (TREE_TYPE (new_var), x);
7981 ref = build_outer_var_ref (var, ctx);
7982 gimplify_assign (ref, x, &sub_seq);
7983 }
7984 }
7985
7986 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7987 0);
7988 gimple_seq_add_stmt (stmt_seqp, stmt);
7989
7990 gimple_seq_add_seq (stmt_seqp, sub_seq);
7991
7992 if (clist)
7993 {
7994 gimple_seq_add_seq (stmt_seqp, *clist);
7995 *clist = NULL;
7996 }
7997
7998 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7999 0);
8000 gimple_seq_add_stmt (stmt_seqp, stmt);
8001 }
8002
8003
8004 /* Generate code to implement the COPYPRIVATE clauses. */
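/* Illustrative sketch: for #pragma omp single copyprivate (a), this
   emits into SLIST (run by the thread that executed the single body)

	.omp_copy_o.a = a;	(or the address of a, when passed by
				 reference)

   and into RLIST (run by the other threads)

	a = copyout_p->a;

   following the expansion documented before lower_omp_single_copy
   below.  */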
8005
8006 static void
8007 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
8008 omp_context *ctx)
8009 {
8010 tree c;
8011
8012 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8013 {
8014 tree var, new_var, ref, x;
8015 bool by_ref;
8016 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8017
8018 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
8019 continue;
8020
8021 var = OMP_CLAUSE_DECL (c);
8022 by_ref = use_pointer_for_field (var, NULL);
8023
8024 ref = build_sender_ref (var, ctx);
8025 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
8026 if (by_ref)
8027 {
8028 x = build_fold_addr_expr_loc (clause_loc, new_var);
8029 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
8030 }
8031 gimplify_assign (ref, x, slist);
8032
8033 ref = build_receiver_ref (var, false, ctx);
8034 if (by_ref)
8035 {
8036 ref = fold_convert_loc (clause_loc,
8037 build_pointer_type (TREE_TYPE (new_var)),
8038 ref);
8039 ref = build_fold_indirect_ref_loc (clause_loc, ref);
8040 }
8041 if (omp_privatize_by_reference (var))
8042 {
8043 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
8044 ref = build_simple_mem_ref_loc (clause_loc, ref);
8045 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
8046 }
8047 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
8048 gimplify_and_add (x, rlist);
8049 }
8050 }
8051
8052
8053 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8054 and REDUCTION clauses from the sender (aka parent) side. */
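/* Illustrative sketch, assuming the usual ".omp_data_o" name for the
   sender record: for

	#pragma omp parallel firstprivate (x) lastprivate (y)

   the parent-side sequences are, roughly,

	ILIST:  .omp_data_o.x = x;	(do_in; &x instead when by_ref)
	OLIST:  y = .omp_data_o.y;	(do_out, after the region)

   with the field accesses coming from build_sender_ref.  */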
8055
8056 static void
8057 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
8058 omp_context *ctx)
8059 {
8060 tree c, t;
8061 int ignored_looptemp = 0;
8062 bool is_taskloop = false;
8063
8064 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8065 by GOMP_taskloop. */
8066 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
8067 {
8068 ignored_looptemp = 2;
8069 is_taskloop = true;
8070 }
8071
8072 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8073 {
8074 tree val, ref, x, var;
8075 bool by_ref, do_in = false, do_out = false;
8076 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8077
8078 switch (OMP_CLAUSE_CODE (c))
8079 {
8080 case OMP_CLAUSE_PRIVATE:
8081 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8082 break;
8083 continue;
8084 case OMP_CLAUSE_FIRSTPRIVATE:
8085 case OMP_CLAUSE_COPYIN:
8086 case OMP_CLAUSE_LASTPRIVATE:
8087 case OMP_CLAUSE_IN_REDUCTION:
8088 case OMP_CLAUSE__REDUCTEMP_:
8089 break;
8090 case OMP_CLAUSE_REDUCTION:
8091 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
8092 continue;
8093 break;
8094 case OMP_CLAUSE_SHARED:
8095 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8096 break;
8097 continue;
8098 case OMP_CLAUSE__LOOPTEMP_:
8099 if (ignored_looptemp)
8100 {
8101 ignored_looptemp--;
8102 continue;
8103 }
8104 break;
8105 default:
8106 continue;
8107 }
8108
8109 val = OMP_CLAUSE_DECL (c);
8110 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8111 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
8112 && TREE_CODE (val) == MEM_REF)
8113 {
8114 val = TREE_OPERAND (val, 0);
8115 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
8116 val = TREE_OPERAND (val, 0);
8117 if (TREE_CODE (val) == INDIRECT_REF
8118 || TREE_CODE (val) == ADDR_EXPR)
8119 val = TREE_OPERAND (val, 0);
8120 if (is_variable_sized (val))
8121 continue;
8122 }
8123
8124 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8125 outer taskloop region. */
8126 omp_context *ctx_for_o = ctx;
8127 if (is_taskloop
8128 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8129 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8130 ctx_for_o = ctx->outer;
8131
8132 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
8133
8134 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
8135 && is_global_var (var)
8136 && (val == OMP_CLAUSE_DECL (c)
8137 || !is_task_ctx (ctx)
8138 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
8139 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
8140 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
8141 != POINTER_TYPE)))))
8142 continue;
8143
8144 t = omp_member_access_dummy_var (var);
8145 if (t)
8146 {
8147 var = DECL_VALUE_EXPR (var);
8148 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
8149 if (o != t)
8150 var = unshare_and_remap (var, t, o);
8151 else
8152 var = unshare_expr (var);
8153 }
8154
8155 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
8156 {
8157 /* Handle taskloop firstprivate/lastprivate, where the
8158 lastprivate on GIMPLE_OMP_TASK is represented as
8159 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8160 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
8161 x = omp_build_component_ref (ctx->sender_decl, f);
8162 if (use_pointer_for_field (val, ctx))
8163 var = build_fold_addr_expr (var);
8164 gimplify_assign (x, var, ilist);
8165 DECL_ABSTRACT_ORIGIN (f) = NULL;
8166 continue;
8167 }
8168
8169 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8170 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
8171 || val == OMP_CLAUSE_DECL (c))
8172 && is_variable_sized (val))
8173 continue;
8174 by_ref = use_pointer_for_field (val, NULL);
8175
8176 switch (OMP_CLAUSE_CODE (c))
8177 {
8178 case OMP_CLAUSE_FIRSTPRIVATE:
8179 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
8180 && !by_ref
8181 && is_task_ctx (ctx))
8182 suppress_warning (var);
8183 do_in = true;
8184 break;
8185
8186 case OMP_CLAUSE_PRIVATE:
8187 case OMP_CLAUSE_COPYIN:
8188 case OMP_CLAUSE__LOOPTEMP_:
8189 case OMP_CLAUSE__REDUCTEMP_:
8190 do_in = true;
8191 break;
8192
8193 case OMP_CLAUSE_LASTPRIVATE:
8194 if (by_ref || omp_privatize_by_reference (val))
8195 {
8196 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8197 continue;
8198 do_in = true;
8199 }
8200 else
8201 {
8202 do_out = true;
8203 if (lang_hooks.decls.omp_private_outer_ref (val))
8204 do_in = true;
8205 }
8206 break;
8207
8208 case OMP_CLAUSE_REDUCTION:
8209 case OMP_CLAUSE_IN_REDUCTION:
8210 do_in = true;
8211 if (val == OMP_CLAUSE_DECL (c))
8212 {
8213 if (is_task_ctx (ctx))
8214 by_ref = use_pointer_for_field (val, ctx);
8215 else
8216 do_out = !(by_ref || omp_privatize_by_reference (val));
8217 }
8218 else
8219 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
8220 break;
8221
8222 default:
8223 gcc_unreachable ();
8224 }
8225
8226 if (do_in)
8227 {
8228 ref = build_sender_ref (val, ctx);
8229 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
8230 gimplify_assign (ref, x, ilist);
8231 if (is_task_ctx (ctx))
8232 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
8233 }
8234
8235 if (do_out)
8236 {
8237 ref = build_sender_ref (val, ctx);
8238 gimplify_assign (var, ref, olist);
8239 }
8240 }
8241 }
8242
8243 /* Generate code to implement SHARED from the sender (aka parent)
8244 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8245 list things that got automatically shared. */
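/* Illustrative sketch: given

	int i;  char buf[64];
	#pragma omp parallel	// i and buf implicitly shared

   this emits, depending on use_pointer_for_field,

	ILIST:  .omp_data_o.i = i;	(shared by copy)
		.omp_data_o.buf = &buf;	(shared by pointer)
	OLIST:  i = .omp_data_o.i;	(value copied back out)

   so by-copy scalars are written back after the region, while
   aggregates and address-taken variables are passed by address.  */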
8246
8247 static void
8248 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
8249 {
8250 tree var, ovar, nvar, t, f, x, record_type;
8251
8252 if (ctx->record_type == NULL)
8253 return;
8254
8255 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
8256 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8257 {
8258 ovar = DECL_ABSTRACT_ORIGIN (f);
8259 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
8260 continue;
8261
8262 nvar = maybe_lookup_decl (ovar, ctx);
8263 if (!nvar
8264 || !DECL_HAS_VALUE_EXPR_P (nvar)
8265 || (ctx->allocate_map
8266 && ctx->allocate_map->get (ovar)))
8267 continue;
8268
8269 /* If CTX is a nested parallel directive, find the immediately
8270 enclosing parallel or workshare construct that contains a
8271 mapping for OVAR. */
8272 var = lookup_decl_in_outer_ctx (ovar, ctx);
8273
8274 t = omp_member_access_dummy_var (var);
8275 if (t)
8276 {
8277 var = DECL_VALUE_EXPR (var);
8278 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
8279 if (o != t)
8280 var = unshare_and_remap (var, t, o);
8281 else
8282 var = unshare_expr (var);
8283 }
8284
8285 if (use_pointer_for_field (ovar, ctx))
8286 {
8287 x = build_sender_ref (ovar, ctx);
8288 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
8289 && TREE_TYPE (f) == TREE_TYPE (ovar))
8290 {
8291 gcc_assert (is_parallel_ctx (ctx)
8292 && DECL_ARTIFICIAL (ovar));
8293 /* _condtemp_ clause. */
8294 var = build_constructor (TREE_TYPE (x), NULL);
8295 }
8296 else
8297 var = build_fold_addr_expr (var);
8298 gimplify_assign (x, var, ilist);
8299 }
8300 else
8301 {
8302 x = build_sender_ref (ovar, ctx);
8303 gimplify_assign (x, var, ilist);
8304
8305 if (!TREE_READONLY (var)
8306 /* We don't need to receive a new reference to a result
8307 or parm decl. In fact we may not store to it as we will
8308 invalidate any pending RSO and generate wrong gimple
8309 during inlining. */
8310 && !((TREE_CODE (var) == RESULT_DECL
8311 || TREE_CODE (var) == PARM_DECL)
8312 && DECL_BY_REFERENCE (var)))
8313 {
8314 x = build_sender_ref (ovar, ctx);
8315 gimplify_assign (var, x, olist);
8316 }
8317 }
8318 }
8319 }
8320
8321 /* Emit an OpenACC head marker call, encapsulating the partitioning and
8322 other information that must be processed by the target compiler.
8323 Return the maximum number of dimensions the associated loop might
8324 be partitioned over. */
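/* Illustrative sketch: for

	#pragma acc loop gang worker

   the marker emitted into SEQ is, roughly,

	ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, 2, tag);

   where 2 is the number of partitioned levels (also the return value)
   and tag has OLF_DIM_GANG, OLF_DIM_WORKER and, in a parallel region,
   OLF_INDEPENDENT set.  ".UNIQUE" is the GIMPLE-dump spelling of an
   IFN_UNIQUE internal call.  */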
8325
8326 static unsigned
8327 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
8328 gimple_seq *seq, omp_context *ctx)
8329 {
8330 unsigned levels = 0;
8331 unsigned tag = 0;
8332 tree gang_static = NULL_TREE;
8333 auto_vec<tree, 5> args;
8334
8335 args.quick_push (build_int_cst
8336 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
8337 args.quick_push (ddvar);
8338 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8339 {
8340 switch (OMP_CLAUSE_CODE (c))
8341 {
8342 case OMP_CLAUSE_GANG:
8343 tag |= OLF_DIM_GANG;
8344 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
8345 /* static:* is represented by -1, and we can ignore it, as
8346 scheduling is always static. */
8347 if (gang_static && integer_minus_onep (gang_static))
8348 gang_static = NULL_TREE;
8349 levels++;
8350 break;
8351
8352 case OMP_CLAUSE_WORKER:
8353 tag |= OLF_DIM_WORKER;
8354 levels++;
8355 break;
8356
8357 case OMP_CLAUSE_VECTOR:
8358 tag |= OLF_DIM_VECTOR;
8359 levels++;
8360 break;
8361
8362 case OMP_CLAUSE_SEQ:
8363 tag |= OLF_SEQ;
8364 break;
8365
8366 case OMP_CLAUSE_AUTO:
8367 tag |= OLF_AUTO;
8368 break;
8369
8370 case OMP_CLAUSE_INDEPENDENT:
8371 tag |= OLF_INDEPENDENT;
8372 break;
8373
8374 case OMP_CLAUSE_TILE:
8375 tag |= OLF_TILE;
8376 break;
8377
8378 case OMP_CLAUSE_REDUCTION:
8379 tag |= OLF_REDUCTION;
8380 break;
8381
8382 default:
8383 continue;
8384 }
8385 }
8386
8387 if (gang_static)
8388 {
8389 if (DECL_P (gang_static))
8390 gang_static = build_outer_var_ref (gang_static, ctx);
8391 tag |= OLF_GANG_STATIC;
8392 }
8393
8394 omp_context *tgt = enclosing_target_ctx (ctx);
8395 if (!tgt || is_oacc_parallel_or_serial (tgt))
8396 ;
8397 else if (is_oacc_kernels (tgt))
8398 /* This loop handling is not used inside OpenACC 'kernels' regions. */
8399 gcc_unreachable ();
8400 else if (is_oacc_kernels_decomposed_part (tgt))
8401 ;
8402 else
8403 gcc_unreachable ();
8404
8405 /* In a parallel region, loops are implicitly INDEPENDENT. */
8406 if (!tgt || is_oacc_parallel_or_serial (tgt))
8407 tag |= OLF_INDEPENDENT;
8408
8409 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8410 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8411 if (tgt && is_oacc_kernels_decomposed_part (tgt))
8412 {
8413 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
8414 gcc_assert (!(tag & OLF_AUTO));
8415 }
8416
8417 if (tag & OLF_TILE)
8418 /* Tiling could use all 3 levels. */
8419 levels = 3;
8420 else
8421 {
8422 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8423 Ensure at least one level, or 2 for possible auto
8424 partitioning. */
8425 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
8426 << OLF_DIM_BASE) | OLF_SEQ));
8427
8428 if (levels < 1u + maybe_auto)
8429 levels = 1u + maybe_auto;
8430 }
8431
8432 args.quick_push (build_int_cst (integer_type_node, levels));
8433 args.quick_push (build_int_cst (integer_type_node, tag));
8434 if (gang_static)
8435 args.quick_push (gang_static);
8436
8437 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
8438 gimple_set_location (call, loc);
8439 gimple_set_lhs (call, ddvar);
8440 gimple_seq_add_stmt (seq, call);
8441
8442 return levels;
8443 }
8444
8445 /* Emit an OpenACC loop head or tail marker to SEQ. HEAD selects the
8446 marker kind; TOFOLLOW, if non-NULL, gives the partitioning level of the enclosed region. */
8447
8448 static void
8449 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8450 tree tofollow, gimple_seq *seq)
8451 {
8452 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8453 : IFN_UNIQUE_OACC_TAIL_MARK);
8454 tree marker = build_int_cst (integer_type_node, marker_kind);
8455 int nargs = 2 + (tofollow != NULL_TREE);
8456 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8457 marker, ddvar, tofollow);
8458 gimple_set_location (call, loc);
8459 gimple_set_lhs (call, ddvar);
8460 gimple_seq_add_stmt (seq, call);
8461 }
8462
8463 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8464 the loop clauses, from which we extract reductions. Initialize
8465 HEAD and TAIL. */
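/* Illustrative sketch: for a loop partitioned over two levels, HEAD
   and TAIL are bracketed, schematically, as

	HEAD:	.UNIQUE (OACC_HEAD_MARK, levels=2, tag)
		reduction setup; .UNIQUE (OACC_FORK)	outer level
		reduction init
		reduction setup; .UNIQUE (OACC_FORK)	inner level
		reduction init
		.UNIQUE (OACC_HEAD_MARK)		closes the head

	TAIL:	reduction fini; .UNIQUE (OACC_JOIN)	inner level
		reduction teardown
		reduction fini; .UNIQUE (OACC_JOIN)	outer level
		reduction teardown
		.UNIQUE (OACC_TAIL_MARK)		closes the tail

   with the reduction pieces supplied by lower_oacc_reductions and the
   per-level count/done markers (omitted here) interspersed by
   lower_oacc_loop_marker.  */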
8466
8467 static void
8468 lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
8469 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
8470 {
8471 bool inner = false;
8472 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
8473 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
8474
8475 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8476
8477 if (private_marker)
8478 {
8479 gimple_set_location (private_marker, loc);
8480 gimple_call_set_lhs (private_marker, ddvar);
8481 gimple_call_set_arg (private_marker, 1, ddvar);
8482 }
8483
8484 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8485 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8486
8487 gcc_assert (count);
8488 for (unsigned done = 1; count; count--, done++)
8489 {
8490 gimple_seq fork_seq = NULL;
8491 gimple_seq join_seq = NULL;
8492
8493 tree place = build_int_cst (integer_type_node, -1);
8494 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8495 fork_kind, ddvar, place);
8496 gimple_set_location (fork, loc);
8497 gimple_set_lhs (fork, ddvar);
8498
8499 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8500 join_kind, ddvar, place);
8501 gimple_set_location (join, loc);
8502 gimple_set_lhs (join, ddvar);
8503
8504 /* Mark the beginning of this level sequence. */
8505 if (inner)
8506 lower_oacc_loop_marker (loc, ddvar, true,
8507 build_int_cst (integer_type_node, count),
8508 &fork_seq);
8509 lower_oacc_loop_marker (loc, ddvar, false,
8510 build_int_cst (integer_type_node, done),
8511 &join_seq);
8512
8513 lower_oacc_reductions (loc, clauses, place, inner,
8514 fork, (count == 1) ? private_marker : NULL,
8515 join, &fork_seq, &join_seq, ctx);
8516
8517 /* Append this level to head. */
8518 gimple_seq_add_seq (head, fork_seq);
8519 /* Prepend it to tail. */
8520 gimple_seq_add_seq (&join_seq, *tail);
8521 *tail = join_seq;
8522
8523 inner = true;
8524 }
8525
8526 /* Mark the end of the sequence. */
8527 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8528 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8529 }
8530
8531 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8532 catch handler and return it. This prevents programs from violating the
8533 structured block semantics with throws. */
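/* Illustrative sketch: the wrapping is the GIMPLE equivalent of

	try
	  {
	    BODY
	  }
	catch
	  {
	    <eh_must_not_throw>	 (typically std::terminate for C++,
				  __builtin_trap otherwise)
	  }

   so an exception escaping the structured block aborts rather than
   unwinding past the OMP region.  */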
8534
8535 static gimple_seq
8536 maybe_catch_exception (gimple_seq body)
8537 {
8538 gimple *g;
8539 tree decl;
8540
8541 if (!flag_exceptions)
8542 return body;
8543
8544 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8545 decl = lang_hooks.eh_protect_cleanup_actions ();
8546 else
8547 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8548
8549 g = gimple_build_eh_must_not_throw (decl);
8550 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8551 GIMPLE_TRY_CATCH);
8552
8553 return gimple_seq_alloc_with_stmt (g);
8554 }
8555
8556 \f
8557 /* Routines to lower OMP directives into OMP-GIMPLE. */
8558
8559 /* If ctx is a worksharing context inside of a cancellable parallel
8560 region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
8561 and a conditional branch to the parallel's cancel_label to handle
8562 cancellation in the implicit barrier. */
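/* Illustrative sketch: the rewritten barrier looks, roughly, like

	lhs = GIMPLE_OMP_RETURN;	(cancellation status of the
					 implicit barrier)
	if (lhs != 0) goto <cancel_label>; else goto <fallthru>;
	<fallthru>:

   so a cancelled parallel region branches from the worksharing
   barrier straight to its cancellation handling.  */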
8563
8564 static void
8565 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8566 gimple_seq *body)
8567 {
8568 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8569 if (gimple_omp_return_nowait_p (omp_return))
8570 return;
8571 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8572 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8573 && outer->cancellable)
8574 {
8575 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8576 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8577 tree lhs = create_tmp_var (c_bool_type);
8578 gimple_omp_return_set_lhs (omp_return, lhs);
8579 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8580 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8581 fold_convert (c_bool_type,
8582 boolean_false_node),
8583 outer->cancel_label, fallthru_label);
8584 gimple_seq_add_stmt (body, g);
8585 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8586 }
8587 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
8588 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
8589 return;
8590 }
8591
8592 /* Find the first task_reduction or reduction clause or return NULL
8593 if there are none. */
8594
8595 static inline tree
8596 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8597 enum omp_clause_code ccode)
8598 {
8599 while (1)
8600 {
8601 clauses = omp_find_clause (clauses, ccode);
8602 if (clauses == NULL_TREE)
8603 return NULL_TREE;
8604 if (ccode != OMP_CLAUSE_REDUCTION
8605 || code == OMP_TASKLOOP
8606 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8607 return clauses;
8608 clauses = OMP_CLAUSE_CHAIN (clauses);
8609 }
8610 }
8611
8612 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8613 gimple_seq *, gimple_seq *);
8614
8615 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8616 CTX is the enclosing OMP context for the current statement. */
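/* Illustrative sketch: after lowering, the replacement bind contains,
   schematically,

	<ILIST: privatization/firstprivate setup>
	GIMPLE_OMP_SECTIONS <control var .section>
	GIMPLE_OMP_SECTIONS_SWITCH
	bind {
	  section 1 body ... GIMPLE_OMP_RETURN
	  ...
	  last section body + lastprivate copy-out ... GIMPLE_OMP_RETURN
	}
	GIMPLE_OMP_CONTINUE <control, control>
	<OLIST: reduction merges>  <DLIST: destructors>
	GIMPLE_OMP_RETURN <nowait?>

   matching the new_body assembly at the end of this function.  */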
8617
8618 static void
8619 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8620 {
8621 tree block, control;
8622 gimple_stmt_iterator tgsi;
8623 gomp_sections *stmt;
8624 gimple *t;
8625 gbind *new_stmt, *bind;
8626 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8627
8628 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8629
8630 push_gimplify_context ();
8631
8632 dlist = NULL;
8633 ilist = NULL;
8634
8635 tree rclauses
8636 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8637 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8638 tree rtmp = NULL_TREE;
8639 if (rclauses)
8640 {
8641 tree type = build_pointer_type (pointer_sized_int_node);
8642 tree temp = create_tmp_var (type);
8643 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8644 OMP_CLAUSE_DECL (c) = temp;
8645 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8646 gimple_omp_sections_set_clauses (stmt, c);
8647 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8648 gimple_omp_sections_clauses (stmt),
8649 &ilist, &tred_dlist);
8650 rclauses = c;
8651 rtmp = make_ssa_name (type);
8652 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8653 }
8654
8655 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8656 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8657
8658 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8659 &ilist, &dlist, ctx, NULL);
8660
8661 control = create_tmp_var (unsigned_type_node, ".section");
8662 gimple_omp_sections_set_control (stmt, control);
8663
8664 new_body = gimple_omp_body (stmt);
8665 gimple_omp_set_body (stmt, NULL);
8666 tgsi = gsi_start (new_body);
8667 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8668 {
8669 omp_context *sctx;
8670 gimple *sec_start;
8671
8672 sec_start = gsi_stmt (tgsi);
8673 sctx = maybe_lookup_ctx (sec_start);
8674 gcc_assert (sctx);
8675
8676 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8677 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8678 GSI_CONTINUE_LINKING);
8679 gimple_omp_set_body (sec_start, NULL);
8680
8681 if (gsi_one_before_end_p (tgsi))
8682 {
8683 gimple_seq l = NULL;
8684 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8685 &ilist, &l, &clist, ctx);
8686 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8687 gimple_omp_section_set_last (sec_start);
8688 }
8689
8690 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8691 GSI_CONTINUE_LINKING);
8692 }
8693
8694 block = make_node (BLOCK);
8695 bind = gimple_build_bind (NULL, new_body, block);
8696
8697 olist = NULL;
8698 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8699 &clist, ctx);
8700 if (clist)
8701 {
8702 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8703 gcall *g = gimple_build_call (fndecl, 0);
8704 gimple_seq_add_stmt (&olist, g);
8705 gimple_seq_add_seq (&olist, clist);
8706 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8707 g = gimple_build_call (fndecl, 0);
8708 gimple_seq_add_stmt (&olist, g);
8709 }
8710
8711 block = make_node (BLOCK);
8712 new_stmt = gimple_build_bind (NULL, NULL, block);
8713 gsi_replace (gsi_p, new_stmt, true);
8714
8715 pop_gimplify_context (new_stmt);
8716 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8717 BLOCK_VARS (block) = gimple_bind_vars (bind);
8718 if (BLOCK_VARS (block))
8719 TREE_USED (block) = 1;
8720
8721 new_body = NULL;
8722 gimple_seq_add_seq (&new_body, ilist);
8723 gimple_seq_add_stmt (&new_body, stmt);
8724 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8725 gimple_seq_add_stmt (&new_body, bind);
8726
8727 t = gimple_build_omp_continue (control, control);
8728 gimple_seq_add_stmt (&new_body, t);
8729
8730 gimple_seq_add_seq (&new_body, olist);
8731 if (ctx->cancellable)
8732 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8733 gimple_seq_add_seq (&new_body, dlist);
8734
8735 new_body = maybe_catch_exception (new_body);
8736
8737 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8738 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8739 t = gimple_build_omp_return (nowait);
8740 gimple_seq_add_stmt (&new_body, t);
8741 gimple_seq_add_seq (&new_body, tred_dlist);
8742 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8743
8744 if (rclauses)
8745 OMP_CLAUSE_DECL (rclauses) = rtmp;
8746
8747 gimple_bind_set_body (new_stmt, new_body);
8748 }
8749
8750
8751 /* A subroutine of lower_omp_single. Expand the simple form of
8752 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8753
8754 if (GOMP_single_start ())
8755 BODY;
8756 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8757
8758 FIXME. It may be better to delay expanding the logic of this until
8759 pass_expand_omp. The expanded logic may make the job more difficult
8760 for a synchronization analysis pass. */
8761
8762 static void
8763 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8764 {
8765 location_t loc = gimple_location (single_stmt);
8766 tree tlabel = create_artificial_label (loc);
8767 tree flabel = create_artificial_label (loc);
8768 gimple *call, *cond;
8769 tree lhs, decl;
8770
8771 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8772 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8773 call = gimple_build_call (decl, 0);
8774 gimple_call_set_lhs (call, lhs);
8775 gimple_seq_add_stmt (pre_p, call);
8776
8777 cond = gimple_build_cond (EQ_EXPR, lhs,
8778 fold_convert_loc (loc, TREE_TYPE (lhs),
8779 boolean_true_node),
8780 tlabel, flabel);
8781 gimple_seq_add_stmt (pre_p, cond);
8782 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8783 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8784 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8785 }
8786
8787
8788 /* A subroutine of lower_omp_single. Expand the simple form of
8789 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8790
8791 #pragma omp single copyprivate (a, b, c)
8792
8793 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8794
8795 {
8796 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8797 {
8798 BODY;
8799 copyout.a = a;
8800 copyout.b = b;
8801 copyout.c = c;
8802 GOMP_single_copy_end (&copyout);
8803 }
8804 else
8805 {
8806 a = copyout_p->a;
8807 b = copyout_p->b;
8808 c = copyout_p->c;
8809 }
8810 GOMP_barrier ();
8811 }
8812
8813 FIXME. It may be better to delay expanding the logic of this until
8814 pass_expand_omp. The expanded logic may make the job more difficult
8815 for a synchronization analysis pass. */
8816
8817 static void
8818 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8819 omp_context *ctx)
8820 {
8821 tree ptr_type, t, l0, l1, l2, bfn_decl;
8822 gimple_seq copyin_seq;
8823 location_t loc = gimple_location (single_stmt);
8824
8825 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8826
8827 ptr_type = build_pointer_type (ctx->record_type);
8828 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8829
8830 l0 = create_artificial_label (loc);
8831 l1 = create_artificial_label (loc);
8832 l2 = create_artificial_label (loc);
8833
8834 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8835 t = build_call_expr_loc (loc, bfn_decl, 0);
8836 t = fold_convert_loc (loc, ptr_type, t);
8837 gimplify_assign (ctx->receiver_decl, t, pre_p);
8838
8839 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8840 build_int_cst (ptr_type, 0));
8841 t = build3 (COND_EXPR, void_type_node, t,
8842 build_and_jump (&l0), build_and_jump (&l1));
8843 gimplify_and_add (t, pre_p);
8844
8845 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8846
8847 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8848
8849 copyin_seq = NULL;
8850 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8851 &copyin_seq, ctx);
8852
8853 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8854 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8855 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8856 gimplify_and_add (t, pre_p);
8857
8858 t = build_and_jump (&l2);
8859 gimplify_and_add (t, pre_p);
8860
8861 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8862
8863 gimple_seq_add_seq (pre_p, copyin_seq);
8864
8865 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8866 }
8867
8868
8869 /* Expand code for an OpenMP single directive. */
8870
8871 static void
8872 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8873 {
8874 tree block;
8875 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8876 gbind *bind;
8877 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8878
8879 push_gimplify_context ();
8880
8881 block = make_node (BLOCK);
8882 bind = gimple_build_bind (NULL, NULL, block);
8883 gsi_replace (gsi_p, bind, true);
8884 bind_body = NULL;
8885 dlist = NULL;
8886 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8887 &bind_body, &dlist, ctx, NULL);
8888 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8889
8890 gimple_seq_add_stmt (&bind_body, single_stmt);
8891
8892 if (ctx->record_type)
8893 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8894 else
8895 lower_omp_single_simple (single_stmt, &bind_body);
8896
8897 gimple_omp_set_body (single_stmt, NULL);
8898
8899 gimple_seq_add_seq (&bind_body, dlist);
8900
8901 bind_body = maybe_catch_exception (bind_body);
8902
8903 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8904 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8905 gimple *g = gimple_build_omp_return (nowait);
8906 gimple_seq_add_stmt (&bind_body_tail, g);
8907 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8908 if (ctx->record_type)
8909 {
8910 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8911 tree clobber = build_clobber (ctx->record_type);
8912 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8913 clobber), GSI_SAME_STMT);
8914 }
8915 gimple_seq_add_seq (&bind_body, bind_body_tail);
8916 gimple_bind_set_body (bind, bind_body);
8917
8918 pop_gimplify_context (bind);
8919
8920 gimple_bind_append_vars (bind, ctx->block_vars);
8921 BLOCK_VARS (block) = ctx->block_vars;
8922 if (BLOCK_VARS (block))
8923 TREE_USED (block) = 1;
8924 }
8925
8926
8927 /* Lower code for an OMP scope directive. */
8928
8929 static void
8930 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8931 {
8932 tree block;
8933 gimple *scope_stmt = gsi_stmt (*gsi_p);
8934 gbind *bind;
8935 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8936 gimple_seq tred_dlist = NULL;
8937
8938 push_gimplify_context ();
8939
8940 block = make_node (BLOCK);
8941 bind = gimple_build_bind (NULL, NULL, block);
8942 gsi_replace (gsi_p, bind, true);
8943 bind_body = NULL;
8944 dlist = NULL;
8945
8946 tree rclauses
8947 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8948 OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8949 if (rclauses)
8950 {
8951 tree type = build_pointer_type (pointer_sized_int_node);
8952 tree temp = create_tmp_var (type);
8953 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8954 OMP_CLAUSE_DECL (c) = temp;
8955 OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8956 gimple_omp_scope_set_clauses (scope_stmt, c);
8957 lower_omp_task_reductions (ctx, OMP_SCOPE,
8958 gimple_omp_scope_clauses (scope_stmt),
8959 &bind_body, &tred_dlist);
8960 rclauses = c;
8961 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8962 gimple *stmt = gimple_build_call (fndecl, 1, temp);
8963 gimple_seq_add_stmt (&bind_body, stmt);
8964 }
8965
8966 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8967 &bind_body, &dlist, ctx, NULL);
8968 lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8969
8970 gimple_seq_add_stmt (&bind_body, scope_stmt);
8971
8972 gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8973
8974 gimple_omp_set_body (scope_stmt, NULL);
8975
8976 gimple_seq clist = NULL;
8977 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8978 &bind_body, &clist, ctx);
8979 if (clist)
8980 {
8981 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8982 gcall *g = gimple_build_call (fndecl, 0);
8983 gimple_seq_add_stmt (&bind_body, g);
8984 gimple_seq_add_seq (&bind_body, clist);
8985 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8986 g = gimple_build_call (fndecl, 0);
8987 gimple_seq_add_stmt (&bind_body, g);
8988 }
8989
8990 gimple_seq_add_seq (&bind_body, dlist);
8991
8992 bind_body = maybe_catch_exception (bind_body);
8993
8994 bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8995 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8996 gimple *g = gimple_build_omp_return (nowait);
8997 gimple_seq_add_stmt (&bind_body_tail, g);
8998 gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8999 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
9000 if (ctx->record_type)
9001 {
9002 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
9003 tree clobber = build_clobber (ctx->record_type);
9004 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
9005 clobber), GSI_SAME_STMT);
9006 }
9007 gimple_seq_add_seq (&bind_body, bind_body_tail);
9008
9009 gimple_bind_set_body (bind, bind_body);
9010
9011 pop_gimplify_context (bind);
9012
9013 gimple_bind_append_vars (bind, ctx->block_vars);
9014 BLOCK_VARS (block) = ctx->block_vars;
9015 if (BLOCK_VARS (block))
9016 TREE_USED (block) = 1;
9017 }

9018 /* Expand code for an OpenMP master or masked directive. */
9019
9020 static void
9021 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9022 {
9023 tree block, lab = NULL, x, bfn_decl;
9024 gimple *stmt = gsi_stmt (*gsi_p);
9025 gbind *bind;
9026 location_t loc = gimple_location (stmt);
9027 gimple_seq tseq;
9028 tree filter = integer_zero_node;
9029
9030 push_gimplify_context ();
9031
9032 if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
9033 {
9034 filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
9035 OMP_CLAUSE_FILTER);
9036 if (filter)
9037 filter = fold_convert (integer_type_node,
9038 OMP_CLAUSE_FILTER_EXPR (filter));
9039 else
9040 filter = integer_zero_node;
9041 }
9042 block = make_node (BLOCK);
9043 bind = gimple_build_bind (NULL, NULL, block);
9044 gsi_replace (gsi_p, bind, true);
9045 gimple_bind_add_stmt (bind, stmt);
9046
9047 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9048 x = build_call_expr_loc (loc, bfn_decl, 0);
9049 x = build2 (EQ_EXPR, boolean_type_node, x, filter);
9050 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
9051 tseq = NULL;
9052 gimplify_and_add (x, &tseq);
9053 gimple_bind_add_seq (bind, tseq);
9054
9055 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9056 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9057 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9058 gimple_omp_set_body (stmt, NULL);
9059
9060 gimple_bind_add_stmt (bind, gimple_build_label (lab));
9061
9062 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9063
9064 pop_gimplify_context (bind);
9065
9066 gimple_bind_append_vars (bind, ctx->block_vars);
9067 BLOCK_VARS (block) = ctx->block_vars;
9068 }
9069
9070 /* Helper function for lower_omp_task_reductions. For the given PASS (0:
9071 constant-sized decls, 1: MEM_REFs and variable-sized types), find the next
9072 clause to process, or return false if all have been processed already. */
9073
9074 static inline bool
9075 omp_task_reduction_iterate (int pass, enum tree_code code,
9076 enum omp_clause_code ccode, tree *c, tree *decl,
9077 tree *type, tree *next)
9078 {
9079 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9080 {
9081 if (ccode == OMP_CLAUSE_REDUCTION
9082 && code != OMP_TASKLOOP
9083 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9084 continue;
9085 *decl = OMP_CLAUSE_DECL (*c);
9086 *type = TREE_TYPE (*decl);
9087 if (TREE_CODE (*decl) == MEM_REF)
9088 {
9089 if (pass != 1)
9090 continue;
9091 }
9092 else
9093 {
9094 if (omp_privatize_by_reference (*decl))
9095 *type = TREE_TYPE (*type);
9096 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9097 continue;
9098 }
9099 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9100 return true;
9101 }
9102 *decl = NULL_TREE;
9103 *type = NULL_TREE;
9104 *next = NULL_TREE;
9105 return false;
9106 }
9107
9108 /* Lower task_reduction and reduction clauses (the latter only with the
9109 task modifier, unless CODE is OMP_TASKGROUP). Register their mapping in
9110 the START sequence; reduce them and unregister them in the END sequence. */
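/* Editorial example of the source form this handles ('work' is a
   hypothetical function):

     int sum = 0;
     #pragma omp taskgroup task_reduction (+: sum)
     {
       #pragma omp task in_reduction (+: sum)
       sum += work (i);
     }

   START registers 'sum' with the runtime so each thread gets a private
   copy; END combines the per-thread copies back into 'sum' and
   unregisters them.  */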
9111
9112 static void
9113 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9114 gimple_seq *start, gimple_seq *end)
9115 {
9116 enum omp_clause_code ccode
9117 = (code == OMP_TASKGROUP
9118 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9119 tree cancellable = NULL_TREE;
9120 clauses = omp_task_reductions_find_first (clauses, code, ccode);
9121 if (clauses == NULL_TREE)
9122 return;
9123 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9124 {
9125 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9126 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9127 && outer->cancellable)
9128 {
9129 cancellable = error_mark_node;
9130 break;
9131 }
9132 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9133 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9134 break;
9135 }
9136 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9137 tree *last = &TYPE_FIELDS (record_type);
9138 unsigned cnt = 0;
9139 if (cancellable)
9140 {
9141 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9142 ptr_type_node);
9143 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9144 integer_type_node);
9145 *last = field;
9146 DECL_CHAIN (field) = ifield;
9147 last = &DECL_CHAIN (ifield);
9148 DECL_CONTEXT (field) = record_type;
9149 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9150 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9151 DECL_CONTEXT (ifield) = record_type;
9152 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9153 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9154 }
9155 for (int pass = 0; pass < 2; pass++)
9156 {
9157 tree decl, type, next;
9158 for (tree c = clauses;
9159 omp_task_reduction_iterate (pass, code, ccode,
9160 &c, &decl, &type, &next); c = next)
9161 {
9162 ++cnt;
9163 tree new_type = type;
9164 if (ctx->outer)
9165 new_type = remap_type (type, &ctx->outer->cb);
9166 tree field
9167 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9168 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9169 new_type);
9170 if (DECL_P (decl) && type == TREE_TYPE (decl))
9171 {
9172 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9173 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9174 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9175 }
9176 else
9177 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9178 DECL_CONTEXT (field) = record_type;
9179 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9180 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9181 *last = field;
9182 last = &DECL_CHAIN (field);
9183 tree bfield
9184 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9185 boolean_type_node);
9186 DECL_CONTEXT (bfield) = record_type;
9187 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9188 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9189 *last = bfield;
9190 last = &DECL_CHAIN (bfield);
9191 }
9192 }
9193 *last = NULL_TREE;
9194 layout_type (record_type);
9195
9196 /* Build up an array which is passed to the runtime to register all the
9197 reductions at the start and to deregister them at the end. Format
documented in libgomp/task.c. */
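/* Sketch of the layout as filled in below (libgomp/task.c remains the
   authoritative reference): slot 0 holds the reduction count, slot 1 the
   per-thread record size rounded up to a cache line, slot 2 the alignment,
   slots 3 and 4 are seeded with -1 and 0, and each reduction occupies a
   triple of slots starting at index 7 whose first two entries are the
   variable's address and its byte offset within the record; the remaining
   slots are owned by the runtime.  */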
9198 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9199 tree avar = create_tmp_var_raw (atype);
9200 gimple_add_tmp_var (avar);
9201 TREE_ADDRESSABLE (avar) = 1;
9202 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9203 NULL_TREE, NULL_TREE);
9204 tree t = build_int_cst (pointer_sized_int_node, cnt);
9205 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9206 gimple_seq seq = NULL;
9207 tree sz = fold_convert (pointer_sized_int_node,
9208 TYPE_SIZE_UNIT (record_type));
9209 int cachesz = 64;
9210 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9211 build_int_cst (pointer_sized_int_node, cachesz - 1));
9212 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9213 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9214 ctx->task_reductions.create (1 + cnt);
9215 ctx->task_reduction_map = new hash_map<tree, unsigned>;
9216 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9217 ? sz : NULL_TREE);
9218 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9219 gimple_seq_add_seq (start, seq);
9220 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9221 NULL_TREE, NULL_TREE);
9222 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9223 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9224 NULL_TREE, NULL_TREE);
9225 t = build_int_cst (pointer_sized_int_node,
9226 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9227 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9228 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9229 NULL_TREE, NULL_TREE);
9230 t = build_int_cst (pointer_sized_int_node, -1);
9231 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9232 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9233 NULL_TREE, NULL_TREE);
9234 t = build_int_cst (pointer_sized_int_node, 0);
9235 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9236
9237 /* In END, build a loop that iterates from 0 up to omp_get_num_threads ()
9238 and for each task reduction checks a bool stored right after the private
9239 variable within that thread's chunk; if the bool is clear, the variable
9240 hasn't been initialized and thus isn't going to be reduced or destructed;
9241 otherwise, reduce and destruct it. */
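/* Roughly, as an editorial sketch in C-like pseudocode of the sequence
   built below (names invented for exposition):

     for (idx = 0; idx < num_threads; idx++, data += chunk_size)
       for each reduction R:
         if (((struct rec *) data)->R_initialized)
           { merge ((struct rec *) data)->R into the original;
             destruct ((struct rec *) data)->R; }
*/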
9242 tree idx = create_tmp_var (size_type_node);
9243 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9244 tree num_thr_sz = create_tmp_var (size_type_node);
9245 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9246 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9247 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9248 gimple *g;
9249 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9250 {
9251 /* For worksharing constructs or scope, perform the reduction only in
9252 the master thread, except after cancelled implicit barriers - then
9253 handle only the current thread. */
9254 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9255 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9256 tree thr_num = create_tmp_var (integer_type_node);
9257 g = gimple_build_call (t, 0);
9258 gimple_call_set_lhs (g, thr_num);
9259 gimple_seq_add_stmt (end, g);
9260 if (cancellable)
9261 {
9262 tree c;
9263 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9264 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9265 lab3 = create_artificial_label (UNKNOWN_LOCATION);
9266 if (code == OMP_FOR)
9267 c = gimple_omp_for_clauses (ctx->stmt);
9268 else if (code == OMP_SECTIONS)
9269 c = gimple_omp_sections_clauses (ctx->stmt);
9270 else /* if (code == OMP_SCOPE) */
9271 c = gimple_omp_scope_clauses (ctx->stmt);
9272 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9273 cancellable = c;
9274 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9275 lab5, lab6);
9276 gimple_seq_add_stmt (end, g);
9277 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9278 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9279 gimple_seq_add_stmt (end, g);
9280 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9281 build_one_cst (TREE_TYPE (idx)));
9282 gimple_seq_add_stmt (end, g);
9283 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9284 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9285 }
9286 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9287 gimple_seq_add_stmt (end, g);
9288 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9289 }
9290 if (code != OMP_PARALLEL)
9291 {
9292 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9293 tree num_thr = create_tmp_var (integer_type_node);
9294 g = gimple_build_call (t, 0);
9295 gimple_call_set_lhs (g, num_thr);
9296 gimple_seq_add_stmt (end, g);
9297 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9298 gimple_seq_add_stmt (end, g);
9299 if (cancellable)
9300 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9301 }
9302 else
9303 {
9304 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9305 OMP_CLAUSE__REDUCTEMP_);
9306 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9307 t = fold_convert (size_type_node, t);
9308 gimplify_assign (num_thr_sz, t, end);
9309 }
9310 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9311 NULL_TREE, NULL_TREE);
9312 tree data = create_tmp_var (pointer_sized_int_node);
9313 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9314 if (code == OMP_TASKLOOP)
9315 {
9316 lab7 = create_artificial_label (UNKNOWN_LOCATION);
9317 g = gimple_build_cond (NE_EXPR, data,
9318 build_zero_cst (pointer_sized_int_node),
9319 lab1, lab7);
9320 gimple_seq_add_stmt (end, g);
9321 }
9322 gimple_seq_add_stmt (end, gimple_build_label (lab1));
9323 tree ptr;
9324 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9325 ptr = create_tmp_var (build_pointer_type (record_type));
9326 else
9327 ptr = create_tmp_var (ptr_type_node);
9328 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9329
9330 tree field = TYPE_FIELDS (record_type);
9331 cnt = 0;
9332 if (cancellable)
9333 field = DECL_CHAIN (DECL_CHAIN (field));
9334 for (int pass = 0; pass < 2; pass++)
9335 {
9336 tree decl, type, next;
9337 for (tree c = clauses;
9338 omp_task_reduction_iterate (pass, code, ccode,
9339 &c, &decl, &type, &next); c = next)
9340 {
9341 tree var = decl, ref;
9342 if (TREE_CODE (decl) == MEM_REF)
9343 {
9344 var = TREE_OPERAND (var, 0);
9345 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9346 var = TREE_OPERAND (var, 0);
9347 tree v = var;
9348 if (TREE_CODE (var) == ADDR_EXPR)
9349 var = TREE_OPERAND (var, 0);
9350 else if (TREE_CODE (var) == INDIRECT_REF)
9351 var = TREE_OPERAND (var, 0);
9352 tree orig_var = var;
9353 if (is_variable_sized (var))
9354 {
9355 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9356 var = DECL_VALUE_EXPR (var);
9357 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9358 var = TREE_OPERAND (var, 0);
9359 gcc_assert (DECL_P (var));
9360 }
9361 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9362 if (orig_var != var)
9363 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9364 else if (TREE_CODE (v) == ADDR_EXPR)
9365 t = build_fold_addr_expr (t);
9366 else if (TREE_CODE (v) == INDIRECT_REF)
9367 t = build_fold_indirect_ref (t);
9368 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9369 {
9370 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9371 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9372 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9373 }
9374 if (!integer_zerop (TREE_OPERAND (decl, 1)))
9375 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9376 fold_convert (size_type_node,
9377 TREE_OPERAND (decl, 1)));
9378 }
9379 else
9380 {
9381 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9382 if (!omp_privatize_by_reference (decl))
9383 t = build_fold_addr_expr (t);
9384 }
9385 t = fold_convert (pointer_sized_int_node, t);
9386 seq = NULL;
9387 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9388 gimple_seq_add_seq (start, seq);
9389 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9390 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9391 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9392 t = unshare_expr (byte_position (field));
9393 t = fold_convert (pointer_sized_int_node, t);
9394 ctx->task_reduction_map->put (c, cnt);
9395 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9396 ? t : NULL_TREE);
9397 seq = NULL;
9398 t = force_gimple_operand (t, &seq, true, NULL_TREE);
9399 gimple_seq_add_seq (start, seq);
9400 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9401 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9402 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9403
9404 tree bfield = DECL_CHAIN (field);
9405 tree cond;
9406 if (code == OMP_PARALLEL
9407 || code == OMP_FOR
9408 || code == OMP_SECTIONS
9409 || code == OMP_SCOPE)
9410 /* In parallel, worksharing or scope constructs, all threads
9411 unconditionally initialize all their task reduction private variables. */
9412 cond = boolean_true_node;
9413 else if (TREE_TYPE (ptr) == ptr_type_node)
9414 {
9415 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9416 unshare_expr (byte_position (bfield)));
9417 seq = NULL;
9418 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9419 gimple_seq_add_seq (end, seq);
9420 tree pbool = build_pointer_type (TREE_TYPE (bfield));
9421 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9422 build_int_cst (pbool, 0));
9423 }
9424 else
9425 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9426 build_simple_mem_ref (ptr), bfield, NULL_TREE);
9427 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9428 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9429 tree condv = create_tmp_var (boolean_type_node);
9430 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9431 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9432 lab3, lab4);
9433 gimple_seq_add_stmt (end, g);
9434 gimple_seq_add_stmt (end, gimple_build_label (lab3));
9435 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9436 {
9437 /* If this reduction doesn't need destruction and parallel
9438 has been cancelled, there is nothing to do for this
9439 reduction, so jump around the merge operation. */
9440 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9441 g = gimple_build_cond (NE_EXPR, cancellable,
9442 build_zero_cst (TREE_TYPE (cancellable)),
9443 lab4, lab5);
9444 gimple_seq_add_stmt (end, g);
9445 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9446 }
9447
9448 tree new_var;
9449 if (TREE_TYPE (ptr) == ptr_type_node)
9450 {
9451 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9452 unshare_expr (byte_position (field)));
9453 seq = NULL;
9454 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9455 gimple_seq_add_seq (end, seq);
9456 tree pbool = build_pointer_type (TREE_TYPE (field));
9457 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9458 build_int_cst (pbool, 0));
9459 }
9460 else
9461 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9462 build_simple_mem_ref (ptr), field, NULL_TREE);
9463
9464 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9465 if (TREE_CODE (decl) != MEM_REF
9466 && omp_privatize_by_reference (decl))
9467 ref = build_simple_mem_ref (ref);
9468 /* reduction(-:var) sums up the partial results, so it acts
9469 identically to reduction(+:var). */
9470 if (rcode == MINUS_EXPR)
9471 rcode = PLUS_EXPR;
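/* Editorial note: the per-thread partial results are simply summed,
   e.g. partials -1, -2 and -3 yield -6, exactly as with '+'.  */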
9472 if (TREE_CODE (decl) == MEM_REF)
9473 {
9474 tree type = TREE_TYPE (new_var);
9475 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9476 tree i = create_tmp_var (TREE_TYPE (v));
9477 tree ptype = build_pointer_type (TREE_TYPE (type));
9478 if (DECL_P (v))
9479 {
9480 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9481 tree vv = create_tmp_var (TREE_TYPE (v));
9482 gimplify_assign (vv, v, start);
9483 v = vv;
9484 }
9485 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9486 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9487 new_var = build_fold_addr_expr (new_var);
9488 new_var = fold_convert (ptype, new_var);
9489 ref = fold_convert (ptype, ref);
9490 tree m = create_tmp_var (ptype);
9491 gimplify_assign (m, new_var, end);
9492 new_var = m;
9493 m = create_tmp_var (ptype);
9494 gimplify_assign (m, ref, end);
9495 ref = m;
9496 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9497 tree body = create_artificial_label (UNKNOWN_LOCATION);
9498 tree endl = create_artificial_label (UNKNOWN_LOCATION);
9499 gimple_seq_add_stmt (end, gimple_build_label (body));
9500 tree priv = build_simple_mem_ref (new_var);
9501 tree out = build_simple_mem_ref (ref);
9502 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9503 {
9504 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9505 tree decl_placeholder
9506 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9507 tree lab6 = NULL_TREE;
9508 if (cancellable)
9509 {
9510 /* If this reduction needs destruction and parallel
9511 has been cancelled, jump around the merge operation
9512 to the destruction. */
9513 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9514 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9515 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9516 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9517 lab6, lab5);
9518 gimple_seq_add_stmt (end, g);
9519 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9520 }
9521 SET_DECL_VALUE_EXPR (placeholder, out);
9522 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9523 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9524 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9525 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9526 gimple_seq_add_seq (end,
9527 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9528 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9529 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9530 {
9531 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9532 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9533 }
9534 if (cancellable)
9535 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9536 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9537 if (x)
9538 {
9539 gimple_seq tseq = NULL;
9540 gimplify_stmt (&x, &tseq);
9541 gimple_seq_add_seq (end, tseq);
9542 }
9543 }
9544 else
9545 {
9546 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9547 out = unshare_expr (out);
9548 gimplify_assign (out, x, end);
9549 }
9550 gimple *g
9551 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9552 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9553 gimple_seq_add_stmt (end, g);
9554 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9555 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9556 gimple_seq_add_stmt (end, g);
9557 g = gimple_build_assign (i, PLUS_EXPR, i,
9558 build_int_cst (TREE_TYPE (i), 1));
9559 gimple_seq_add_stmt (end, g);
9560 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9561 gimple_seq_add_stmt (end, g);
9562 gimple_seq_add_stmt (end, gimple_build_label (endl));
9563 }
9564 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9565 {
9566 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9567 tree oldv = NULL_TREE;
9568 tree lab6 = NULL_TREE;
9569 if (cancellable)
9570 {
9571 /* If this reduction needs destruction and parallel
9572 has been cancelled, jump around the merge operation
9573 to the destruction. */
9574 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9575 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9576 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9577 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9578 lab6, lab5);
9579 gimple_seq_add_stmt (end, g);
9580 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9581 }
9582 if (omp_privatize_by_reference (decl)
9583 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9584 TREE_TYPE (ref)))
9585 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9586 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9587 tree refv = create_tmp_var (TREE_TYPE (ref));
9588 gimplify_assign (refv, ref, end);
9589 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9590 SET_DECL_VALUE_EXPR (placeholder, ref);
9591 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9592 tree d = maybe_lookup_decl (decl, ctx);
9593 gcc_assert (d);
9594 if (DECL_HAS_VALUE_EXPR_P (d))
9595 oldv = DECL_VALUE_EXPR (d);
9596 if (omp_privatize_by_reference (var))
9597 {
9598 tree v = fold_convert (TREE_TYPE (d),
9599 build_fold_addr_expr (new_var));
9600 SET_DECL_VALUE_EXPR (d, v);
9601 }
9602 else
9603 SET_DECL_VALUE_EXPR (d, new_var);
9604 DECL_HAS_VALUE_EXPR_P (d) = 1;
9605 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9606 if (oldv)
9607 SET_DECL_VALUE_EXPR (d, oldv);
9608 else
9609 {
9610 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9611 DECL_HAS_VALUE_EXPR_P (d) = 0;
9612 }
9613 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9614 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9615 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9616 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9617 if (cancellable)
9618 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9619 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9620 if (x)
9621 {
9622 gimple_seq tseq = NULL;
9623 gimplify_stmt (&x, &tseq);
9624 gimple_seq_add_seq (end, tseq);
9625 }
9626 }
9627 else
9628 {
9629 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9630 ref = unshare_expr (ref);
9631 gimplify_assign (ref, x, end);
9632 }
9633 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9634 ++cnt;
9635 field = DECL_CHAIN (bfield);
9636 }
9637 }
9638
9639 if (code == OMP_TASKGROUP)
9640 {
9641 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9642 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9643 gimple_seq_add_stmt (start, g);
9644 }
9645 else
9646 {
9647 tree c;
9648 if (code == OMP_FOR)
9649 c = gimple_omp_for_clauses (ctx->stmt);
9650 else if (code == OMP_SECTIONS)
9651 c = gimple_omp_sections_clauses (ctx->stmt);
9652 else if (code == OMP_SCOPE)
9653 c = gimple_omp_scope_clauses (ctx->stmt);
9654 else
9655 c = gimple_omp_taskreg_clauses (ctx->stmt);
9656 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9657 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9658 build_fold_addr_expr (avar));
9659 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9660 }
9661
9662 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9663 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9664 size_one_node));
9665 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9666 gimple_seq_add_stmt (end, g);
9667 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9668 if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9669 {
9670 enum built_in_function bfn
9671 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9672 t = builtin_decl_explicit (bfn);
9673 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9674 tree arg;
9675 if (cancellable)
9676 {
9677 arg = create_tmp_var (c_bool_type);
9678 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9679 cancellable));
9680 }
9681 else
9682 arg = build_int_cst (c_bool_type, 0);
9683 g = gimple_build_call (t, 1, arg);
9684 }
9685 else
9686 {
9687 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9688 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9689 }
9690 gimple_seq_add_stmt (end, g);
9691 if (lab7)
9692 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9693 t = build_constructor (atype, NULL);
9694 TREE_THIS_VOLATILE (t) = 1;
9695 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9696 }
9697
9698 /* Expand code for an OpenMP taskgroup directive. */
9699
9700 static void
9701 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9702 {
9703 gimple *stmt = gsi_stmt (*gsi_p);
9704 gcall *x;
9705 gbind *bind;
9706 gimple_seq dseq = NULL;
9707 tree block = make_node (BLOCK);
9708
9709 bind = gimple_build_bind (NULL, NULL, block);
9710 gsi_replace (gsi_p, bind, true);
9711 gimple_bind_add_stmt (bind, stmt);
9712
9713 push_gimplify_context ();
9714
9715 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9716 0);
9717 gimple_bind_add_stmt (bind, x);
9718
9719 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9720 gimple_omp_taskgroup_clauses (stmt),
9721 gimple_bind_body_ptr (bind), &dseq);
9722
9723 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9724 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9725 gimple_omp_set_body (stmt, NULL);
9726
9727 gimple_bind_add_seq (bind, dseq);
9728
9729 pop_gimplify_context (bind);
9730
9731 gimple_bind_append_vars (bind, ctx->block_vars);
9732 BLOCK_VARS (block) = ctx->block_vars;
9733 }
9734
9735
9736 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9737
9738 static void
9739 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9740 omp_context *ctx)
9741 {
9742 struct omp_for_data fd;
9743 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9744 return;
9745
9746 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9747 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9748 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9749 if (!fd.ordered)
9750 return;
9751
9752 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9753 tree c = gimple_omp_ordered_clauses (ord_stmt);
9754 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
9755 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
9756 {
9757 /* Merge depend clauses from multiple adjacent
9758 #pragma omp ordered depend(sink:...) constructs
9759 into one #pragma omp ordered depend(sink:...), so that
9760 we can optimize them together. */
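/* E.g. (editorial illustration) two adjacent standalone constructs

     #pragma omp ordered depend(sink: i-1)
     #pragma omp ordered depend(sink: i-2)

   have their clause chains spliced onto the first statement, leaving

     #pragma omp ordered depend(sink: i-1) depend(sink: i-2)

   which the folding code below can then canonicalize.  */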
9761 gimple_stmt_iterator gsi = *gsi_p;
9762 gsi_next (&gsi);
9763 while (!gsi_end_p (gsi))
9764 {
9765 gimple *stmt = gsi_stmt (gsi);
9766 if (is_gimple_debug (stmt)
9767 || gimple_code (stmt) == GIMPLE_NOP)
9768 {
9769 gsi_next (&gsi);
9770 continue;
9771 }
9772 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9773 break;
9774 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9775 c = gimple_omp_ordered_clauses (ord_stmt2);
9776 if (c == NULL_TREE
9777 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
9778 || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9779 break;
9780 while (*list_p)
9781 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9782 *list_p = c;
9783 gsi_remove (&gsi, true);
9784 }
9785 }
9786
9787 /* Canonicalize sink dependence clauses into one folded clause if
9788 possible.
9789
9790 The basic algorithm is to create a sink vector whose first
9791 element is the GCD of all the first elements, and whose remaining
9792 elements are the minimum of the subsequent columns.
9793
9794 We ignore dependence vectors whose first element is zero because
9795 such dependencies are known to be executed by the same thread.
9796
9797 We take into account the direction of the loop, so a minimum
9798 becomes a maximum if the loop is iterating forwards. We also
9799 ignore sink clauses where the loop direction is unknown, or where
9800 the offsets are clearly invalid because they are not a multiple
9801 of the loop increment.
9802
9803 For example:
9804
9805 #pragma omp for ordered(2)
9806 for (i=0; i < N; ++i)
9807 for (j=0; j < M; ++j)
9808 {
9809 #pragma omp ordered \
9810 depend(sink:i-8,j-2) \
9811 depend(sink:i,j-1) \ // Completely ignored because i+0.
9812 depend(sink:i-4,j-3) \
9813 depend(sink:i-6,j-4)
9814 #pragma omp ordered depend(source)
9815 }
9816
9817 Folded clause is:
9818
9819 depend(sink:-gcd(8,4,6),-min(2,3,4))
9820 -or-
9821 depend(sink:-2,-2)
9822 */
9823
9824 /* FIXME: Computing GCDs where the first element is zero is
9825 non-trivial in the presence of collapsed loops. Do this later. */
9826 if (fd.collapse > 1)
9827 return;
9828
9829 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9830
9831 /* wide_int is not a POD so it must be default-constructed. */
9832 for (unsigned i = 0; i != 2 * len - 1; ++i)
9833 new (static_cast<void*>(folded_deps + i)) wide_int ();
9834
9835 tree folded_dep = NULL_TREE;
9836 /* TRUE if the first dimension's offset is negative. */
9837 bool neg_offset_p = false;
9838
9839 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9840 unsigned int i;
9841 while ((c = *list_p) != NULL)
9842 {
9843 bool remove = false;
9844
9845 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
9846 if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
9847 goto next_ordered_clause;
9848
9849 tree vec;
9850 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9851 vec && TREE_CODE (vec) == TREE_LIST;
9852 vec = TREE_CHAIN (vec), ++i)
9853 {
9854 gcc_assert (i < len);
9855
9856 /* omp_extract_for_data has canonicalized the condition. */
9857 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9858 || fd.loops[i].cond_code == GT_EXPR);
9859 bool forward = fd.loops[i].cond_code == LT_EXPR;
9860 bool maybe_lexically_later = true;
9861
9862 /* While the committee makes up its mind, bail if we have any
9863 non-constant steps. */
9864 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9865 goto lower_omp_ordered_ret;
9866
9867 tree itype = TREE_TYPE (TREE_VALUE (vec));
9868 if (POINTER_TYPE_P (itype))
9869 itype = sizetype;
9870 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9871 TYPE_PRECISION (itype),
9872 TYPE_SIGN (itype));
9873
9874 /* Ignore invalid offsets that are not multiples of the step. */
9875 if (!wi::multiple_of_p (wi::abs (offset),
9876 wi::abs (wi::to_wide (fd.loops[i].step)),
9877 UNSIGNED))
9878 {
9879 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9880 "ignoring sink clause with offset that is not "
9881 "a multiple of the loop step");
9882 remove = true;
9883 goto next_ordered_clause;
9884 }
9885
9886 /* Calculate the first dimension. The first dimension of
9887 the folded dependency vector is the GCD of the first
9888 elements, while ignoring any first elements whose offset
9889 is 0. */
9890 if (i == 0)
9891 {
9892 /* Ignore dependence vectors whose first dimension is 0. */
9893 if (offset == 0)
9894 {
9895 remove = true;
9896 goto next_ordered_clause;
9897 }
9898 else
9899 {
9900 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9901 {
9902 error_at (OMP_CLAUSE_LOCATION (c),
9903 "first offset must be in opposite direction "
9904 "of loop iterations");
9905 goto lower_omp_ordered_ret;
9906 }
9907 if (forward)
9908 offset = -offset;
9909 neg_offset_p = forward;
9910 /* Initialize the first time around. */
9911 if (folded_dep == NULL_TREE)
9912 {
9913 folded_dep = c;
9914 folded_deps[0] = offset;
9915 }
9916 else
9917 folded_deps[0] = wi::gcd (folded_deps[0],
9918 offset, UNSIGNED);
9919 }
9920 }
9921 /* Calculate minimum for the remaining dimensions. */
9922 else
9923 {
9924 folded_deps[len + i - 1] = offset;
9925 if (folded_dep == c)
9926 folded_deps[i] = offset;
9927 else if (maybe_lexically_later
9928 && !wi::eq_p (folded_deps[i], offset))
9929 {
9930 if (forward ^ wi::gts_p (folded_deps[i], offset))
9931 {
9932 unsigned int j;
9933 folded_dep = c;
9934 for (j = 1; j <= i; j++)
9935 folded_deps[j] = folded_deps[len + j - 1];
9936 }
9937 else
9938 maybe_lexically_later = false;
9939 }
9940 }
9941 }
9942 gcc_assert (i == len);
9943
9944 remove = true;
9945
9946 next_ordered_clause:
9947 if (remove)
9948 *list_p = OMP_CLAUSE_CHAIN (c);
9949 else
9950 list_p = &OMP_CLAUSE_CHAIN (c);
9951 }
9952
9953 if (folded_dep)
9954 {
9955 if (neg_offset_p)
9956 folded_deps[0] = -folded_deps[0];
9957
9958 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9959 if (POINTER_TYPE_P (itype))
9960 itype = sizetype;
9961
9962 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9963 = wide_int_to_tree (itype, folded_deps[0]);
9964 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9965 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9966 }
9967
9968 lower_omp_ordered_ret:
9969
9970 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
9971 while we want a nop instead if we removed all the clauses ourselves. */
9972 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9973 gsi_replace (gsi_p, gimple_build_nop (), true);
9974 }
9975
9976
9977 /* Expand code for an OpenMP ordered directive. */
9978
9979 static void
9980 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9981 {
9982 tree block;
9983 gimple *stmt = gsi_stmt (*gsi_p), *g;
9984 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9985 gcall *x;
9986 gbind *bind;
9987 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9988 OMP_CLAUSE_SIMD);
9989 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9990 loop. */
9991 bool maybe_simt
9992 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9993 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9994 OMP_CLAUSE_THREADS);
9995
9996 if (gimple_omp_ordered_standalone_p (ord_stmt))
9997 {
9998 /* FIXME: This needs to be moved to expansion, to verify various
9999 conditions only testable on a cfg with dominators computed; also,
10000 all the depend clauses to be merged still might need to be available
10001 for the runtime checks. */
10002 if (0)
10003 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
10004 return;
10005 }
10006
10007 push_gimplify_context ();
10008
10009 block = make_node (BLOCK);
10010 bind = gimple_build_bind (NULL, NULL, block);
10011 gsi_replace (gsi_p, bind, true);
10012 gimple_bind_add_stmt (bind, stmt);
10013
10014 if (simd)
10015 {
10016 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
10017 build_int_cst (NULL_TREE, threads));
10018 cfun->has_simduid_loops = true;
10019 }
10020 else
10021 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
10022 0);
10023 gimple_bind_add_stmt (bind, x);
10024
10025 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
10026 if (maybe_simt)
10027 {
10028 counter = create_tmp_var (integer_type_node);
10029 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
10030 gimple_call_set_lhs (g, counter);
10031 gimple_bind_add_stmt (bind, g);
10032
10033 body = create_artificial_label (UNKNOWN_LOCATION);
10034 test = create_artificial_label (UNKNOWN_LOCATION);
10035 gimple_bind_add_stmt (bind, gimple_build_label (body));
10036
10037 tree simt_pred = create_tmp_var (integer_type_node);
10038 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
10039 gimple_call_set_lhs (g, simt_pred);
10040 gimple_bind_add_stmt (bind, g);
10041
10042 tree t = create_artificial_label (UNKNOWN_LOCATION);
10043 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
10044 gimple_bind_add_stmt (bind, g);
10045
10046 gimple_bind_add_stmt (bind, gimple_build_label (t));
10047 }
10048 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10049 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10050 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10051 gimple_omp_set_body (stmt, NULL);
10052
10053 if (maybe_simt)
10054 {
10055 gimple_bind_add_stmt (bind, gimple_build_label (test));
10056 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
10057 gimple_bind_add_stmt (bind, g);
10058
10059 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
10060 tree nonneg = create_tmp_var (integer_type_node);
10061 gimple_seq tseq = NULL;
10062 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
10063 gimple_bind_add_seq (bind, tseq);
10064
10065 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
10066 gimple_call_set_lhs (g, nonneg);
10067 gimple_bind_add_stmt (bind, g);
10068
10069 tree end = create_artificial_label (UNKNOWN_LOCATION);
10070 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
10071 gimple_bind_add_stmt (bind, g);
10072
10073 gimple_bind_add_stmt (bind, gimple_build_label (end));
10074 }
10075 if (simd)
10076 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
10077 build_int_cst (NULL_TREE, threads));
10078 else
10079 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
10080 0);
10081 gimple_bind_add_stmt (bind, x);
10082
10083 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10084
10085 pop_gimplify_context (bind);
10086
10087 gimple_bind_append_vars (bind, ctx->block_vars);
10088 BLOCK_VARS (block) = gimple_bind_vars (bind);
10089 }
10090
10091
10092 /* Expand code for an OpenMP scan directive and the structured block
10093 before the scan directive. */
10094
10095 static void
10096 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10097 {
10098 gimple *stmt = gsi_stmt (*gsi_p);
10099 bool has_clauses
10100 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10101 tree lane = NULL_TREE;
10102 gimple_seq before = NULL;
10103 omp_context *octx = ctx->outer;
10104 gcc_assert (octx);
10105 if (octx->scan_exclusive && !has_clauses)
10106 {
10107 gimple_stmt_iterator gsi2 = *gsi_p;
10108 gsi_next (&gsi2);
10109 gimple *stmt2 = gsi_stmt (gsi2);
10110 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10111 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10112 the one with exclusive clause(s), comes first. */
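/* E.g. (editorial illustration) for exclusive scan the source reads

     b[i] = s;            // scan phase, uses the previous prefix
     #pragma omp scan exclusive (s)
     s += a[i];           // input phase, produces this iteration's value

   so the input phase corresponds to the GIMPLE_OMP_SCAN carrying the
   exclusive clause and must be moved ahead of the scan-phase half.  */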
10113 if (stmt2
10114 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10115 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10116 {
10117 gsi_remove (gsi_p, false);
10118 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10119 ctx = maybe_lookup_ctx (stmt2);
10120 gcc_assert (ctx);
10121 lower_omp_scan (gsi_p, ctx);
10122 return;
10123 }
10124 }
10125
10126 bool input_phase = has_clauses ^ octx->scan_inclusive;
10127 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10128 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10129 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10130 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10131 && !gimple_omp_for_combined_p (octx->stmt));
10132 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10133 if (is_for_simd && octx->for_simd_scan_phase)
10134 is_simd = false;
10135 if (is_simd)
10136 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10137 OMP_CLAUSE__SIMDUID_))
10138 {
10139 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10140 lane = create_tmp_var (unsigned_type_node);
10141 tree t = build_int_cst (integer_type_node,
10142 input_phase ? 1
10143 : octx->scan_inclusive ? 2 : 3);
10144 gimple *g
10145 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10146 gimple_call_set_lhs (g, lane);
10147 gimple_seq_add_stmt (&before, g);
10148 }
10149
10150 if (is_simd || is_for)
10151 {
10152 for (tree c = gimple_omp_for_clauses (octx->stmt);
10153 c; c = OMP_CLAUSE_CHAIN (c))
10154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10155 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10156 {
10157 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10158 tree var = OMP_CLAUSE_DECL (c);
10159 tree new_var = lookup_decl (var, octx);
10160 tree val = new_var;
10161 tree var2 = NULL_TREE;
10162 tree var3 = NULL_TREE;
10163 tree var4 = NULL_TREE;
10164 tree lane0 = NULL_TREE;
10165 tree new_vard = new_var;
10166 if (omp_privatize_by_reference (var))
10167 {
10168 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10169 val = new_var;
10170 }
10171 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10172 {
10173 val = DECL_VALUE_EXPR (new_vard);
10174 if (new_vard != new_var)
10175 {
10176 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10177 val = TREE_OPERAND (val, 0);
10178 }
10179 if (TREE_CODE (val) == ARRAY_REF
10180 && VAR_P (TREE_OPERAND (val, 0)))
10181 {
10182 tree v = TREE_OPERAND (val, 0);
10183 if (lookup_attribute ("omp simd array",
10184 DECL_ATTRIBUTES (v)))
10185 {
10186 val = unshare_expr (val);
10187 lane0 = TREE_OPERAND (val, 1);
10188 TREE_OPERAND (val, 1) = lane;
10189 var2 = lookup_decl (v, octx);
10190 if (octx->scan_exclusive)
10191 var4 = lookup_decl (var2, octx);
10192 if (input_phase
10193 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10194 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10195 if (!input_phase)
10196 {
10197 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10198 var2, lane, NULL_TREE, NULL_TREE);
10199 TREE_THIS_NOTRAP (var2) = 1;
10200 if (octx->scan_exclusive)
10201 {
10202 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10203 var4, lane, NULL_TREE,
10204 NULL_TREE);
10205 TREE_THIS_NOTRAP (var4) = 1;
10206 }
10207 }
10208 else
10209 var2 = val;
10210 }
10211 }
10212 gcc_assert (var2);
10213 }
10214 else
10215 {
10216 var2 = build_outer_var_ref (var, octx);
10217 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10218 {
10219 var3 = maybe_lookup_decl (new_vard, octx);
10220 if (var3 == new_vard || var3 == NULL_TREE)
10221 var3 = NULL_TREE;
10222 else if (is_simd && octx->scan_exclusive && !input_phase)
10223 {
10224 var4 = maybe_lookup_decl (var3, octx);
10225 if (var4 == var3 || var4 == NULL_TREE)
10226 {
10227 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10228 {
10229 var4 = var3;
10230 var3 = NULL_TREE;
10231 }
10232 else
10233 var4 = NULL_TREE;
10234 }
10235 }
10236 }
10237 if (is_simd
10238 && octx->scan_exclusive
10239 && !input_phase
10240 && var4 == NULL_TREE)
10241 var4 = create_tmp_var (TREE_TYPE (val));
10242 }
10243 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10244 {
10245 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10246 if (input_phase)
10247 {
10248 if (var3)
10249 {
10250 /* If we've added a separate identity element
10251 variable, copy it over into val. */
10252 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10253 var3);
10254 gimplify_and_add (x, &before);
10255 }
10256 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10257 {
10258 /* Otherwise, assign to it the identity element. */
10259 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10260 if (is_for)
10261 tseq = copy_gimple_seq_and_replace_locals (tseq);
10262 tree ref = build_outer_var_ref (var, octx);
10263 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10264 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10265 if (x)
10266 {
10267 if (new_vard != new_var)
10268 val = build_fold_addr_expr_loc (clause_loc, val);
10269 SET_DECL_VALUE_EXPR (new_vard, val);
10270 }
10271 SET_DECL_VALUE_EXPR (placeholder, ref);
10272 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10273 lower_omp (&tseq, octx);
10274 if (x)
10275 SET_DECL_VALUE_EXPR (new_vard, x);
10276 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10277 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10278 gimple_seq_add_seq (&before, tseq);
10279 if (is_simd)
10280 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10281 }
10282 }
10283 else if (is_simd)
10284 {
10285 tree x;
10286 if (octx->scan_exclusive)
10287 {
10288 tree v4 = unshare_expr (var4);
10289 tree v2 = unshare_expr (var2);
10290 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10291 gimplify_and_add (x, &before);
10292 }
10293 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10294 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10295 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10296 tree vexpr = val;
10297 if (x && new_vard != new_var)
10298 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10299 if (x)
10300 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10301 SET_DECL_VALUE_EXPR (placeholder, var2);
10302 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10303 lower_omp (&tseq, octx);
10304 gimple_seq_add_seq (&before, tseq);
10305 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10306 if (x)
10307 SET_DECL_VALUE_EXPR (new_vard, x);
10308 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10309 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10310 if (octx->scan_inclusive)
10311 {
10312 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10313 var2);
10314 gimplify_and_add (x, &before);
10315 }
10316 else if (lane0 == NULL_TREE)
10317 {
10318 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10319 var4);
10320 gimplify_and_add (x, &before);
10321 }
10322 }
10323 }
10324 else
10325 {
10326 if (input_phase)
10327 {
10328 /* Input phase. Set val to the initializer before
10329 the body. */
10330 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10331 gimplify_assign (val, x, &before);
10332 }
10333 else if (is_simd)
10334 {
10335 /* Scan phase. */
10336 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10337 if (code == MINUS_EXPR)
10338 code = PLUS_EXPR;
10339
10340 tree x = build2 (code, TREE_TYPE (var2),
10341 unshare_expr (var2), unshare_expr (val));
10342 if (octx->scan_inclusive)
10343 {
10344 gimplify_assign (unshare_expr (var2), x, &before);
10345 gimplify_assign (val, var2, &before);
10346 }
10347 else
10348 {
10349 gimplify_assign (unshare_expr (var4),
10350 unshare_expr (var2), &before);
10351 gimplify_assign (var2, x, &before);
10352 if (lane0 == NULL_TREE)
10353 gimplify_assign (val, var4, &before);
10354 }
10355 }
10356 }
10357 if (octx->scan_exclusive && !input_phase && lane0)
10358 {
10359 tree vexpr = unshare_expr (var4);
10360 TREE_OPERAND (vexpr, 1) = lane0;
10361 if (new_vard != new_var)
10362 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10363 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10364 }
10365 }
10366 }
10367 if (is_simd && !is_for_simd)
10368 {
10369 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10370 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10371 gsi_replace (gsi_p, gimple_build_nop (), true);
10372 return;
10373 }
10374 lower_omp (gimple_omp_body_ptr (stmt), octx);
10375 if (before)
10376 {
10377 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
10378 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10379 }
10380 }
10381
10382
10383 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10384 substitution of a couple of function calls. But in the NAMED case, it
10385 requires that the languages coordinate on a symbol name. It is therefore
10386 best put here in common code. */
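/* E.g. (editorial illustration; 'write_log' is a hypothetical callee)

     #pragma omp critical (io)
       write_log ();

   is bracketed below by calls through BUILT_IN_GOMP_CRITICAL_NAME_START
   and BUILT_IN_GOMP_CRITICAL_NAME_END on a shared mutex symbol named
   ".gomp_critical_user_io", which is why all front ends must agree on
   the mangling.  */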
10387
10388 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10389
10390 static void
10391 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10392 {
10393 tree block;
10394 tree name, lock, unlock;
10395 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10396 gbind *bind;
10397 location_t loc = gimple_location (stmt);
10398 gimple_seq tbody;
10399
10400 name = gimple_omp_critical_name (stmt);
10401 if (name)
10402 {
10403 tree decl;
10404
10405 if (!critical_name_mutexes)
10406 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10407
10408 tree *n = critical_name_mutexes->get (name);
10409 if (n == NULL)
10410 {
10411 char *new_str;
10412
10413 decl = create_tmp_var_raw (ptr_type_node);
10414
10415 new_str = ACONCAT ((".gomp_critical_user_",
10416 IDENTIFIER_POINTER (name), NULL));
10417 DECL_NAME (decl) = get_identifier (new_str);
10418 TREE_PUBLIC (decl) = 1;
10419 TREE_STATIC (decl) = 1;
10420 DECL_COMMON (decl) = 1;
10421 DECL_ARTIFICIAL (decl) = 1;
10422 DECL_IGNORED_P (decl) = 1;
10423
10424 varpool_node::finalize_decl (decl);
10425
10426 critical_name_mutexes->put (name, decl);
10427 }
10428 else
10429 decl = *n;
10430
10431 /* If '#pragma omp critical' is inside offloaded region or
10432 inside function marked as offloadable, the symbol must be
10433 marked as offloadable too. */
10434 omp_context *octx;
10435 if (cgraph_node::get (current_function_decl)->offloadable)
10436 varpool_node::get_create (decl)->offloadable = 1;
10437 else
10438 for (octx = ctx->outer; octx; octx = octx->outer)
10439 if (is_gimple_omp_offloaded (octx->stmt))
10440 {
10441 varpool_node::get_create (decl)->offloadable = 1;
10442 break;
10443 }
10444
10445 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10446 lock = build_call_expr_loc (loc, lock, 1,
10447 build_fold_addr_expr_loc (loc, decl));
10448
10449 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10450 unlock = build_call_expr_loc (loc, unlock, 1,
10451 build_fold_addr_expr_loc (loc, decl));
10452 }
10453 else
10454 {
10455 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10456 lock = build_call_expr_loc (loc, lock, 0);
10457
10458 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10459 unlock = build_call_expr_loc (loc, unlock, 0);
10460 }
10461
10462 push_gimplify_context ();
10463
10464 block = make_node (BLOCK);
10465 bind = gimple_build_bind (NULL, NULL, block);
10466 gsi_replace (gsi_p, bind, true);
10467 gimple_bind_add_stmt (bind, stmt);
10468
10469 tbody = gimple_bind_body (bind);
10470 gimplify_and_add (lock, &tbody);
10471 gimple_bind_set_body (bind, tbody);
10472
10473 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10474 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10475 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10476 gimple_omp_set_body (stmt, NULL);
10477
10478 tbody = gimple_bind_body (bind);
10479 gimplify_and_add (unlock, &tbody);
10480 gimple_bind_set_body (bind, tbody);
10481
10482 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10483
10484 pop_gimplify_context (bind);
10485 gimple_bind_append_vars (bind, ctx->block_vars);
10486 BLOCK_VARS (block) = gimple_bind_vars (bind);
10487 }
10488
10489 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10490 for a lastprivate clause. Given a loop control predicate of (V
10491 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10492 is appended to *DLIST, iterator initialization is appended to
10493 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10494 to be emitted in a critical section. */
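/* For illustration (a sketch, not verbatim output): for a loop lowered
   from 'for (i = 0; i < n; i++)' the lastprivate code is gated on
   (i >= n), or on (i == n) when the step is known to be +1 or -1, which
   is easier for value-range propagation to see through.  */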
10495
10496 static void
10497 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10498 gimple_seq *dlist, gimple_seq *clist,
10499 struct omp_context *ctx)
10500 {
10501 tree clauses, cond, vinit;
10502 enum tree_code cond_code;
10503 gimple_seq stmts;
10504
10505 cond_code = fd->loop.cond_code;
10506 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10507
10508 /* When possible, use a strict equality expression. This can let
10509 optimizations such as VRP deduce the value and remove a copy. */
10510 if (tree_fits_shwi_p (fd->loop.step))
10511 {
10512 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10513 if (step == 1 || step == -1)
10514 cond_code = EQ_EXPR;
10515 }
10516
10517 tree n2 = fd->loop.n2;
10518 if (fd->collapse > 1
10519 && TREE_CODE (n2) != INTEGER_CST
10520 && gimple_omp_for_combined_into_p (fd->for_stmt))
10521 {
10522 struct omp_context *taskreg_ctx = NULL;
10523 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10524 {
10525 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10526 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10527 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10528 {
10529 if (gimple_omp_for_combined_into_p (gfor))
10530 {
10531 gcc_assert (ctx->outer->outer
10532 && is_parallel_ctx (ctx->outer->outer));
10533 taskreg_ctx = ctx->outer->outer;
10534 }
10535 else
10536 {
10537 struct omp_for_data outer_fd;
10538 omp_extract_for_data (gfor, &outer_fd, NULL);
10539 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10540 }
10541 }
10542 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10543 taskreg_ctx = ctx->outer->outer;
10544 }
10545 else if (is_taskreg_ctx (ctx->outer))
10546 taskreg_ctx = ctx->outer;
10547 if (taskreg_ctx)
10548 {
10549 int i;
10550 tree taskreg_clauses
10551 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10552 tree innerc = omp_find_clause (taskreg_clauses,
10553 OMP_CLAUSE__LOOPTEMP_);
10554 gcc_assert (innerc);
10555 int count = fd->collapse;
10556 if (fd->non_rect
10557 && fd->last_nonrect == fd->first_nonrect + 1)
10558 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10559 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10560 count += 4;
10561 for (i = 0; i < count; i++)
10562 {
10563 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10564 OMP_CLAUSE__LOOPTEMP_);
10565 gcc_assert (innerc);
10566 }
10567 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10568 OMP_CLAUSE__LOOPTEMP_);
10569 if (innerc)
10570 n2 = fold_convert (TREE_TYPE (n2),
10571 lookup_decl (OMP_CLAUSE_DECL (innerc),
10572 taskreg_ctx));
10573 }
10574 }
10575 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10576
10577 clauses = gimple_omp_for_clauses (fd->for_stmt);
10578 stmts = NULL;
10579 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10580 if (!gimple_seq_empty_p (stmts))
10581 {
10582 gimple_seq_add_seq (&stmts, *dlist);
10583 *dlist = stmts;
10584
10585 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10586 vinit = fd->loop.n1;
10587 if (cond_code == EQ_EXPR
10588 && tree_fits_shwi_p (fd->loop.n2)
10589 && ! integer_zerop (fd->loop.n2))
10590 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10591 else
10592 vinit = unshare_expr (vinit);
10593
10594 /* Initialize the iterator variable, so that threads that don't execute
10595 any iterations don't execute the lastprivate clauses by accident. */
10596 gimplify_assign (fd->loop.v, vinit, body_p);
10597 }
10598 }
10599
10600 /* OpenACC privatization.
10601
10602 Or, in other words, *sharing* at the respective OpenACC level of
10603 parallelism.
10604
10605 From a correctness perspective, a non-addressable variable can't be accessed
10606 outside the current thread, so it can go in a (faster than shared memory)
10607 register -- though that register may need to be broadcast in some
10608 circumstances. A variable can only meaningfully be "shared" across workers
10609 or vector lanes if its address is taken, e.g. by a call to an atomic
10610 builtin.
10611
10612 From an optimization perspective, the answer might be fuzzier: maybe
10613 sometimes, using shared memory directly would be faster than
10614 broadcasting. */
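/* For illustration (a hypothetical example): in

     #pragma acc parallel loop private(w)
     for (...) { ... __atomic_fetch_add (&w, 1, __ATOMIC_RELAXED); ... }

   'w' is addressable but neither static nor external, so it passes the
   checks in oacc_privatization_candidate_p below and is recorded as a
   candidate for adjusting its privatization level.  */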
10615
10616 static void
10617 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10618 const location_t loc, const tree c,
10619 const tree decl)
10620 {
10621 const dump_user_location_t d_u_loc
10622 = dump_user_location_t::from_location_t (loc);
10623 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10624 #if __GNUC__ >= 10
10625 # pragma GCC diagnostic push
10626 # pragma GCC diagnostic ignored "-Wformat"
10627 #endif
10628 dump_printf_loc (l_dump_flags, d_u_loc,
10629 "variable %<%T%> ", decl);
10630 #if __GNUC__ >= 10
10631 # pragma GCC diagnostic pop
10632 #endif
10633 if (c)
10634 dump_printf (l_dump_flags,
10635 "in %qs clause ",
10636 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10637 else
10638 dump_printf (l_dump_flags,
10639 "declared in block ");
10640 }
10641
10642 static bool
10643 oacc_privatization_candidate_p (const location_t loc, const tree c,
10644 const tree decl)
10645 {
10646 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10647
10648 /* There is some differentiation depending on block vs. clause. */
10649 bool block = !c;
10650
10651 bool res = true;
10652
10653 if (res && !VAR_P (decl))
10654 {
10655 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10656 privatized into a new VAR_DECL. */
10657 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10658
10659 res = false;
10660
10661 if (dump_enabled_p ())
10662 {
10663 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10664 dump_printf (l_dump_flags,
10665 "potentially has improper OpenACC privatization level: %qs\n",
10666 get_tree_code_name (TREE_CODE (decl)));
10667 }
10668 }
10669
10670 if (res && block && TREE_STATIC (decl))
10671 {
10672 res = false;
10673
10674 if (dump_enabled_p ())
10675 {
10676 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10677 dump_printf (l_dump_flags,
10678 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10679 "static");
10680 }
10681 }
10682
10683 if (res && block && DECL_EXTERNAL (decl))
10684 {
10685 res = false;
10686
10687 if (dump_enabled_p ())
10688 {
10689 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10690 dump_printf (l_dump_flags,
10691 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10692 "external");
10693 }
10694 }
10695
10696 if (res && !TREE_ADDRESSABLE (decl))
10697 {
10698 res = false;
10699
10700 if (dump_enabled_p ())
10701 {
10702 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10703 dump_printf (l_dump_flags,
10704 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10705 "not addressable");
10706 }
10707 }
10708
10709 if (res)
10710 {
10711 if (dump_enabled_p ())
10712 {
10713 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10714 dump_printf (l_dump_flags,
10715 "is candidate for adjusting OpenACC privatization level\n");
10716 }
10717 }
10718
10719 if (dump_file && (dump_flags & TDF_DETAILS))
10720 {
10721 print_generic_decl (dump_file, decl, dump_flags);
10722 fprintf (dump_file, "\n");
10723 }
10724
10725 return res;
10726 }
10727
10728 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10729 CTX. */
10730
10731 static void
10732 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10733 {
10734 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10735 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10736 {
10737 tree decl = OMP_CLAUSE_DECL (c);
10738
10739 tree new_decl = lookup_decl (decl, ctx);
10740
10741 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10742 new_decl))
10743 continue;
10744
10745 gcc_checking_assert
10746 (!ctx->oacc_privatization_candidates.contains (new_decl));
10747 ctx->oacc_privatization_candidates.safe_push (new_decl);
10748 }
10749 }
10750
10751 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10752 CTX. */
10753
10754 static void
10755 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10756 {
10757 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10758 {
10759 tree new_decl = lookup_decl (decl, ctx);
10760 gcc_checking_assert (new_decl == decl);
10761
10762 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10763 new_decl))
10764 continue;
10765
10766 gcc_checking_assert
10767 (!ctx->oacc_privatization_candidates.contains (new_decl));
10768 ctx->oacc_privatization_candidates.safe_push (new_decl);
10769 }
10770 }
10771
10772 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
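/* (The walk stops, returning non-NULL, as soon as the first
   GIMPLE_OMP_SCAN is found, and its iterator is handed back through
   WI->info; wrapper statements and combined inner simd loops are walked
   into, everything else is skipped.)  */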
10773
10774 static tree
10775 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10776 struct walk_stmt_info *wi)
10777 {
10778 gimple *stmt = gsi_stmt (*gsi_p);
10779
10780 *handled_ops_p = true;
10781 switch (gimple_code (stmt))
10782 {
10783 WALK_SUBSTMTS;
10784
10785 case GIMPLE_OMP_FOR:
10786 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10787 && gimple_omp_for_combined_into_p (stmt))
10788 *handled_ops_p = false;
10789 break;
10790
10791 case GIMPLE_OMP_SCAN:
10792 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10793 return integer_zero_node;
10794 default:
10795 break;
10796 }
10797 return NULL;
10798 }
10799
10800 /* Helper function for lower_omp_for; add transformations for a worksharing
10801 loop with scan directives inside it.
10802 For a worksharing loop not combined with simd, transform:
10803 #pragma omp for reduction(inscan,+:r) private(i)
10804 for (i = 0; i < n; i = i + 1)
10805 {
10806 {
10807 update (r);
10808 }
10809 #pragma omp scan inclusive(r)
10810 {
10811 use (r);
10812 }
10813 }
10814
10815 into two worksharing loops + code to merge results:
10816
10817 num_threads = omp_get_num_threads ();
10818 thread_num = omp_get_thread_num ();
10819 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10820 <D.2099>:
10821 var2 = r;
10822 goto <D.2101>;
10823 <D.2100>:
10824 // For UDRs this is UDR init, or if ctors are needed, copy from
10825 // var3 that has been constructed to contain the neutral element.
10826 var2 = 0;
10827 <D.2101>:
10828 ivar = 0;
10829 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10830 // a shared array with num_threads elements and rprivb to a local array
10831 // with a number of elements equal to the number of (contiguous)
10832 // iterations the current thread will perform. The controlb and controlp
10833 // variables are temporaries used to handle deallocation of rprivb at
10834 // the end of the second GOMP_FOR.
10835 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10836 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10837 for (i = 0; i < n; i = i + 1)
10838 {
10839 {
10840 // For UDRs this is UDR init or copy from var3.
10841 r = 0;
10842 // This is the input phase from user code.
10843 update (r);
10844 }
10845 {
10846 // For UDRs this is UDR merge.
10847 var2 = var2 + r;
10848 // Rather than handing it over to the user, save it to the current
10849 // thread's local array.
10850 rprivb[ivar] = var2;
10851 // For exclusive scan, the above two statements are swapped.
10852 ivar = ivar + 1;
10853 }
10854 }
10855 // And remember this thread's final value in the shared
10856 // rpriva array.
10857 rpriva[(sizetype) thread_num] = var2;
10858 // If there is more than one thread, compute the inclusive parallel
10859 // scan of the rpriva array using a work-efficient prefix sum.
10860 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10861 <D.2102>:
10862 GOMP_barrier ();
10863 down = 0;
10864 k = 1;
10865 num_threadsu = (unsigned int) num_threads;
10866 thread_nump1 = (unsigned int) thread_num + 1;
10867 <D.2108>:
10868 twok = k << 1;
10869 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10870 <D.2110>:
10871 down = 4294967295;
10872 k = k >> 1;
10873 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10874 <D.2112>:
10875 k = k >> 1;
10876 <D.2111>:
10877 twok = k << 1;
10878 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10879 mul = REALPART_EXPR <cplx>;
10880 ovf = IMAGPART_EXPR <cplx>;
10881 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10882 <D.2116>:
10883 andv = k & down;
10884 andvm1 = andv + 4294967295;
10885 l = mul + andvm1;
10886 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10887 <D.2120>:
10888 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10889 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10890 rpriva[l] = rpriva[l - k] + rpriva[l];
10891 <D.2117>:
10892 if (down == 0) goto <D.2121>; else goto <D.2122>;
10893 <D.2121>:
10894 k = k << 1;
10895 goto <D.2123>;
10896 <D.2122>:
10897 k = k >> 1;
10898 <D.2123>:
10899 GOMP_barrier ();
10900 if (k != 0) goto <D.2108>; else goto <D.2103>;
10901 <D.2103>:
10902 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10903 <D.2124>:
10904 // For UDRs this is UDR init or copy from var3.
10905 var2 = 0;
10906 goto <D.2126>;
10907 <D.2125>:
10908 var2 = rpriva[thread_num - 1];
10909 <D.2126>:
10910 ivar = 0;
10911 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10912 reduction(inscan,+:r) private(i)
10913 for (i = 0; i < n; i = i + 1)
10914 {
10915 {
10916 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10917 r = var2 + rprivb[ivar];
10918 }
10919 {
10920 // This is the scan phase from user code.
10921 use (r);
10922 // Plus a bump of the iterator.
10923 ivar = ivar + 1;
10924 }
10925 } */
10926
10927 static void
10928 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10929 struct omp_for_data *fd, omp_context *ctx)
10930 {
10931 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10932 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10933
10934 gimple_seq body = gimple_omp_body (stmt);
10935 gimple_stmt_iterator input1_gsi = gsi_none ();
10936 struct walk_stmt_info wi;
10937 memset (&wi, 0, sizeof (wi));
10938 wi.val_only = true;
10939 wi.info = (void *) &input1_gsi;
10940 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10941 gcc_assert (!gsi_end_p (input1_gsi));
10942
10943 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10944 gimple_stmt_iterator gsi = input1_gsi;
10945 gsi_next (&gsi);
10946 gimple_stmt_iterator scan1_gsi = gsi;
10947 gimple *scan_stmt1 = gsi_stmt (gsi);
10948 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10949
10950 gimple_seq input_body = gimple_omp_body (input_stmt1);
10951 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10952 gimple_omp_set_body (input_stmt1, NULL);
10953 gimple_omp_set_body (scan_stmt1, NULL);
10954 gimple_omp_set_body (stmt, NULL);
10955
10956 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10957 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10958 gimple_omp_set_body (stmt, body);
10959 gimple_omp_set_body (input_stmt1, input_body);
10960
10961 gimple_stmt_iterator input2_gsi = gsi_none ();
10962 memset (&wi, 0, sizeof (wi));
10963 wi.val_only = true;
10964 wi.info = (void *) &input2_gsi;
10965 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10966 gcc_assert (!gsi_end_p (input2_gsi));
10967
10968 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10969 gsi = input2_gsi;
10970 gsi_next (&gsi);
10971 gimple_stmt_iterator scan2_gsi = gsi;
10972 gimple *scan_stmt2 = gsi_stmt (gsi);
10973 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10974 gimple_omp_set_body (scan_stmt2, scan_body);
10975
10976 gimple_stmt_iterator input3_gsi = gsi_none ();
10977 gimple_stmt_iterator scan3_gsi = gsi_none ();
10978 gimple_stmt_iterator input4_gsi = gsi_none ();
10979 gimple_stmt_iterator scan4_gsi = gsi_none ();
10980 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10981 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10982 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10983 if (is_for_simd)
10984 {
10985 memset (&wi, 0, sizeof (wi));
10986 wi.val_only = true;
10987 wi.info = (void *) &input3_gsi;
10988 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10989 gcc_assert (!gsi_end_p (input3_gsi));
10990
10991 input_stmt3 = gsi_stmt (input3_gsi);
10992 gsi = input3_gsi;
10993 gsi_next (&gsi);
10994 scan3_gsi = gsi;
10995 scan_stmt3 = gsi_stmt (gsi);
10996 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10997
10998 memset (&wi, 0, sizeof (wi));
10999 wi.val_only = true;
11000 wi.info = (void *) &input4_gsi;
11001 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
11002 gcc_assert (!gsi_end_p (input4_gsi));
11003
11004 input_stmt4 = gsi_stmt (input4_gsi);
11005 gsi = input4_gsi;
11006 gsi_next (&gsi);
11007 scan4_gsi = gsi;
11008 scan_stmt4 = gsi_stmt (gsi);
11009 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
11010
11011 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
11012 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
11013 }
11014
11015 tree num_threads = create_tmp_var (integer_type_node);
11016 tree thread_num = create_tmp_var (integer_type_node);
11017 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
11018 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
11019 gimple *g = gimple_build_call (nthreads_decl, 0);
11020 gimple_call_set_lhs (g, num_threads);
11021 gimple_seq_add_stmt (body_p, g);
11022 g = gimple_build_call (threadnum_decl, 0);
11023 gimple_call_set_lhs (g, thread_num);
11024 gimple_seq_add_stmt (body_p, g);
11025
11026 tree ivar = create_tmp_var (sizetype);
11027 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11028 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11029 tree k = create_tmp_var (unsigned_type_node);
11030 tree l = create_tmp_var (unsigned_type_node);
11031
11032 gimple_seq clist = NULL, mdlist = NULL;
11033 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11034 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11035 gimple_seq scan1_list = NULL, input2_list = NULL;
11036 gimple_seq last_list = NULL, reduc_list = NULL;
11037 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11038 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11039 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11040 {
11041 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11042 tree var = OMP_CLAUSE_DECL (c);
11043 tree new_var = lookup_decl (var, ctx);
11044 tree var3 = NULL_TREE;
11045 tree new_vard = new_var;
11046 if (omp_privatize_by_reference (var))
11047 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11048 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11049 {
11050 var3 = maybe_lookup_decl (new_vard, ctx);
11051 if (var3 == new_vard)
11052 var3 = NULL_TREE;
11053 }
11054
11055 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11056 tree rpriva = create_tmp_var (ptype);
11057 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11058 OMP_CLAUSE_DECL (nc) = rpriva;
11059 *cp1 = nc;
11060 cp1 = &OMP_CLAUSE_CHAIN (nc);
11061
11062 tree rprivb = create_tmp_var (ptype);
11063 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11064 OMP_CLAUSE_DECL (nc) = rprivb;
11065 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11066 *cp1 = nc;
11067 cp1 = &OMP_CLAUSE_CHAIN (nc);
11068
11069 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11070 if (new_vard != new_var)
11071 TREE_ADDRESSABLE (var2) = 1;
11072 gimple_add_tmp_var (var2);
11073
11074 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11075 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11076 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11077 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11078 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11079
11080 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11081 thread_num, integer_minus_one_node);
11082 x = fold_convert_loc (clause_loc, sizetype, x);
11083 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11084 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11085 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11086 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11087
11088 x = fold_convert_loc (clause_loc, sizetype, l);
11089 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11090 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11091 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11092 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11093
11094 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11095 x = fold_convert_loc (clause_loc, sizetype, x);
11096 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11097 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11098 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11099 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11100
11101 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11102 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11103 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11104 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11105
11106 tree var4 = is_for_simd ? new_var : var2;
11107 tree var5 = NULL_TREE, var6 = NULL_TREE;
11108 if (is_for_simd)
11109 {
11110 var5 = lookup_decl (var, input_simd_ctx);
11111 var6 = lookup_decl (var, scan_simd_ctx);
11112 if (new_vard != new_var)
11113 {
11114 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11115 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11116 }
11117 }
11118 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11119 {
11120 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11121 tree val = var2;
11122
11123 x = lang_hooks.decls.omp_clause_default_ctor
11124 (c, var2, build_outer_var_ref (var, ctx));
11125 if (x)
11126 gimplify_and_add (x, &clist);
11127
11128 x = build_outer_var_ref (var, ctx);
11129 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11130 x);
11131 gimplify_and_add (x, &thr01_list);
11132
11133 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11134 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11135 if (var3)
11136 {
11137 x = unshare_expr (var4);
11138 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11139 gimplify_and_add (x, &thrn1_list);
11140 x = unshare_expr (var4);
11141 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11142 gimplify_and_add (x, &thr02_list);
11143 }
11144 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11145 {
11146 /* Otherwise, assign to it the identity element. */
11147 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11148 tseq = copy_gimple_seq_and_replace_locals (tseq);
11149 if (!is_for_simd)
11150 {
11151 if (new_vard != new_var)
11152 val = build_fold_addr_expr_loc (clause_loc, val);
11153 SET_DECL_VALUE_EXPR (new_vard, val);
11154 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11155 }
11156 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11157 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11158 lower_omp (&tseq, ctx);
11159 gimple_seq_add_seq (&thrn1_list, tseq);
11160 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11161 lower_omp (&tseq, ctx);
11162 gimple_seq_add_seq (&thr02_list, tseq);
11163 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11164 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11165 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11166 if (y)
11167 SET_DECL_VALUE_EXPR (new_vard, y);
11168 else
11169 {
11170 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11171 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11172 }
11173 }
11174
11175 x = unshare_expr (var4);
11176 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11177 gimplify_and_add (x, &thrn2_list);
11178
11179 if (is_for_simd)
11180 {
11181 x = unshare_expr (rprivb_ref);
11182 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11183 gimplify_and_add (x, &scan1_list);
11184 }
11185 else
11186 {
11187 if (ctx->scan_exclusive)
11188 {
11189 x = unshare_expr (rprivb_ref);
11190 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11191 gimplify_and_add (x, &scan1_list);
11192 }
11193
11194 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11195 tseq = copy_gimple_seq_and_replace_locals (tseq);
11196 SET_DECL_VALUE_EXPR (placeholder, var2);
11197 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11198 lower_omp (&tseq, ctx);
11199 gimple_seq_add_seq (&scan1_list, tseq);
11200
11201 if (ctx->scan_inclusive)
11202 {
11203 x = unshare_expr (rprivb_ref);
11204 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11205 gimplify_and_add (x, &scan1_list);
11206 }
11207 }
11208
11209 x = unshare_expr (rpriva_ref);
11210 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11211 unshare_expr (var4));
11212 gimplify_and_add (x, &mdlist);
11213
11214 x = unshare_expr (is_for_simd ? var6 : new_var);
11215 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11216 gimplify_and_add (x, &input2_list);
11217
11218 val = rprivb_ref;
11219 if (new_vard != new_var)
11220 val = build_fold_addr_expr_loc (clause_loc, val);
11221
11222 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11223 tseq = copy_gimple_seq_and_replace_locals (tseq);
11224 SET_DECL_VALUE_EXPR (new_vard, val);
11225 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11226 if (is_for_simd)
11227 {
11228 SET_DECL_VALUE_EXPR (placeholder, var6);
11229 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11230 }
11231 else
11232 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11233 lower_omp (&tseq, ctx);
11234 if (y)
11235 SET_DECL_VALUE_EXPR (new_vard, y);
11236 else
11237 {
11238 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11239 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11240 }
11241 if (!is_for_simd)
11242 {
11243 SET_DECL_VALUE_EXPR (placeholder, new_var);
11244 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11245 lower_omp (&tseq, ctx);
11246 }
11247 gimple_seq_add_seq (&input2_list, tseq);
11248
11249 x = build_outer_var_ref (var, ctx);
11250 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11251 gimplify_and_add (x, &last_list);
11252
11253 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11254 gimplify_and_add (x, &reduc_list);
11255 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11256 tseq = copy_gimple_seq_and_replace_locals (tseq);
11257 val = rprival_ref;
11258 if (new_vard != new_var)
11259 val = build_fold_addr_expr_loc (clause_loc, val);
11260 SET_DECL_VALUE_EXPR (new_vard, val);
11261 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11262 SET_DECL_VALUE_EXPR (placeholder, var2);
11263 lower_omp (&tseq, ctx);
11264 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11265 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11266 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11267 if (y)
11268 SET_DECL_VALUE_EXPR (new_vard, y);
11269 else
11270 {
11271 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11272 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11273 }
11274 gimple_seq_add_seq (&reduc_list, tseq);
11275 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11276 gimplify_and_add (x, &reduc_list);
11277
11278 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11279 if (x)
11280 gimplify_and_add (x, dlist);
11281 }
11282 else
11283 {
11284 x = build_outer_var_ref (var, ctx);
11285 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11286
11287 x = omp_reduction_init (c, TREE_TYPE (new_var));
11288 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11289 &thrn1_list);
11290 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11291
11292 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11293
11294 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11295 if (code == MINUS_EXPR)
11296 code = PLUS_EXPR;
11297
11298 if (is_for_simd)
11299 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11300 else
11301 {
11302 if (ctx->scan_exclusive)
11303 gimplify_assign (unshare_expr (rprivb_ref), var2,
11304 &scan1_list);
11305 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11306 gimplify_assign (var2, x, &scan1_list);
11307 if (ctx->scan_inclusive)
11308 gimplify_assign (unshare_expr (rprivb_ref), var2,
11309 &scan1_list);
11310 }
11311
11312 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11313 &mdlist);
11314
11315 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11316 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11317
11318 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11319 &last_list);
11320
11321 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11322 unshare_expr (rprival_ref));
11323 gimplify_assign (rprival_ref, x, &reduc_list);
11324 }
11325 }
11326
11327 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11328 gimple_seq_add_stmt (&scan1_list, g);
11329 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11330 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11331 ? scan_stmt4 : scan_stmt2), g);
11332
11333 tree controlb = create_tmp_var (boolean_type_node);
11334 tree controlp = create_tmp_var (ptr_type_node);
11335 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11336 OMP_CLAUSE_DECL (nc) = controlb;
11337 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11338 *cp1 = nc;
11339 cp1 = &OMP_CLAUSE_CHAIN (nc);
11340 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11341 OMP_CLAUSE_DECL (nc) = controlp;
11342 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11343 *cp1 = nc;
11344 cp1 = &OMP_CLAUSE_CHAIN (nc);
11345 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11346 OMP_CLAUSE_DECL (nc) = controlb;
11347 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11348 *cp2 = nc;
11349 cp2 = &OMP_CLAUSE_CHAIN (nc);
11350 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11351 OMP_CLAUSE_DECL (nc) = controlp;
11352 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11353 *cp2 = nc;
11354 cp2 = &OMP_CLAUSE_CHAIN (nc);
11355
11356 *cp1 = gimple_omp_for_clauses (stmt);
11357 gimple_omp_for_set_clauses (stmt, new_clauses1);
11358 *cp2 = gimple_omp_for_clauses (new_stmt);
11359 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11360
11361 if (is_for_simd)
11362 {
11363 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11364 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11365
11366 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11367 GSI_SAME_STMT);
11368 gsi_remove (&input3_gsi, true);
11369 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11370 GSI_SAME_STMT);
11371 gsi_remove (&scan3_gsi, true);
11372 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11373 GSI_SAME_STMT);
11374 gsi_remove (&input4_gsi, true);
11375 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11376 GSI_SAME_STMT);
11377 gsi_remove (&scan4_gsi, true);
11378 }
11379 else
11380 {
11381 gimple_omp_set_body (scan_stmt1, scan1_list);
11382 gimple_omp_set_body (input_stmt2, input2_list);
11383 }
11384
11385 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11386 GSI_SAME_STMT);
11387 gsi_remove (&input1_gsi, true);
11388 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11389 GSI_SAME_STMT);
11390 gsi_remove (&scan1_gsi, true);
11391 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11392 GSI_SAME_STMT);
11393 gsi_remove (&input2_gsi, true);
11394 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11395 GSI_SAME_STMT);
11396 gsi_remove (&scan2_gsi, true);
11397
11398 gimple_seq_add_seq (body_p, clist);
11399
11400 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11401 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11402 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11403 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11404 gimple_seq_add_stmt (body_p, g);
11405 g = gimple_build_label (lab1);
11406 gimple_seq_add_stmt (body_p, g);
11407 gimple_seq_add_seq (body_p, thr01_list);
11408 g = gimple_build_goto (lab3);
11409 gimple_seq_add_stmt (body_p, g);
11410 g = gimple_build_label (lab2);
11411 gimple_seq_add_stmt (body_p, g);
11412 gimple_seq_add_seq (body_p, thrn1_list);
11413 g = gimple_build_label (lab3);
11414 gimple_seq_add_stmt (body_p, g);
11415
11416 g = gimple_build_assign (ivar, size_zero_node);
11417 gimple_seq_add_stmt (body_p, g);
11418
11419 gimple_seq_add_stmt (body_p, stmt);
11420 gimple_seq_add_seq (body_p, body);
11421 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11422 fd->loop.v));
11423
11424 g = gimple_build_omp_return (true);
11425 gimple_seq_add_stmt (body_p, g);
11426 gimple_seq_add_seq (body_p, mdlist);
11427
11428 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11429 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11430 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11431 gimple_seq_add_stmt (body_p, g);
11432 g = gimple_build_label (lab1);
11433 gimple_seq_add_stmt (body_p, g);
11434
11435 g = omp_build_barrier (NULL);
11436 gimple_seq_add_stmt (body_p, g);
11437
11438 tree down = create_tmp_var (unsigned_type_node);
11439 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11440 gimple_seq_add_stmt (body_p, g);
11441
11442 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11443 gimple_seq_add_stmt (body_p, g);
11444
11445 tree num_threadsu = create_tmp_var (unsigned_type_node);
11446 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11447 gimple_seq_add_stmt (body_p, g);
11448
11449 tree thread_numu = create_tmp_var (unsigned_type_node);
11450 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11451 gimple_seq_add_stmt (body_p, g);
11452
11453 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11454 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11455 build_int_cst (unsigned_type_node, 1));
11456 gimple_seq_add_stmt (body_p, g);
11457
11458 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11459 g = gimple_build_label (lab3);
11460 gimple_seq_add_stmt (body_p, g);
11461
11462 tree twok = create_tmp_var (unsigned_type_node);
11463 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11464 gimple_seq_add_stmt (body_p, g);
11465
11466 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11467 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11468 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11469 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11470 gimple_seq_add_stmt (body_p, g);
11471 g = gimple_build_label (lab4);
11472 gimple_seq_add_stmt (body_p, g);
11473 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11474 gimple_seq_add_stmt (body_p, g);
11475 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11476 gimple_seq_add_stmt (body_p, g);
11477
11478 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11479 gimple_seq_add_stmt (body_p, g);
11480 g = gimple_build_label (lab6);
11481 gimple_seq_add_stmt (body_p, g);
11482
11483 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11484 gimple_seq_add_stmt (body_p, g);
11485
11486 g = gimple_build_label (lab5);
11487 gimple_seq_add_stmt (body_p, g);
11488
11489 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11490 gimple_seq_add_stmt (body_p, g);
11491
11492 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11493 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11494 gimple_call_set_lhs (g, cplx);
11495 gimple_seq_add_stmt (body_p, g);
11496 tree mul = create_tmp_var (unsigned_type_node);
11497 g = gimple_build_assign (mul, REALPART_EXPR,
11498 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11499 gimple_seq_add_stmt (body_p, g);
11500 tree ovf = create_tmp_var (unsigned_type_node);
11501 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11502 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11503 gimple_seq_add_stmt (body_p, g);
11504
11505 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11506 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11507 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11508 lab7, lab8);
11509 gimple_seq_add_stmt (body_p, g);
11510 g = gimple_build_label (lab7);
11511 gimple_seq_add_stmt (body_p, g);
11512
11513 tree andv = create_tmp_var (unsigned_type_node);
11514 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11515 gimple_seq_add_stmt (body_p, g);
11516 tree andvm1 = create_tmp_var (unsigned_type_node);
11517 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11518 build_minus_one_cst (unsigned_type_node));
11519 gimple_seq_add_stmt (body_p, g);
11520
11521 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11522 gimple_seq_add_stmt (body_p, g);
11523
11524 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11525 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11526 gimple_seq_add_stmt (body_p, g);
11527 g = gimple_build_label (lab9);
11528 gimple_seq_add_stmt (body_p, g);
11529 gimple_seq_add_seq (body_p, reduc_list);
11530 g = gimple_build_label (lab8);
11531 gimple_seq_add_stmt (body_p, g);
11532
11533 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11534 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11535 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11536 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11537 lab10, lab11);
11538 gimple_seq_add_stmt (body_p, g);
11539 g = gimple_build_label (lab10);
11540 gimple_seq_add_stmt (body_p, g);
11541 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11542 gimple_seq_add_stmt (body_p, g);
11543 g = gimple_build_goto (lab12);
11544 gimple_seq_add_stmt (body_p, g);
11545 g = gimple_build_label (lab11);
11546 gimple_seq_add_stmt (body_p, g);
11547 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11548 gimple_seq_add_stmt (body_p, g);
11549 g = gimple_build_label (lab12);
11550 gimple_seq_add_stmt (body_p, g);
11551
11552 g = omp_build_barrier (NULL);
11553 gimple_seq_add_stmt (body_p, g);
11554
11555 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11556 lab3, lab2);
11557 gimple_seq_add_stmt (body_p, g);
11558
11559 g = gimple_build_label (lab2);
11560 gimple_seq_add_stmt (body_p, g);
11561
11562 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11563 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11564 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11565 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11566 gimple_seq_add_stmt (body_p, g);
11567 g = gimple_build_label (lab1);
11568 gimple_seq_add_stmt (body_p, g);
11569 gimple_seq_add_seq (body_p, thr02_list);
11570 g = gimple_build_goto (lab3);
11571 gimple_seq_add_stmt (body_p, g);
11572 g = gimple_build_label (lab2);
11573 gimple_seq_add_stmt (body_p, g);
11574 gimple_seq_add_seq (body_p, thrn2_list);
11575 g = gimple_build_label (lab3);
11576 gimple_seq_add_stmt (body_p, g);
11577
11578 g = gimple_build_assign (ivar, size_zero_node);
11579 gimple_seq_add_stmt (body_p, g);
11580 gimple_seq_add_stmt (body_p, new_stmt);
11581 gimple_seq_add_seq (body_p, new_body);
11582
11583 gimple_seq new_dlist = NULL;
11584 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11585 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11586 tree num_threadsm1 = create_tmp_var (integer_type_node);
11587 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11588 integer_minus_one_node);
11589 gimple_seq_add_stmt (&new_dlist, g);
11590 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11591 gimple_seq_add_stmt (&new_dlist, g);
11592 g = gimple_build_label (lab1);
11593 gimple_seq_add_stmt (&new_dlist, g);
11594 gimple_seq_add_seq (&new_dlist, last_list);
11595 g = gimple_build_label (lab2);
11596 gimple_seq_add_stmt (&new_dlist, g);
11597 gimple_seq_add_seq (&new_dlist, *dlist);
11598 *dlist = new_dlist;
11599 }
11600
11601 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11602 the addresses of variables to be made private at the surrounding
11603 parallelism level. Such functions appear in the gimple code stream in two
11604 forms, e.g. for a partitioned loop:
11605
11606 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11607 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11608 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11609 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11610
11611 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11612 not as part of a HEAD_MARK sequence:
11613
11614 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11615
11616 For such stand-alone appearances, the 3rd argument is always 0, denoting
11617 gang partitioning. */
11618
11619 static gcall *
11620 lower_oacc_private_marker (omp_context *ctx)
11621 {
11622 if (ctx->oacc_privatization_candidates.length () == 0)
11623 return NULL;
11624
11625 auto_vec<tree, 5> args;
11626
11627 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11628 args.quick_push (integer_zero_node);
11629 args.quick_push (integer_minus_one_node);
11630
11631 int i;
11632 tree decl;
11633 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11634 {
11635 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11636 tree addr = build_fold_addr_expr (decl);
11637 args.safe_push (addr);
11638 }
11639
11640 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11641 }
11642
11643 /* Lower code for an OMP loop directive. */
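/* In rough outline (a sketch of the result for the common case, with no
   inscan reductions and no OpenACC head/tail markers), the loop ends up
   wrapped in a new GIMPLE_BIND of the shape

     bind {
       <input clause setup: private/firstprivate/linear/reduction init>
       GIMPLE_OMP_FOR <clauses>
       <loop body>
       GIMPLE_OMP_CONTINUE (V, V)
       <reduction merge code>
       <lastprivate/destructor code>
       GIMPLE_OMP_RETURN (nowait-p)
     }  */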
11644
11645 static void
11646 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11647 {
11648 tree *rhs_p, block;
11649 struct omp_for_data fd, *fdp = NULL;
11650 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
11651 gbind *new_stmt;
11652 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
11653 gimple_seq cnt_list = NULL, clist = NULL;
11654 gimple_seq oacc_head = NULL, oacc_tail = NULL;
11655 size_t i;
11656
11657 push_gimplify_context ();
11658
11659 if (is_gimple_omp_oacc (ctx->stmt))
11660 oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));
11661
11662 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
11663
11664 block = make_node (BLOCK);
11665 new_stmt = gimple_build_bind (NULL, NULL, block);
11666 /* Replace at gsi right away, so that 'stmt' is no longer a member
11667 of a sequence, as we're going to add it to a different
11668 one below. */
11669 gsi_replace (gsi_p, new_stmt, true);
11670
11671 /* Move declarations of temporaries out of the loop body before we
11672 make it go away. */
11673 omp_for_body = gimple_omp_body (stmt);
11674 if (!gimple_seq_empty_p (omp_for_body)
11675 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
11676 {
11677 gbind *inner_bind
11678 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
11679 tree vars = gimple_bind_vars (inner_bind);
11680 if (is_gimple_omp_oacc (ctx->stmt))
11681 oacc_privatization_scan_decl_chain (ctx, vars);
11682 gimple_bind_append_vars (new_stmt, vars);
11683 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11684 keep them on the inner_bind and its block. */
11685 gimple_bind_set_vars (inner_bind, NULL_TREE);
11686 if (gimple_bind_block (inner_bind))
11687 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
11688 }
11689
11690 if (gimple_omp_for_combined_into_p (stmt))
11691 {
11692 omp_extract_for_data (stmt, &fd, NULL);
11693 fdp = &fd;
11694
11695 /* We need two temporaries with fd.loop.v type (istart/iend)
11696 and then (fd.collapse - 1) temporaries with the same
11697 type for count2 ... countN-1 vars if not constant. */
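/* For example (hypothetical numbers): 'collapse(3)' with a non-constant
   end bound needs count = 2 + (3 - 1) = 4 _looptemp_ clauses here.  */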
11698 size_t count = 2;
11699 tree type = fd.iter_type;
11700 if (fd.collapse > 1
11701 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11702 count += fd.collapse - 1;
11703 size_t count2 = 0;
11704 tree type2 = NULL_TREE;
11705 bool taskreg_for
11706 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
11707 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
11708 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
11709 tree simtc = NULL;
11710 tree clauses = *pc;
11711 if (fd.collapse > 1
11712 && fd.non_rect
11713 && fd.last_nonrect == fd.first_nonrect + 1
11714 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
11715 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
11716 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
11717 {
11718 v = gimple_omp_for_index (stmt, fd.first_nonrect);
11719 type2 = TREE_TYPE (v);
11720 count++;
11721 count2 = 3;
11722 }
11723 if (taskreg_for)
11724 outerc
11725 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
11726 OMP_CLAUSE__LOOPTEMP_);
11727 if (ctx->simt_stmt)
11728 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
11729 OMP_CLAUSE__LOOPTEMP_);
11730 for (i = 0; i < count + count2; i++)
11731 {
11732 tree temp;
11733 if (taskreg_for)
11734 {
11735 gcc_assert (outerc);
11736 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
11737 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
11738 OMP_CLAUSE__LOOPTEMP_);
11739 }
11740 else
11741 {
11742 /* If there are 2 adjacent SIMD stmts, one with _simt_
11743 clause, another without, make sure they have the same
11744 decls in _looptemp_ clauses, because the outer stmt
11745 they are combined into will look up just one inner_stmt. */
11746 if (ctx->simt_stmt)
11747 temp = OMP_CLAUSE_DECL (simtc);
11748 else
11749 temp = create_tmp_var (i >= count ? type2 : type);
11750 insert_decl_map (&ctx->outer->cb, temp, temp);
11751 }
11752 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
11753 OMP_CLAUSE_DECL (*pc) = temp;
11754 pc = &OMP_CLAUSE_CHAIN (*pc);
11755 if (ctx->simt_stmt)
11756 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
11757 OMP_CLAUSE__LOOPTEMP_);
11758 }
11759 *pc = clauses;
11760 }
11761
11762 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11763 dlist = NULL;
11764 body = NULL;
11765 tree rclauses
11766 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
11767 OMP_CLAUSE_REDUCTION);
11768 tree rtmp = NULL_TREE;
11769 if (rclauses)
11770 {
11771 tree type = build_pointer_type (pointer_sized_int_node);
11772 tree temp = create_tmp_var (type);
11773 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11774 OMP_CLAUSE_DECL (c) = temp;
11775 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
11776 gimple_omp_for_set_clauses (stmt, c);
11777 lower_omp_task_reductions (ctx, OMP_FOR,
11778 gimple_omp_for_clauses (stmt),
11779 &tred_ilist, &tred_dlist);
11780 rclauses = c;
11781 rtmp = make_ssa_name (type);
11782 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11783 }
11784
11785 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
11786 ctx);
11787
11788 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
11789 fdp);
11790 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11791 gimple_omp_for_pre_body (stmt));
11792
11793 lower_omp (gimple_omp_body_ptr (stmt), ctx);
11794
11795 gcall *private_marker = NULL;
11796 if (is_gimple_omp_oacc (ctx->stmt)
11797 && !gimple_seq_empty_p (omp_for_body))
11798 private_marker = lower_oacc_private_marker (ctx);
11799
11800 /* Lower the header expressions. At this point, we can assume that
11801 the header is of the form:
11802
11803 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11804
11805 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11806 using the .omp_data_s mapping, if needed. */
11807 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
11808 {
11809 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
11810 if (TREE_CODE (*rhs_p) == TREE_VEC)
11811 {
11812 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11813 TREE_VEC_ELT (*rhs_p, 1)
11814 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11815 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11816 TREE_VEC_ELT (*rhs_p, 2)
11817 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11818 }
11819 else if (!is_gimple_min_invariant (*rhs_p))
11820 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11821 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11822 recompute_tree_invariant_for_addr_expr (*rhs_p);
11823
11824 rhs_p = gimple_omp_for_final_ptr (stmt, i);
11825 if (TREE_CODE (*rhs_p) == TREE_VEC)
11826 {
11827 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11828 TREE_VEC_ELT (*rhs_p, 1)
11829 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11830 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11831 TREE_VEC_ELT (*rhs_p, 2)
11832 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11833 }
11834 else if (!is_gimple_min_invariant (*rhs_p))
11835 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11836 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11837 recompute_tree_invariant_for_addr_expr (*rhs_p);
11838
11839 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11840 if (!is_gimple_min_invariant (*rhs_p))
11841 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11842 }
11843 if (rclauses)
11844 gimple_seq_add_seq (&tred_ilist, cnt_list);
11845 else
11846 gimple_seq_add_seq (&body, cnt_list);
11847
11848 /* Once lowered, extract the bounds and clauses. */
11849 omp_extract_for_data (stmt, &fd, NULL);
11850
11851 if (is_gimple_omp_oacc (ctx->stmt)
11852 && !ctx_in_oacc_kernels_region (ctx))
11853 lower_oacc_head_tail (gimple_location (stmt),
11854 gimple_omp_for_clauses (stmt), private_marker,
11855 &oacc_head, &oacc_tail, ctx);
11856
11857 /* Add OpenACC partitioning and reduction markers just before the loop. */
11858 if (oacc_head)
11859 gimple_seq_add_seq (&body, oacc_head);
11860
11861 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11862
11863 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11864 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11865 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11866 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11867 {
11868 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11869 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11870 OMP_CLAUSE_LINEAR_STEP (c)
11871 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11872 ctx);
11873 }
11874
11875 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11876 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11877 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11878 else
11879 {
11880 gimple_seq_add_stmt (&body, stmt);
11881 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11882 }
11883
11884 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11885 fd.loop.v));
11886
11887 /* After the loop, add exit clauses. */
11888 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11889
11890 if (clist)
11891 {
11892 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11893 gcall *g = gimple_build_call (fndecl, 0);
11894 gimple_seq_add_stmt (&body, g);
11895 gimple_seq_add_seq (&body, clist);
11896 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11897 g = gimple_build_call (fndecl, 0);
11898 gimple_seq_add_stmt (&body, g);
11899 }
11900
11901 if (ctx->cancellable)
11902 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11903
11904 gimple_seq_add_seq (&body, dlist);
11905
11906 if (rclauses)
11907 {
11908 gimple_seq_add_seq (&tred_ilist, body);
11909 body = tred_ilist;
11910 }
11911
11912 body = maybe_catch_exception (body);
11913
11914 /* Region exit marker goes at the end of the loop body. */
11915 gimple *g = gimple_build_omp_return (fd.have_nowait);
11916 gimple_seq_add_stmt (&body, g);
11917
11918 gimple_seq_add_seq (&body, tred_dlist);
11919
11920 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11921
11922 if (rclauses)
11923 OMP_CLAUSE_DECL (rclauses) = rtmp;
11924
11925 /* Add OpenACC joining and reduction markers just after the loop. */
11926 if (oacc_tail)
11927 gimple_seq_add_seq (&body, oacc_tail);
11928
11929 pop_gimplify_context (new_stmt);
11930
11931 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11932 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11933 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11934 if (BLOCK_VARS (block))
11935 TREE_USED (block) = 1;
11936
11937 gimple_bind_set_body (new_stmt, body);
11938 gimple_omp_set_body (stmt, NULL);
11939 gimple_omp_for_set_pre_body (stmt, NULL);
11940 }
11941
11942 /* Callback for walk_stmts. Check whether the body contains nothing but
11943 a single GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS (ignoring debug stmts). */
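/* The *INFO counter encodes the state of the walk: 0 means no workshare
   statement seen yet, 1 means exactly one has been seen, and -1 means
   the body cannot form a combined parallel (a second workshare or some
   other non-debug statement was found).  */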
11944
11945 static tree
11946 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11947 bool *handled_ops_p,
11948 struct walk_stmt_info *wi)
11949 {
11950 int *info = (int *) wi->info;
11951 gimple *stmt = gsi_stmt (*gsi_p);
11952
11953 *handled_ops_p = true;
11954 switch (gimple_code (stmt))
11955 {
11956 WALK_SUBSTMTS;
11957
11958 case GIMPLE_DEBUG:
11959 break;
11960 case GIMPLE_OMP_FOR:
11961 case GIMPLE_OMP_SECTIONS:
11962 *info = *info == 0 ? 1 : -1;
11963 break;
11964 default:
11965 *info = -1;
11966 break;
11967 }
11968 return NULL;
11969 }
11970
11971 struct omp_taskcopy_context
11972 {
11973 /* This field must be at the beginning, as we do "inheritance": Some
11974 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11975 receive a copy_body_data pointer that is up-casted to an
11976 omp_context pointer. */
11977 copy_body_data cb;
11978 omp_context *ctx;
11979 };
11980
11981 static tree
11982 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11983 {
11984 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11985
11986 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11987 return create_tmp_var (TREE_TYPE (var));
11988
11989 return var;
11990 }
11991
11992 static tree
11993 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11994 {
11995 tree name, new_fields = NULL, type, f;
11996
11997 type = lang_hooks.types.make_type (RECORD_TYPE);
11998 name = DECL_NAME (TYPE_NAME (orig_type));
11999 name = build_decl (gimple_location (tcctx->ctx->stmt),
12000 TYPE_DECL, name, type);
12001 TYPE_NAME (type) = name;
12002
12003 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
12004 {
12005 tree new_f = copy_node (f);
12006 DECL_CONTEXT (new_f) = type;
12007 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
12008 TREE_CHAIN (new_f) = new_fields;
12009 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12010 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
12011 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
12012 &tcctx->cb, NULL);
12013 new_fields = new_f;
12014 tcctx->cb.decl_map->put (f, new_f);
12015 }
12016 TYPE_FIELDS (type) = nreverse (new_fields);
12017 layout_type (type);
12018 return type;
12019 }
12020
12021 /* Create task copyfn. */
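/* Roughly (a sketch; the struct and field names are illustrative only),
   the generated copyfn has the shape

     void task_copyfn (struct omp_data_s *dst, struct omp_data_shared_s *src)
     {
       dst->shared_var_ptr = src->shared_var_ptr;  // OMP_CLAUSE_SHARED
       dst->fpvar = src->fpvar;                    // firstprivate copy
       ...                                         // or copy-ctor/VLA code
     }

   where dst points to the task's record_type and src to its srecord_type,
   both remapped below if they contain variably modified types.  */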
12022
12023 static void
12024 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
12025 {
12026 struct function *child_cfun;
12027 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
12028 tree record_type, srecord_type, bind, list;
12029 bool record_needs_remap = false, srecord_needs_remap = false;
12030 splay_tree_node n;
12031 struct omp_taskcopy_context tcctx;
12032 location_t loc = gimple_location (task_stmt);
12033 size_t looptempno = 0;
12034
12035 child_fn = gimple_omp_task_copy_fn (task_stmt);
12036 task_cpyfns.safe_push (task_stmt);
12037 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
12038 gcc_assert (child_cfun->cfg == NULL);
12039 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
12040
12041 /* Reset DECL_CONTEXT on function arguments. */
12042 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
12043 DECL_CONTEXT (t) = child_fn;
12044
12045 /* Populate the function. */
12046 push_gimplify_context ();
12047 push_cfun (child_cfun);
12048
12049 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12050 TREE_SIDE_EFFECTS (bind) = 1;
12051 list = NULL;
12052 DECL_SAVED_TREE (child_fn) = bind;
12053 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
12054
12055 /* Remap src and dst argument types if needed. */
12056 record_type = ctx->record_type;
12057 srecord_type = ctx->srecord_type;
12058 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
12059 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12060 {
12061 record_needs_remap = true;
12062 break;
12063 }
12064 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
12065 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
12066 {
12067 srecord_needs_remap = true;
12068 break;
12069 }
12070
12071 if (record_needs_remap || srecord_needs_remap)
12072 {
12073 memset (&tcctx, '\0', sizeof (tcctx));
12074 tcctx.cb.src_fn = ctx->cb.src_fn;
12075 tcctx.cb.dst_fn = child_fn;
12076 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
12077 gcc_checking_assert (tcctx.cb.src_node);
12078 tcctx.cb.dst_node = tcctx.cb.src_node;
12079 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12080 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12081 tcctx.cb.eh_lp_nr = 0;
12082 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12083 tcctx.cb.decl_map = new hash_map<tree, tree>;
12084 tcctx.ctx = ctx;
12085
12086 if (record_needs_remap)
12087 record_type = task_copyfn_remap_type (&tcctx, record_type);
12088 if (srecord_needs_remap)
12089 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
12090 }
12091 else
12092 tcctx.cb.decl_map = NULL;
12093
12094 arg = DECL_ARGUMENTS (child_fn);
12095 TREE_TYPE (arg) = build_pointer_type (record_type);
12096 sarg = DECL_CHAIN (arg);
12097 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12098
12099 /* First pass: initialize the temporaries used in the sizes and field
12100 offsets of record_type and srecord_type. */
12101 if (tcctx.cb.decl_map)
12102 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12104 {
12105 tree *p;
12106
12107 decl = OMP_CLAUSE_DECL (c);
12108 p = tcctx.cb.decl_map->get (decl);
12109 if (p == NULL)
12110 continue;
12111 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12112 sf = (tree) n->value;
12113 sf = *tcctx.cb.decl_map->get (sf);
12114 src = build_simple_mem_ref_loc (loc, sarg);
12115 src = omp_build_component_ref (src, sf);
12116 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12117 append_to_statement_list (t, &list);
12118 }
12119
12120 /* Second pass: copy shared variable pointers and copy-construct
12121 non-VLA firstprivate variables. */
12122 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12123 switch (OMP_CLAUSE_CODE (c))
12124 {
12125 splay_tree_key key;
12126 case OMP_CLAUSE_SHARED:
12127 decl = OMP_CLAUSE_DECL (c);
12128 key = (splay_tree_key) decl;
12129 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12130 key = (splay_tree_key) &DECL_UID (decl);
12131 n = splay_tree_lookup (ctx->field_map, key);
12132 if (n == NULL)
12133 break;
12134 f = (tree) n->value;
12135 if (tcctx.cb.decl_map)
12136 f = *tcctx.cb.decl_map->get (f);
12137 n = splay_tree_lookup (ctx->sfield_map, key);
12138 sf = (tree) n->value;
12139 if (tcctx.cb.decl_map)
12140 sf = *tcctx.cb.decl_map->get (sf);
12141 src = build_simple_mem_ref_loc (loc, sarg);
12142 src = omp_build_component_ref (src, sf);
12143 dst = build_simple_mem_ref_loc (loc, arg);
12144 dst = omp_build_component_ref (dst, f);
12145 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12146 append_to_statement_list (t, &list);
12147 break;
12148 case OMP_CLAUSE_REDUCTION:
12149 case OMP_CLAUSE_IN_REDUCTION:
12150 decl = OMP_CLAUSE_DECL (c);
12151 if (TREE_CODE (decl) == MEM_REF)
12152 {
12153 decl = TREE_OPERAND (decl, 0);
12154 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12155 decl = TREE_OPERAND (decl, 0);
12156 if (TREE_CODE (decl) == INDIRECT_REF
12157 || TREE_CODE (decl) == ADDR_EXPR)
12158 decl = TREE_OPERAND (decl, 0);
12159 }
12160 key = (splay_tree_key) decl;
12161 n = splay_tree_lookup (ctx->field_map, key);
12162 if (n == NULL)
12163 break;
12164 f = (tree) n->value;
12165 if (tcctx.cb.decl_map)
12166 f = *tcctx.cb.decl_map->get (f);
12167 n = splay_tree_lookup (ctx->sfield_map, key);
12168 sf = (tree) n->value;
12169 if (tcctx.cb.decl_map)
12170 sf = *tcctx.cb.decl_map->get (sf);
12171 src = build_simple_mem_ref_loc (loc, sarg);
12172 src = omp_build_component_ref (src, sf);
12173 if (decl != OMP_CLAUSE_DECL (c)
12174 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12175 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12176 src = build_simple_mem_ref_loc (loc, src);
12177 dst = build_simple_mem_ref_loc (loc, arg);
12178 dst = omp_build_component_ref (dst, f);
12179 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12180 append_to_statement_list (t, &list);
12181 break;
12182 case OMP_CLAUSE__LOOPTEMP_:
12183 /* Fields for the first two _looptemp_ clauses are initialized by
12184 GOMP_taskloop*; the rest are handled like firstprivate. */
12185 if (looptempno < 2)
12186 {
12187 looptempno++;
12188 break;
12189 }
12190 /* FALLTHRU */
12191 case OMP_CLAUSE__REDUCTEMP_:
12192 case OMP_CLAUSE_FIRSTPRIVATE:
12193 decl = OMP_CLAUSE_DECL (c);
12194 if (is_variable_sized (decl))
12195 break;
12196 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12197 if (n == NULL)
12198 break;
12199 f = (tree) n->value;
12200 if (tcctx.cb.decl_map)
12201 f = *tcctx.cb.decl_map->get (f);
12202 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12203 if (n != NULL)
12204 {
12205 sf = (tree) n->value;
12206 if (tcctx.cb.decl_map)
12207 sf = *tcctx.cb.decl_map->get (sf);
12208 src = build_simple_mem_ref_loc (loc, sarg);
12209 src = omp_build_component_ref (src, sf);
12210 if (use_pointer_for_field (decl, NULL)
12211 || omp_privatize_by_reference (decl))
12212 src = build_simple_mem_ref_loc (loc, src);
12213 }
12214 else
12215 src = decl;
12216 dst = build_simple_mem_ref_loc (loc, arg);
12217 dst = omp_build_component_ref (dst, f);
12218 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12219 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12220 else
12221 {
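/* If the firstprivate variable has an allocate clause, obtain the
   storage via GOMP_alloc (align, size, allocator) first and
   copy-construct into the pointed-to memory.  */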
12222 if (ctx->allocate_map)
12223 if (tree *allocatorp = ctx->allocate_map->get (decl))
12224 {
12225 tree allocator = *allocatorp;
12226 HOST_WIDE_INT ialign = 0;
12227 if (TREE_CODE (allocator) == TREE_LIST)
12228 {
12229 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12230 allocator = TREE_PURPOSE (allocator);
12231 }
12232 if (TREE_CODE (allocator) != INTEGER_CST)
12233 {
12234 n = splay_tree_lookup (ctx->sfield_map,
12235 (splay_tree_key) allocator);
12236 allocator = (tree) n->value;
12237 if (tcctx.cb.decl_map)
12238 allocator = *tcctx.cb.decl_map->get (allocator);
12239 tree a = build_simple_mem_ref_loc (loc, sarg);
12240 allocator = omp_build_component_ref (a, allocator);
12241 }
12242 allocator = fold_convert (pointer_sized_int_node, allocator);
12243 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12244 tree align = build_int_cst (size_type_node,
12245 MAX (ialign,
12246 DECL_ALIGN_UNIT (decl)));
12247 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12248 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12249 allocator);
12250 ptr = fold_convert (TREE_TYPE (dst), ptr);
12251 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12252 append_to_statement_list (t, &list);
12253 dst = build_simple_mem_ref_loc (loc, dst);
12254 }
12255 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12256 }
12257 append_to_statement_list (t, &list);
12258 break;
12259 case OMP_CLAUSE_PRIVATE:
12260 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12261 break;
12262 decl = OMP_CLAUSE_DECL (c);
12263 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12264 f = (tree) n->value;
12265 if (tcctx.cb.decl_map)
12266 f = *tcctx.cb.decl_map->get (f);
12267 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12268 if (n != NULL)
12269 {
12270 sf = (tree) n->value;
12271 if (tcctx.cb.decl_map)
12272 sf = *tcctx.cb.decl_map->get (sf);
12273 src = build_simple_mem_ref_loc (loc, sarg);
12274 src = omp_build_component_ref (src, sf);
12275 if (use_pointer_for_field (decl, NULL))
12276 src = build_simple_mem_ref_loc (loc, src);
12277 }
12278 else
12279 src = decl;
12280 dst = build_simple_mem_ref_loc (loc, arg);
12281 dst = omp_build_component_ref (dst, f);
12282 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12283 append_to_statement_list (t, &list);
12284 break;
12285 default:
12286 break;
12287 }
12288
12289 /* Last pass: handle VLA firstprivates. */
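/* For each variably sized firstprivate, copy-construct the payload
   the sender field points at, then store its address into the
   matching pointer field so the VLA's DECL_VALUE_EXPR dereference
   works inside the task.  */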
12290 if (tcctx.cb.decl_map)
12291 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12292 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12293 {
12294 tree ind, ptr, df;
12295
12296 decl = OMP_CLAUSE_DECL (c);
12297 if (!is_variable_sized (decl))
12298 continue;
12299 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12300 if (n == NULL)
12301 continue;
12302 f = (tree) n->value;
12303 f = *tcctx.cb.decl_map->get (f);
12304 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12305 ind = DECL_VALUE_EXPR (decl);
12306 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12307 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12308 n = splay_tree_lookup (ctx->sfield_map,
12309 (splay_tree_key) TREE_OPERAND (ind, 0));
12310 sf = (tree) n->value;
12311 sf = *tcctx.cb.decl_map->get (sf);
12312 src = build_simple_mem_ref_loc (loc, sarg);
12313 src = omp_build_component_ref (src, sf);
12314 src = build_simple_mem_ref_loc (loc, src);
12315 dst = build_simple_mem_ref_loc (loc, arg);
12316 dst = omp_build_component_ref (dst, f);
12317 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12318 append_to_statement_list (t, &list);
12319 n = splay_tree_lookup (ctx->field_map,
12320 (splay_tree_key) TREE_OPERAND (ind, 0));
12321 df = (tree) n->value;
12322 df = *tcctx.cb.decl_map->get (df);
12323 ptr = build_simple_mem_ref_loc (loc, arg);
12324 ptr = omp_build_component_ref (ptr, df);
12325 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12326 build_fold_addr_expr_loc (loc, dst));
12327 append_to_statement_list (t, &list);
12328 }
12329
12330 t = build1 (RETURN_EXPR, void_type_node, NULL);
12331 append_to_statement_list (t, &list);
12332
12333 if (tcctx.cb.decl_map)
12334 delete tcctx.cb.decl_map;
12335 pop_gimplify_context (NULL);
12336 BIND_EXPR_BODY (bind) = list;
12337 pop_cfun ();
12338 }
12339
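/* Lower the depend clauses found in *PCLAUSES into the flat address
   array the runtime expects, appending the setup code to ISEQ and the
   final clobber of the array to OSEQ.  With only in/out/inout
   dependences the layout is

     array[0]   total number of depend addresses
     array[1]   number of out/inout entries
     array[2+]  the addresses, out/inout entries first

   whereas mutexinoutset, depobj or inoutset clauses force the extended
   layout (idx == 5 below): array[0] is 0 as a format marker, array[1]
   holds the total, array[2..4] the out/inout, mutexinoutset and in
   counts, the addresses follow from array[5] on, and each inoutset
   entry points at a trailing (address, GOMP_DEPEND_INOUTSET) pair at
   the end of the array.  */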
12340 static void
12341 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12342 {
12343 tree c, clauses;
12344 gimple *g;
12345 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12346
12347 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12348 gcc_assert (clauses);
12349 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12351 switch (OMP_CLAUSE_DEPEND_KIND (c))
12352 {
12353 case OMP_CLAUSE_DEPEND_LAST:
12354 /* Lowering already done at gimplification. */
12355 return;
12356 case OMP_CLAUSE_DEPEND_IN:
12357 cnt[2]++;
12358 break;
12359 case OMP_CLAUSE_DEPEND_OUT:
12360 case OMP_CLAUSE_DEPEND_INOUT:
12361 cnt[0]++;
12362 break;
12363 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12364 cnt[1]++;
12365 break;
12366 case OMP_CLAUSE_DEPEND_DEPOBJ:
12367 cnt[3]++;
12368 break;
12369 case OMP_CLAUSE_DEPEND_INOUTSET:
12370 cnt[4]++;
12371 break;
12372 default:
12373 gcc_unreachable ();
12374 }
12375 if (cnt[1] || cnt[3] || cnt[4])
12376 idx = 5;
12377 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12378 size_t inoutidx = total + idx;
12379 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12380 tree array = create_tmp_var (type);
12381 TREE_ADDRESSABLE (array) = 1;
12382 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12383 NULL_TREE);
12384 if (idx == 5)
12385 {
12386 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12387 gimple_seq_add_stmt (iseq, g);
12388 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12389 NULL_TREE);
12390 }
12391 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12392 gimple_seq_add_stmt (iseq, g);
12393 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12394 {
12395 r = build4 (ARRAY_REF, ptr_type_node, array,
12396 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12397 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12398 gimple_seq_add_stmt (iseq, g);
12399 }
12400 for (i = 0; i < 5; i++)
12401 {
12402 if (cnt[i] == 0)
12403 continue;
12404 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12405 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12406 continue;
12407 else
12408 {
12409 switch (OMP_CLAUSE_DEPEND_KIND (c))
12410 {
12411 case OMP_CLAUSE_DEPEND_IN:
12412 if (i != 2)
12413 continue;
12414 break;
12415 case OMP_CLAUSE_DEPEND_OUT:
12416 case OMP_CLAUSE_DEPEND_INOUT:
12417 if (i != 0)
12418 continue;
12419 break;
12420 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12421 if (i != 1)
12422 continue;
12423 break;
12424 case OMP_CLAUSE_DEPEND_DEPOBJ:
12425 if (i != 3)
12426 continue;
12427 break;
12428 case OMP_CLAUSE_DEPEND_INOUTSET:
12429 if (i != 4)
12430 continue;
12431 break;
12432 default:
12433 gcc_unreachable ();
12434 }
12435 tree t = OMP_CLAUSE_DECL (c);
12436 if (i == 4)
12437 {
12438 t = build4 (ARRAY_REF, ptr_type_node, array,
12439 size_int (inoutidx), NULL_TREE, NULL_TREE);
12440 t = build_fold_addr_expr (t);
12441 inoutidx += 2;
12442 }
12443 t = fold_convert (ptr_type_node, t);
12444 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12445 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12446 NULL_TREE, NULL_TREE);
12447 g = gimple_build_assign (r, t);
12448 gimple_seq_add_stmt (iseq, g);
12449 }
12450 }
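/* Now emit the trailing pairs each inoutset entry above points at:
   the dependence address followed by a GOMP_DEPEND_INOUTSET marker.  */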
12451 if (cnt[4])
12452 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12453 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12454 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12455 {
12456 tree t = OMP_CLAUSE_DECL (c);
12457 t = fold_convert (ptr_type_node, t);
12458 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12459 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12460 NULL_TREE, NULL_TREE);
12461 g = gimple_build_assign (r, t);
12462 gimple_seq_add_stmt (iseq, g);
12463 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12464 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12465 NULL_TREE, NULL_TREE);
12466 g = gimple_build_assign (r, t);
12467 gimple_seq_add_stmt (iseq, g);
12468 }
12469
12470 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12471 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12472 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12473 OMP_CLAUSE_CHAIN (c) = *pclauses;
12474 *pclauses = c;
12475 tree clobber = build_clobber (type);
12476 g = gimple_build_assign (array, clobber);
12477 gimple_seq_add_stmt (oseq, g);
12478 }
12479
12480 /* Lower the OpenMP parallel or task directive in the current statement
12481 in GSI_P. CTX holds context information for the directive. */
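/* Roughly, for

     #pragma omp parallel shared (s)
       body;

   this produces

     .omp_data_o.s = &s;                      <- ilist
     #pragma omp parallel [data arg .omp_data_o]
       {
         .omp_data_i = &.omp_data_o;          <- receiver setup
         par_ilist; body; par_rlist; par_olist;
         OMP_RETURN
       }
     .omp_data_o = {CLOBBER};                 <- olist

   (names illustrative); the outlining into a child function happens
   later, in pass_expand_omp.  */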
12482
12483 static void
12484 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12485 {
12486 tree clauses;
12487 tree child_fn, t;
12488 gimple *stmt = gsi_stmt (*gsi_p);
12489 gbind *par_bind, *bind, *dep_bind = NULL;
12490 gimple_seq par_body;
12491 location_t loc = gimple_location (stmt);
12492
12493 clauses = gimple_omp_taskreg_clauses (stmt);
12494 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12495 && gimple_omp_task_taskwait_p (stmt))
12496 {
12497 par_bind = NULL;
12498 par_body = NULL;
12499 }
12500 else
12501 {
12502 par_bind
12503 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12504 par_body = gimple_bind_body (par_bind);
12505 }
12506 child_fn = ctx->cb.dst_fn;
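/* A parallel whose body contains exactly one worksharing construct
   (ws_num == 1) is marked combined, so expansion can use the combined
   parallel + workshare runtime entry points.  */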
12507 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12508 && !gimple_omp_parallel_combined_p (stmt))
12509 {
12510 struct walk_stmt_info wi;
12511 int ws_num = 0;
12512
12513 memset (&wi, 0, sizeof (wi));
12514 wi.info = &ws_num;
12515 wi.val_only = true;
12516 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12517 if (ws_num == 1)
12518 gimple_omp_parallel_set_combined_p (stmt, true);
12519 }
12520 gimple_seq dep_ilist = NULL;
12521 gimple_seq dep_olist = NULL;
12522 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12523 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12524 {
12525 push_gimplify_context ();
12526 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12527 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12528 &dep_ilist, &dep_olist);
12529 }
12530
12531 if (gimple_code (stmt) == GIMPLE_OMP_TASK
12532 && gimple_omp_task_taskwait_p (stmt))
12533 {
12534 if (dep_bind)
12535 {
12536 gsi_replace (gsi_p, dep_bind, true);
12537 gimple_bind_add_seq (dep_bind, dep_ilist);
12538 gimple_bind_add_stmt (dep_bind, stmt);
12539 gimple_bind_add_seq (dep_bind, dep_olist);
12540 pop_gimplify_context (dep_bind);
12541 }
12542 return;
12543 }
12544
12545 if (ctx->srecord_type)
12546 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12547
12548 gimple_seq tskred_ilist = NULL;
12549 gimple_seq tskred_olist = NULL;
12550 if ((is_task_ctx (ctx)
12551 && gimple_omp_task_taskloop_p (ctx->stmt)
12552 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12553 OMP_CLAUSE_REDUCTION))
12554 || (is_parallel_ctx (ctx)
12555 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12556 OMP_CLAUSE__REDUCTEMP_)))
12557 {
12558 if (dep_bind == NULL)
12559 {
12560 push_gimplify_context ();
12561 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12562 }
12563 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12564 : OMP_PARALLEL,
12565 gimple_omp_taskreg_clauses (ctx->stmt),
12566 &tskred_ilist, &tskred_olist);
12567 }
12568
12569 push_gimplify_context ();
12570
12571 gimple_seq par_olist = NULL;
12572 gimple_seq par_ilist = NULL;
12573 gimple_seq par_rlist = NULL;
12574 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12575 lower_omp (&par_body, ctx);
12576 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12577 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12578
12579 /* Declare all the variables created by mapping and the variables
12580 declared in the scope of the parallel body. */
12581 record_vars_into (ctx->block_vars, child_fn);
12582 maybe_remove_omp_member_access_dummy_vars (par_bind);
12583 record_vars_into (gimple_bind_vars (par_bind), child_fn);
12584
12585 if (ctx->record_type)
12586 {
12587 ctx->sender_decl
12588 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12589 : ctx->record_type, ".omp_data_o");
12590 DECL_NAMELESS (ctx->sender_decl) = 1;
12591 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12592 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12593 }
12594
12595 gimple_seq olist = NULL;
12596 gimple_seq ilist = NULL;
12597 lower_send_clauses (clauses, &ilist, &olist, ctx);
12598 lower_send_shared_vars (&ilist, &olist, ctx);
12599
12600 if (ctx->record_type)
12601 {
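/* Clobber the sender record after the construct, ending its lifetime
   for the optimizers.  */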
12602 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12603 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12604 clobber));
12605 }
12606
12607 /* Once all the expansions are done, sequence all the different
12608 fragments inside gimple_omp_body. */
12609
12610 gimple_seq new_body = NULL;
12611
12612 if (ctx->record_type)
12613 {
12614 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12615 /* fixup_child_record_type might have changed receiver_decl's type. */
12616 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12617 gimple_seq_add_stmt (&new_body,
12618 gimple_build_assign (ctx->receiver_decl, t));
12619 }
12620
12621 gimple_seq_add_seq (&new_body, par_ilist);
12622 gimple_seq_add_seq (&new_body, par_body);
12623 gimple_seq_add_seq (&new_body, par_rlist);
12624 if (ctx->cancellable)
12625 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12626 gimple_seq_add_seq (&new_body, par_olist);
12627 new_body = maybe_catch_exception (new_body);
12628 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12629 gimple_seq_add_stmt (&new_body,
12630 gimple_build_omp_continue (integer_zero_node,
12631 integer_zero_node));
12632 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12633 gimple_omp_set_body (stmt, new_body);
12634
12635 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12636 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12637 else
12638 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12639 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12640 gimple_bind_add_seq (bind, ilist);
12641 gimple_bind_add_stmt (bind, stmt);
12642 gimple_bind_add_seq (bind, olist);
12643
12644 pop_gimplify_context (NULL);
12645
12646 if (dep_bind)
12647 {
12648 gimple_bind_add_seq (dep_bind, dep_ilist);
12649 gimple_bind_add_seq (dep_bind, tskred_ilist);
12650 gimple_bind_add_stmt (dep_bind, bind);
12651 gimple_bind_add_seq (dep_bind, tskred_olist);
12652 gimple_bind_add_seq (dep_bind, dep_olist);
12653 pop_gimplify_context (dep_bind);
12654 }
12655 }
12656
12657 /* Lower the GIMPLE_OMP_TARGET in the current statement
12658 in GSI_P. CTX holds context information for the directive. */
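/* Roughly, for

     #pragma omp target map (tofrom: a)

   lowering fills in

     .omp_data_arr.a = &a;
     .omp_data_sizes[0] = sizeof (a);
     .omp_data_kinds[0] = GOMP_MAP_TOFROM | (ceil_log2 (align) << 8);

   (names illustrative), which expansion later hands to the
   GOMP_target_ext runtime entry point.  */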
12659
12660 static void
12661 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12662 {
12663 tree clauses;
12664 tree child_fn, t, c;
12665 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12666 gbind *tgt_bind, *bind, *dep_bind = NULL;
12667 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12668 location_t loc = gimple_location (stmt);
12669 bool offloaded, data_region;
12670 unsigned int map_cnt = 0;
12671 tree in_reduction_clauses = NULL_TREE;
12672
12673 offloaded = is_gimple_omp_offloaded (stmt);
12674 switch (gimple_omp_target_kind (stmt))
12675 {
12676 case GF_OMP_TARGET_KIND_REGION:
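/* Splice all in_reduction clauses out of the clause list and
   reattach them at its tail so they are processed last.  */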
12677 tree *p, *q;
12678 q = &in_reduction_clauses;
12679 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12680 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12681 {
12682 *q = *p;
12683 q = &OMP_CLAUSE_CHAIN (*q);
12684 *p = OMP_CLAUSE_CHAIN (*p);
12685 }
12686 else
12687 p = &OMP_CLAUSE_CHAIN (*p);
12688 *q = NULL_TREE;
12689 *p = in_reduction_clauses;
12690 /* FALLTHRU */
12691 case GF_OMP_TARGET_KIND_UPDATE:
12692 case GF_OMP_TARGET_KIND_ENTER_DATA:
12693 case GF_OMP_TARGET_KIND_EXIT_DATA:
12694 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12695 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12696 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12697 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12698 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12699 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12700 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12701 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12702 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12703 data_region = false;
12704 break;
12705 case GF_OMP_TARGET_KIND_DATA:
12706 case GF_OMP_TARGET_KIND_OACC_DATA:
12707 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12708 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12709 data_region = true;
12710 break;
12711 default:
12712 gcc_unreachable ();
12713 }
12714
12715 /* Ensure that the 'requires' mask is written via output_offload_tables,
12716 even if only 'target (enter/exit) data' is used in the translation unit. */
12717 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12718 g->have_offload = true;
12719
12720 clauses = gimple_omp_target_clauses (stmt);
12721
12722 gimple_seq dep_ilist = NULL;
12723 gimple_seq dep_olist = NULL;
12724 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12725 if (has_depend || in_reduction_clauses)
12726 {
12727 push_gimplify_context ();
12728 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12729 if (has_depend)
12730 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12731 &dep_ilist, &dep_olist);
12732 if (in_reduction_clauses)
12733 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12734 ctx, NULL);
12735 }
12736
12737 tgt_bind = NULL;
12738 tgt_body = NULL;
12739 if (offloaded)
12740 {
12741 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12742 tgt_body = gimple_bind_body (tgt_bind);
12743 }
12744 else if (data_region)
12745 tgt_body = gimple_omp_body (stmt);
12746 child_fn = ctx->cb.dst_fn;
12747
12748 push_gimplify_context ();
12749 fplist = NULL;
12750
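/* First scan of the clauses: count the entries that will go into the
   map arrays (map_cnt) and install DECL_VALUE_EXPRs so that
   references in the region body see the received copies.  */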
12751 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12752 switch (OMP_CLAUSE_CODE (c))
12753 {
12754 tree var, x;
12755
12756 default:
12757 break;
12758 case OMP_CLAUSE_MAP:
12759 #if CHECKING_P
12760 /* First check what we're prepared to handle in the following. */
12761 switch (OMP_CLAUSE_MAP_KIND (c))
12762 {
12763 case GOMP_MAP_ALLOC:
12764 case GOMP_MAP_TO:
12765 case GOMP_MAP_FROM:
12766 case GOMP_MAP_TOFROM:
12767 case GOMP_MAP_POINTER:
12768 case GOMP_MAP_TO_PSET:
12769 case GOMP_MAP_DELETE:
12770 case GOMP_MAP_RELEASE:
12771 case GOMP_MAP_ALWAYS_TO:
12772 case GOMP_MAP_ALWAYS_FROM:
12773 case GOMP_MAP_ALWAYS_TOFROM:
12774 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12775 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12776 case GOMP_MAP_STRUCT:
12777 case GOMP_MAP_ALWAYS_POINTER:
12778 case GOMP_MAP_ATTACH:
12779 case GOMP_MAP_DETACH:
12780 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12781 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12782 break;
12783 case GOMP_MAP_IF_PRESENT:
12784 case GOMP_MAP_FORCE_ALLOC:
12785 case GOMP_MAP_FORCE_TO:
12786 case GOMP_MAP_FORCE_FROM:
12787 case GOMP_MAP_FORCE_TOFROM:
12788 case GOMP_MAP_FORCE_PRESENT:
12789 case GOMP_MAP_FORCE_DEVICEPTR:
12790 case GOMP_MAP_DEVICE_RESIDENT:
12791 case GOMP_MAP_LINK:
12792 case GOMP_MAP_FORCE_DETACH:
12793 gcc_assert (is_gimple_omp_oacc (stmt));
12794 break;
12795 default:
12796 gcc_unreachable ();
12797 }
12798 #endif
12799 /* FALLTHRU */
12800 case OMP_CLAUSE_TO:
12801 case OMP_CLAUSE_FROM:
12802 oacc_firstprivate:
12803 var = OMP_CLAUSE_DECL (c);
12804 if (!DECL_P (var))
12805 {
12806 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12807 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12808 && (OMP_CLAUSE_MAP_KIND (c)
12809 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12810 map_cnt++;
12811 continue;
12812 }
12813
12814 if (DECL_SIZE (var)
12815 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12816 {
12817 tree var2 = DECL_VALUE_EXPR (var);
12818 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12819 var2 = TREE_OPERAND (var2, 0);
12820 gcc_assert (DECL_P (var2));
12821 var = var2;
12822 }
12823
12824 if (offloaded
12825 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12826 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12827 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12828 {
12829 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12830 {
12831 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12832 && varpool_node::get_create (var)->offloadable)
12833 continue;
12834
12835 tree type = build_pointer_type (TREE_TYPE (var));
12836 tree new_var = lookup_decl (var, ctx);
12837 x = create_tmp_var_raw (type, get_name (new_var));
12838 gimple_add_tmp_var (x);
12839 x = build_simple_mem_ref (x);
12840 SET_DECL_VALUE_EXPR (new_var, x);
12841 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12842 }
12843 continue;
12844 }
12845
12846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12847 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12848 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12849 && is_omp_target (stmt))
12850 {
12851 gcc_assert (maybe_lookup_field (c, ctx));
12852 map_cnt++;
12853 continue;
12854 }
12855
12856 if (!maybe_lookup_field (var, ctx))
12857 continue;
12858
12859 /* Don't remap compute constructs' reduction variables, because the
12860 intermediate result must be local to each gang. */
12861 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12862 && is_gimple_omp_oacc (ctx->stmt)
12863 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12864 {
12865 x = build_receiver_ref (var, true, ctx);
12866 tree new_var = lookup_decl (var, ctx);
12867
12868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12869 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12870 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12871 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12872 x = build_simple_mem_ref (x);
12873 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12874 {
12875 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12876 if (omp_privatize_by_reference (new_var)
12877 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12878 || DECL_BY_REFERENCE (var)))
12879 {
12880 /* Create a local object to hold the instance
12881 value. */
12882 tree type = TREE_TYPE (TREE_TYPE (new_var));
12883 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12884 tree inst = create_tmp_var (type, id);
12885 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12886 x = build_fold_addr_expr (inst);
12887 }
12888 gimplify_assign (new_var, x, &fplist);
12889 }
12890 else if (DECL_P (new_var))
12891 {
12892 SET_DECL_VALUE_EXPR (new_var, x);
12893 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12894 }
12895 else
12896 gcc_unreachable ();
12897 }
12898 map_cnt++;
12899 break;
12900
12901 case OMP_CLAUSE_FIRSTPRIVATE:
12902 omp_firstprivate_recv:
12903 gcc_checking_assert (offloaded);
12904 if (is_gimple_omp_oacc (ctx->stmt))
12905 {
12906 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12907 gcc_checking_assert (!is_oacc_kernels (ctx));
12908 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12909 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12910
12911 goto oacc_firstprivate;
12912 }
12913 map_cnt++;
12914 var = OMP_CLAUSE_DECL (c);
12915 if (!omp_privatize_by_reference (var)
12916 && !is_gimple_reg_type (TREE_TYPE (var)))
12917 {
12918 tree new_var = lookup_decl (var, ctx);
12919 if (is_variable_sized (var))
12920 {
12921 tree pvar = DECL_VALUE_EXPR (var);
12922 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12923 pvar = TREE_OPERAND (pvar, 0);
12924 gcc_assert (DECL_P (pvar));
12925 tree new_pvar = lookup_decl (pvar, ctx);
12926 x = build_fold_indirect_ref (new_pvar);
12927 TREE_THIS_NOTRAP (x) = 1;
12928 }
12929 else
12930 x = build_receiver_ref (var, true, ctx);
12931 SET_DECL_VALUE_EXPR (new_var, x);
12932 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12933 }
12934 /* Fortran array descriptors: firstprivate of data + attach. */
12935 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12936 && lang_hooks.decls.omp_array_data (var, true))
12937 map_cnt += 2;
12938 break;
12939
12940 case OMP_CLAUSE_PRIVATE:
12941 gcc_checking_assert (offloaded);
12942 if (is_gimple_omp_oacc (ctx->stmt))
12943 {
12944 /* No 'private' clauses on OpenACC 'kernels'. */
12945 gcc_checking_assert (!is_oacc_kernels (ctx));
12946 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12947 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12948
12949 break;
12950 }
12951 var = OMP_CLAUSE_DECL (c);
12952 if (is_variable_sized (var))
12953 {
12954 tree new_var = lookup_decl (var, ctx);
12955 tree pvar = DECL_VALUE_EXPR (var);
12956 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12957 pvar = TREE_OPERAND (pvar, 0);
12958 gcc_assert (DECL_P (pvar));
12959 tree new_pvar = lookup_decl (pvar, ctx);
12960 x = build_fold_indirect_ref (new_pvar);
12961 TREE_THIS_NOTRAP (x) = 1;
12962 SET_DECL_VALUE_EXPR (new_var, x);
12963 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12964 }
12965 break;
12966
12967 case OMP_CLAUSE_USE_DEVICE_PTR:
12968 case OMP_CLAUSE_USE_DEVICE_ADDR:
12969 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12970 case OMP_CLAUSE_IS_DEVICE_PTR:
12971 var = OMP_CLAUSE_DECL (c);
12972 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12973 {
12974 while (TREE_CODE (var) == INDIRECT_REF
12975 || TREE_CODE (var) == ARRAY_REF)
12976 var = TREE_OPERAND (var, 0);
12977 if (lang_hooks.decls.omp_array_data (var, true))
12978 goto omp_firstprivate_recv;
12979 }
12980 map_cnt++;
12981 if (is_variable_sized (var))
12982 {
12983 tree new_var = lookup_decl (var, ctx);
12984 tree pvar = DECL_VALUE_EXPR (var);
12985 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12986 pvar = TREE_OPERAND (pvar, 0);
12987 gcc_assert (DECL_P (pvar));
12988 tree new_pvar = lookup_decl (pvar, ctx);
12989 x = build_fold_indirect_ref (new_pvar);
12990 TREE_THIS_NOTRAP (x) = 1;
12991 SET_DECL_VALUE_EXPR (new_var, x);
12992 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12993 }
12994 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12995 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12996 && !omp_privatize_by_reference (var)
12997 && !omp_is_allocatable_or_ptr (var)
12998 && !lang_hooks.decls.omp_array_data (var, true))
12999 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13000 {
13001 tree new_var = lookup_decl (var, ctx);
13002 tree type = build_pointer_type (TREE_TYPE (var));
13003 x = create_tmp_var_raw (type, get_name (new_var));
13004 gimple_add_tmp_var (x);
13005 x = build_simple_mem_ref (x);
13006 SET_DECL_VALUE_EXPR (new_var, x);
13007 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13008 }
13009 else
13010 {
13011 tree new_var = lookup_decl (var, ctx);
13012 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
13013 gimple_add_tmp_var (x);
13014 SET_DECL_VALUE_EXPR (new_var, x);
13015 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
13016 }
13017 break;
13018 }
13019
13020 if (offloaded)
13021 {
13022 target_nesting_level++;
13023 lower_omp (&tgt_body, ctx);
13024 target_nesting_level--;
13025 }
13026 else if (data_region)
13027 lower_omp (&tgt_body, ctx);
13028
13029 if (offloaded)
13030 {
13031 /* Declare all the variables created by mapping and the variables
13032 declared in the scope of the target body. */
13033 record_vars_into (ctx->block_vars, child_fn);
13034 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
13035 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
13036 }
13037
13038 olist = NULL;
13039 ilist = NULL;
13040 if (ctx->record_type)
13041 {
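/* The data argument is a TREE_VEC of three parallel arrays: the
   .omp_data_arr record with the host addresses/values being sent,
   .omp_data_sizes with the size of each mapping, and .omp_data_kinds
   with each entry's map kind and alignment.  */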
13042 ctx->sender_decl
13043 = create_tmp_var (ctx->record_type, ".omp_data_arr");
13044 DECL_NAMELESS (ctx->sender_decl) = 1;
13045 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
13046 t = make_tree_vec (3);
13047 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
13048 TREE_VEC_ELT (t, 1)
13049 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
13050 ".omp_data_sizes");
13051 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
13052 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
13053 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
13054 tree tkind_type = short_unsigned_type_node;
13055 int talign_shift = 8;
13056 TREE_VEC_ELT (t, 2)
13057 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
13058 ".omp_data_kinds");
13059 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
13060 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
13061 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
13062 gimple_omp_target_set_data_arg (stmt, t);
13063
13064 vec<constructor_elt, va_gc> *vsize;
13065 vec<constructor_elt, va_gc> *vkind;
13066 vec_alloc (vsize, map_cnt);
13067 vec_alloc (vkind, map_cnt);
13068 unsigned int map_idx = 0;
13069
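/* Second scan: fill in the sender record and build the constructor
   elements for .omp_data_sizes and .omp_data_kinds, one per map
   entry.  */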
13070 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13071 switch (OMP_CLAUSE_CODE (c))
13072 {
13073 tree ovar, nc, s, purpose, var, x, type;
13074 unsigned int talign;
13075
13076 default:
13077 break;
13078
13079 case OMP_CLAUSE_MAP:
13080 case OMP_CLAUSE_TO:
13081 case OMP_CLAUSE_FROM:
13082 oacc_firstprivate_map:
13083 nc = c;
13084 ovar = OMP_CLAUSE_DECL (c);
13085 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13086 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13087 || (OMP_CLAUSE_MAP_KIND (c)
13088 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13089 break;
13090 if (!DECL_P (ovar))
13091 {
13092 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13093 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13094 {
13095 nc = OMP_CLAUSE_CHAIN (c);
13096 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13097 == get_base_address (ovar));
13098 ovar = OMP_CLAUSE_DECL (nc);
13099 }
13100 else
13101 {
13102 tree x = build_sender_ref (ovar, ctx);
13103 tree v = ovar;
13104 if (in_reduction_clauses
13105 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13106 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13107 {
13108 v = unshare_expr (v);
13109 tree *p = &v;
13110 while (handled_component_p (*p)
13111 || TREE_CODE (*p) == INDIRECT_REF
13112 || TREE_CODE (*p) == ADDR_EXPR
13113 || TREE_CODE (*p) == MEM_REF
13114 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13115 p = &TREE_OPERAND (*p, 0);
13116 tree d = *p;
13117 if (is_variable_sized (d))
13118 {
13119 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13120 d = DECL_VALUE_EXPR (d);
13121 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13122 d = TREE_OPERAND (d, 0);
13123 gcc_assert (DECL_P (d));
13124 }
13125 splay_tree_key key
13126 = (splay_tree_key) &DECL_CONTEXT (d);
13127 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13128 key)->value;
13129 if (d == *p)
13130 *p = nd;
13131 else
13132 *p = build_fold_indirect_ref (nd);
13133 }
13134 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13135 gimplify_assign (x, v, &ilist);
13136 nc = NULL_TREE;
13137 }
13138 }
13139 else
13140 {
13141 if (DECL_SIZE (ovar)
13142 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13143 {
13144 tree ovar2 = DECL_VALUE_EXPR (ovar);
13145 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13146 ovar2 = TREE_OPERAND (ovar2, 0);
13147 gcc_assert (DECL_P (ovar2));
13148 ovar = ovar2;
13149 }
13150 if (!maybe_lookup_field (ovar, ctx)
13151 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13152 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13153 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13154 continue;
13155 }
13156
13157 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13158 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13159 talign = DECL_ALIGN_UNIT (ovar);
13160
13161 var = NULL_TREE;
13162 if (nc)
13163 {
13164 if (in_reduction_clauses
13165 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13166 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13167 {
13168 tree d = ovar;
13169 if (is_variable_sized (d))
13170 {
13171 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13172 d = DECL_VALUE_EXPR (d);
13173 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13174 d = TREE_OPERAND (d, 0);
13175 gcc_assert (DECL_P (d));
13176 }
13177 splay_tree_key key
13178 = (splay_tree_key) &DECL_CONTEXT (d);
13179 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13180 key)->value;
13181 if (d == ovar)
13182 var = nd;
13183 else
13184 var = build_fold_indirect_ref (nd);
13185 }
13186 else
13187 var = lookup_decl_in_outer_ctx (ovar, ctx);
13188 }
13189 if (nc
13190 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13191 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13192 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13193 && is_omp_target (stmt))
13194 {
13195 x = build_sender_ref (c, ctx);
13196 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13197 }
13198 else if (nc)
13199 {
13200 x = build_sender_ref (ovar, ctx);
13201
13202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13203 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13204 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13205 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13206 {
13207 gcc_assert (offloaded);
13208 tree avar
13209 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13210 mark_addressable (avar);
13211 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13212 talign = DECL_ALIGN_UNIT (avar);
13213 avar = build_fold_addr_expr (avar);
13214 gimplify_assign (x, avar, &ilist);
13215 }
13216 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13217 {
13218 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13219 if (!omp_privatize_by_reference (var))
13220 {
13221 if (is_gimple_reg (var)
13222 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13223 suppress_warning (var);
13224 var = build_fold_addr_expr (var);
13225 }
13226 else
13227 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13228 gimplify_assign (x, var, &ilist);
13229 }
13230 else if (is_gimple_reg (var))
13231 {
13232 gcc_assert (offloaded);
13233 tree avar = create_tmp_var (TREE_TYPE (var));
13234 mark_addressable (avar);
13235 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13236 if (GOMP_MAP_COPY_TO_P (map_kind)
13237 || map_kind == GOMP_MAP_POINTER
13238 || map_kind == GOMP_MAP_TO_PSET
13239 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13240 {
13241 /* If we need to initialize a temporary
13242 with VAR because it is not addressable, and
13243 the variable hasn't been initialized yet, then
13244 we'll get a warning for the store to avar.
13245 Don't warn in that case; the mapping might
13246 be implicit. */
13247 suppress_warning (var, OPT_Wuninitialized);
13248 gimplify_assign (avar, var, &ilist);
13249 }
13250 avar = build_fold_addr_expr (avar);
13251 gimplify_assign (x, avar, &ilist);
13252 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13253 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13254 && !TYPE_READONLY (TREE_TYPE (var)))
13255 {
13256 x = unshare_expr (x);
13257 x = build_simple_mem_ref (x);
13258 gimplify_assign (var, x, &olist);
13259 }
13260 }
13261 else
13262 {
13263 /* While MAP is handled explicitly by the FE,
13264 for 'target update', only the identified variable is passed. */
13265 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13266 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13267 && (omp_is_allocatable_or_ptr (var)
13268 && omp_check_optional_argument (var, false)))
13269 var = build_fold_indirect_ref (var);
13270 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13271 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13272 || (!omp_is_allocatable_or_ptr (var)
13273 && !omp_check_optional_argument (var, false)))
13274 var = build_fold_addr_expr (var);
13275 gimplify_assign (x, var, &ilist);
13276 }
13277 }
13278 s = NULL_TREE;
13279 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13280 {
13281 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13282 s = TREE_TYPE (ovar);
13283 if (TREE_CODE (s) == REFERENCE_TYPE
13284 || omp_check_optional_argument (ovar, false))
13285 s = TREE_TYPE (s);
13286 s = TYPE_SIZE_UNIT (s);
13287 }
13288 else
13289 s = OMP_CLAUSE_SIZE (c);
13290 if (s == NULL_TREE)
13291 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13292 s = fold_convert (size_type_node, s);
13293 purpose = size_int (map_idx++);
13294 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13295 if (TREE_CODE (s) != INTEGER_CST)
13296 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13297
13298 unsigned HOST_WIDE_INT tkind, tkind_zero;
13299 switch (OMP_CLAUSE_CODE (c))
13300 {
13301 case OMP_CLAUSE_MAP:
13302 tkind = OMP_CLAUSE_MAP_KIND (c);
13303 tkind_zero = tkind;
13304 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13305 switch (tkind)
13306 {
13307 case GOMP_MAP_ALLOC:
13308 case GOMP_MAP_IF_PRESENT:
13309 case GOMP_MAP_TO:
13310 case GOMP_MAP_FROM:
13311 case GOMP_MAP_TOFROM:
13312 case GOMP_MAP_ALWAYS_TO:
13313 case GOMP_MAP_ALWAYS_FROM:
13314 case GOMP_MAP_ALWAYS_TOFROM:
13315 case GOMP_MAP_RELEASE:
13316 case GOMP_MAP_FORCE_TO:
13317 case GOMP_MAP_FORCE_FROM:
13318 case GOMP_MAP_FORCE_TOFROM:
13319 case GOMP_MAP_FORCE_PRESENT:
13320 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13321 break;
13322 case GOMP_MAP_DELETE:
13323 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13324 default:
13325 break;
13326 }
13327 if (tkind_zero != tkind)
13328 {
13329 if (integer_zerop (s))
13330 tkind = tkind_zero;
13331 else if (integer_nonzerop (s))
13332 tkind_zero = tkind;
13333 }
13334 if (tkind_zero == tkind
13335 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13336 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13337 & ~GOMP_MAP_IMPLICIT)
13338 == 0))
13339 {
13340 /* If this is an implicit map and the GOMP_MAP_IMPLICIT
13341 bits do not collide with any other special bit encoding,
13342 turn the GOMP_MAP_IMPLICIT flag on for the runtime
13343 to see. */
13344 tkind |= GOMP_MAP_IMPLICIT;
13345 tkind_zero = tkind;
13346 }
13347 break;
13348 case OMP_CLAUSE_FIRSTPRIVATE:
13349 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13350 tkind = GOMP_MAP_TO;
13351 tkind_zero = tkind;
13352 break;
13353 case OMP_CLAUSE_TO:
13354 tkind = GOMP_MAP_TO;
13355 tkind_zero = tkind;
13356 break;
13357 case OMP_CLAUSE_FROM:
13358 tkind = GOMP_MAP_FROM;
13359 tkind_zero = tkind;
13360 break;
13361 default:
13362 gcc_unreachable ();
13363 }
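/* Pack ceil_log2 of the alignment into the bits above talign_shift;
   the low bits carry the map kind itself.  */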
13364 gcc_checking_assert (tkind
13365 < (HOST_WIDE_INT_C (1U) << talign_shift));
13366 gcc_checking_assert (tkind_zero
13367 < (HOST_WIDE_INT_C (1U) << talign_shift));
13368 talign = ceil_log2 (talign);
13369 tkind |= talign << talign_shift;
13370 tkind_zero |= talign << talign_shift;
13371 gcc_checking_assert (tkind
13372 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13373 gcc_checking_assert (tkind_zero
13374 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13375 if (tkind == tkind_zero)
13376 x = build_int_cstu (tkind_type, tkind);
13377 else
13378 {
13379 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13380 x = build3 (COND_EXPR, tkind_type,
13381 fold_build2 (EQ_EXPR, boolean_type_node,
13382 unshare_expr (s), size_zero_node),
13383 build_int_cstu (tkind_type, tkind_zero),
13384 build_int_cstu (tkind_type, tkind));
13385 }
13386 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13387 if (nc && nc != c)
13388 c = nc;
13389 break;
13390
13391 case OMP_CLAUSE_FIRSTPRIVATE:
13392 omp_has_device_addr_descr:
13393 if (is_gimple_omp_oacc (ctx->stmt))
13394 goto oacc_firstprivate_map;
13395 ovar = OMP_CLAUSE_DECL (c);
13396 if (omp_privatize_by_reference (ovar))
13397 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13398 else
13399 talign = DECL_ALIGN_UNIT (ovar);
13400 var = lookup_decl_in_outer_ctx (ovar, ctx);
13401 x = build_sender_ref (ovar, ctx);
13402 tkind = GOMP_MAP_FIRSTPRIVATE;
13403 type = TREE_TYPE (ovar);
13404 if (omp_privatize_by_reference (ovar))
13405 type = TREE_TYPE (type);
13406 if ((INTEGRAL_TYPE_P (type)
13407 && TYPE_PRECISION (type) <= POINTER_SIZE)
13408 || TREE_CODE (type) == POINTER_TYPE)
13409 {
13410 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13411 tree t = var;
13412 if (omp_privatize_by_reference (var))
13413 t = build_simple_mem_ref (var);
13414 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13415 suppress_warning (var);
13416 if (TREE_CODE (type) != POINTER_TYPE)
13417 t = fold_convert (pointer_sized_int_node, t);
13418 t = fold_convert (TREE_TYPE (x), t);
13419 gimplify_assign (x, t, &ilist);
13420 }
13421 else if (omp_privatize_by_reference (var))
13422 gimplify_assign (x, var, &ilist);
13423 else if (is_gimple_reg (var))
13424 {
13425 tree avar = create_tmp_var (TREE_TYPE (var));
13426 mark_addressable (avar);
13427 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13428 suppress_warning (var);
13429 gimplify_assign (avar, var, &ilist);
13430 avar = build_fold_addr_expr (avar);
13431 gimplify_assign (x, avar, &ilist);
13432 }
13433 else
13434 {
13435 var = build_fold_addr_expr (var);
13436 gimplify_assign (x, var, &ilist);
13437 }
13438 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13439 s = size_int (0);
13440 else if (omp_privatize_by_reference (ovar))
13441 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13442 else
13443 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13444 s = fold_convert (size_type_node, s);
13445 purpose = size_int (map_idx++);
13446 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13447 if (TREE_CODE (s) != INTEGER_CST)
13448 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13449
13450 gcc_checking_assert (tkind
13451 < (HOST_WIDE_INT_C (1U) << talign_shift));
13452 talign = ceil_log2 (talign);
13453 tkind |= talign << talign_shift;
13454 gcc_checking_assert (tkind
13455 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13456 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13457 build_int_cstu (tkind_type, tkind));
13458 /* Fortran array descriptors: firstprivate of data + attach. */
13459 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13460 && lang_hooks.decls.omp_array_data (ovar, true))
13461 {
13462 tree not_null_lb, null_lb, after_lb;
13463 tree var1, var2, size1, size2;
13464 tree present = omp_check_optional_argument (ovar, true);
13465 if (present)
13466 {
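/* For a Fortran OPTIONAL argument, test its presence at run time
   and pass null data pointers and zero sizes when it is absent.  */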
13467 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13468 not_null_lb = create_artificial_label (clause_loc);
13469 null_lb = create_artificial_label (clause_loc);
13470 after_lb = create_artificial_label (clause_loc);
13471 gimple_seq seq = NULL;
13472 present = force_gimple_operand (present, &seq, true,
13473 NULL_TREE);
13474 gimple_seq_add_seq (&ilist, seq);
13475 gimple_seq_add_stmt (&ilist,
13476 gimple_build_cond_from_tree (present,
13477 not_null_lb, null_lb));
13478 gimple_seq_add_stmt (&ilist,
13479 gimple_build_label (not_null_lb));
13480 }
13481 var1 = lang_hooks.decls.omp_array_data (var, false);
13482 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13483 var2 = build_fold_addr_expr (x);
13484 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13485 var = build_fold_addr_expr (var);
13486 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13487 build_fold_addr_expr (var1), var);
13488 size2 = fold_convert (sizetype, size2);
13489 if (present)
13490 {
13491 tree tmp = create_tmp_var (TREE_TYPE (var1));
13492 gimplify_assign (tmp, var1, &ilist);
13493 var1 = tmp;
13494 tmp = create_tmp_var (TREE_TYPE (var2));
13495 gimplify_assign (tmp, var2, &ilist);
13496 var2 = tmp;
13497 tmp = create_tmp_var (TREE_TYPE (size1));
13498 gimplify_assign (tmp, size1, &ilist);
13499 size1 = tmp;
13500 tmp = create_tmp_var (TREE_TYPE (size2));
13501 gimplify_assign (tmp, size2, &ilist);
13502 size2 = tmp;
13503 gimple_seq_add_stmt (&ilist, gimple_build_goto (after_lb));
13504 gimple_seq_add_stmt (&ilist, gimple_build_label (null_lb));
13505 gimplify_assign (var1, null_pointer_node, &ilist);
13506 gimplify_assign (var2, null_pointer_node, &ilist);
13507 gimplify_assign (size1, size_zero_node, &ilist);
13508 gimplify_assign (size2, size_zero_node, &ilist);
13509 gimple_seq_add_stmt (&ilist, gimple_build_label (after_lb));
13510 }
13511 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13512 gimplify_assign (x, var1, &ilist);
13513 tkind = GOMP_MAP_FIRSTPRIVATE;
13514 talign = DECL_ALIGN_UNIT (ovar);
13515 talign = ceil_log2 (talign);
13516 tkind |= talign << talign_shift;
13517 gcc_checking_assert (tkind
13518 <= tree_to_uhwi (
13519 TYPE_MAX_VALUE (tkind_type)));
13520 purpose = size_int (map_idx++);
13521 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13522 if (TREE_CODE (size1) != INTEGER_CST)
13523 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13524 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13525 build_int_cstu (tkind_type, tkind));
13526 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13527 gimplify_assign (x, var2, &ilist);
13528 tkind = GOMP_MAP_ATTACH;
13529 purpose = size_int (map_idx++);
13530 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13531 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13532 build_int_cstu (tkind_type, tkind));
13533 }
13534 break;
13535
13536 case OMP_CLAUSE_USE_DEVICE_PTR:
13537 case OMP_CLAUSE_USE_DEVICE_ADDR:
13538 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13539 case OMP_CLAUSE_IS_DEVICE_PTR:
13540 ovar = OMP_CLAUSE_DECL (c);
13541 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13542 {
13543 if (lang_hooks.decls.omp_array_data (ovar, true))
13544 goto omp_has_device_addr_descr;
13545 while (TREE_CODE (ovar) == INDIRECT_REF
13546 || TREE_CODE (ovar) == ARRAY_REF)
13547 ovar = TREE_OPERAND (ovar, 0);
13548 }
13549 var = lookup_decl_in_outer_ctx (ovar, ctx);
13550
13551 if (lang_hooks.decls.omp_array_data (ovar, true))
13552 {
13553 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13554 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13555 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13556 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13557 }
13558 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13559 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13560 {
13561 tkind = GOMP_MAP_USE_DEVICE_PTR;
13562 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13563 }
13564 else
13565 {
13566 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13567 x = build_sender_ref (ovar, ctx);
13568 }
13569
13570 if (is_gimple_omp_oacc (ctx->stmt))
13571 {
13572 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13573
13574 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13575 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13576 }
13577
13578 type = TREE_TYPE (ovar);
13579 if (lang_hooks.decls.omp_array_data (ovar, true))
13580 var = lang_hooks.decls.omp_array_data (var, false);
13581 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13582 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13583 && !omp_privatize_by_reference (ovar)
13584 && !omp_is_allocatable_or_ptr (ovar))
13585 || TREE_CODE (type) == ARRAY_TYPE)
13586 var = build_fold_addr_expr (var);
13587 else
13588 {
13589 if (omp_privatize_by_reference (ovar)
13590 || omp_check_optional_argument (ovar, false)
13591 || omp_is_allocatable_or_ptr (ovar))
13592 {
13593 type = TREE_TYPE (type);
13594 if (POINTER_TYPE_P (type)
13595 && TREE_CODE (type) != ARRAY_TYPE
13596 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13597 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13598 && !omp_is_allocatable_or_ptr (ovar))
13599 || (omp_privatize_by_reference (ovar)
13600 && omp_is_allocatable_or_ptr (ovar))))
13601 var = build_simple_mem_ref (var);
13602 var = fold_convert (TREE_TYPE (x), var);
13603 }
13604 }
13605 tree present;
13606 present = omp_check_optional_argument (ovar, true);
13607 if (present)
13608 {
13609 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13610 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13611 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13612 tree new_x = unshare_expr (x);
13613 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13614 fb_rvalue);
13615 gcond *cond = gimple_build_cond_from_tree (present,
13616 notnull_label,
13617 null_label);
13618 gimple_seq_add_stmt (&ilist, cond);
13619 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13620 gimplify_assign (new_x, null_pointer_node, &ilist);
13621 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13622 gimple_seq_add_stmt (&ilist,
13623 gimple_build_label (notnull_label));
13624 gimplify_assign (x, var, &ilist);
13625 gimple_seq_add_stmt (&ilist,
13626 gimple_build_label (opt_arg_label));
13627 }
13628 else
13629 gimplify_assign (x, var, &ilist);
13630 s = size_int (0);
13631 purpose = size_int (map_idx++);
13632 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13633 gcc_checking_assert (tkind
13634 < (HOST_WIDE_INT_C (1U) << talign_shift));
13635 gcc_checking_assert (tkind
13636 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13637 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13638 build_int_cstu (tkind_type, tkind));
13639 break;
13640 }
13641
13642 gcc_assert (map_idx == map_cnt);
13643
13644 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13645 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13646 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13647 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13648 for (int i = 1; i <= 2; i++)
13649 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13650 {
13651 gimple_seq initlist = NULL;
13652 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13653 TREE_VEC_ELT (t, i)),
13654 &initlist, true, NULL_TREE);
13655 gimple_seq_add_seq (&ilist, initlist);
13656
13657 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13658 gimple_seq_add_stmt (&olist,
13659 gimple_build_assign (TREE_VEC_ELT (t, i),
13660 clobber));
13661 }
13662 else if (omp_maybe_offloaded_ctx (ctx->outer))
13663 {
13664 tree id = get_identifier ("omp declare target");
13665 tree decl = TREE_VEC_ELT (t, i);
13666 DECL_ATTRIBUTES (decl)
13667 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13668 varpool_node *node = varpool_node::get (decl);
13669 if (node)
13670 {
13671 node->offloadable = 1;
13672 if (ENABLE_OFFLOADING)
13673 {
13674 g->have_offload = true;
13675 vec_safe_push (offload_vars, decl);
13676 }
13677 }
13678 }
13679
13680 tree clobber = build_clobber (ctx->record_type);
13681 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13682 clobber));
13683 }
13684
13685 /* Once all the expansions are done, sequence all the different
13686 fragments inside gimple_omp_body. */
13687
13688 new_body = NULL;
13689
13690 if (offloaded
13691 && ctx->record_type)
13692 {
13693 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13694 /* fixup_child_record_type might have changed receiver_decl's type. */
13695 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13696 gimple_seq_add_stmt (&new_body,
13697 gimple_build_assign (ctx->receiver_decl, t));
13698 }
13699 gimple_seq_add_seq (&new_body, fplist);
13700
13701 if (offloaded || data_region)
13702 {
13703 tree prev = NULL_TREE;
13704 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13705 switch (OMP_CLAUSE_CODE (c))
13706 {
13707 tree var, x;
13708 default:
13709 break;
13710 case OMP_CLAUSE_FIRSTPRIVATE:
13711 omp_firstprivatize_data_region:
13712 if (is_gimple_omp_oacc (ctx->stmt))
13713 break;
13714 var = OMP_CLAUSE_DECL (c);
13715 if (omp_privatize_by_reference (var)
13716 || is_gimple_reg_type (TREE_TYPE (var)))
13717 {
13718 tree new_var = lookup_decl (var, ctx);
13719 tree type;
13720 type = TREE_TYPE (var);
13721 if (omp_privatize_by_reference (var))
13722 type = TREE_TYPE (type);
13723 if ((INTEGRAL_TYPE_P (type)
13724 && TYPE_PRECISION (type) <= POINTER_SIZE)
13725 || TREE_CODE (type) == POINTER_TYPE)
13726 {
13727 x = build_receiver_ref (var, false, ctx);
13728 if (TREE_CODE (type) != POINTER_TYPE)
13729 x = fold_convert (pointer_sized_int_node, x);
13730 x = fold_convert (type, x);
13731 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13732 fb_rvalue);
13733 if (omp_privatize_by_reference (var))
13734 {
13735 tree v = create_tmp_var_raw (type, get_name (var));
13736 gimple_add_tmp_var (v);
13737 TREE_ADDRESSABLE (v) = 1;
13738 gimple_seq_add_stmt (&new_body,
13739 gimple_build_assign (v, x));
13740 x = build_fold_addr_expr (v);
13741 }
13742 gimple_seq_add_stmt (&new_body,
13743 gimple_build_assign (new_var, x));
13744 }
13745 else
13746 {
13747 bool by_ref = !omp_privatize_by_reference (var);
13748 x = build_receiver_ref (var, by_ref, ctx);
13749 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13750 fb_rvalue);
13751 gimple_seq_add_stmt (&new_body,
13752 gimple_build_assign (new_var, x));
13753 }
13754 }
13755 else if (is_variable_sized (var))
13756 {
13757 tree pvar = DECL_VALUE_EXPR (var);
13758 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13759 pvar = TREE_OPERAND (pvar, 0);
13760 gcc_assert (DECL_P (pvar));
13761 tree new_var = lookup_decl (pvar, ctx);
13762 x = build_receiver_ref (var, false, ctx);
13763 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13764 gimple_seq_add_stmt (&new_body,
13765 gimple_build_assign (new_var, x));
13766 }
13767 break;
13768 case OMP_CLAUSE_PRIVATE:
13769 if (is_gimple_omp_oacc (ctx->stmt))
13770 break;
13771 var = OMP_CLAUSE_DECL (c);
13772 if (omp_privatize_by_reference (var))
13773 {
13774 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13775 tree new_var = lookup_decl (var, ctx);
13776 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13777 if (TREE_CONSTANT (x))
13778 {
13779 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13780 get_name (var));
13781 gimple_add_tmp_var (x);
13782 TREE_ADDRESSABLE (x) = 1;
13783 x = build_fold_addr_expr_loc (clause_loc, x);
13784 }
13785 else
13786 break;
13787
13788 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13789 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13790 gimple_seq_add_stmt (&new_body,
13791 gimple_build_assign (new_var, x));
13792 }
13793 break;
13794 case OMP_CLAUSE_USE_DEVICE_PTR:
13795 case OMP_CLAUSE_USE_DEVICE_ADDR:
13796 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13797 case OMP_CLAUSE_IS_DEVICE_PTR:
13798 tree new_var;
13799 gimple_seq assign_body;
13800 bool is_array_data;
13801 bool do_optional_check;
13802 assign_body = NULL;
13803 do_optional_check = false;
13804 var = OMP_CLAUSE_DECL (c);
13805 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13807 goto omp_firstprivatize_data_region;
13808
13809 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13810 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13811 x = build_sender_ref (is_array_data
13812 ? (splay_tree_key) &DECL_NAME (var)
13813 : (splay_tree_key) &DECL_UID (var), ctx);
13814 else
13815 {
13816 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13817 {
13818 while (TREE_CODE (var) == INDIRECT_REF
13819 || TREE_CODE (var) == ARRAY_REF)
13820 var = TREE_OPERAND (var, 0);
13821 }
13822 x = build_receiver_ref (var, false, ctx);
13823 }
13824
13825 if (is_array_data)
13826 {
13827 bool is_ref = omp_privatize_by_reference (var);
13828 do_optional_check = true;
13829 /* First, we copy the descriptor data from the host; then
13830 we update its data to point to the target address. */
13831 new_var = lookup_decl (var, ctx);
13832 new_var = DECL_VALUE_EXPR (new_var);
13833 tree v = new_var;
13834 tree v2 = var;
13835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13836 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13837 v2 = maybe_lookup_decl_in_outer_ctx (var, ctx);
13838
13839 if (is_ref)
13840 {
13841 v2 = build_fold_indirect_ref (v2);
13842 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13843 gimple_add_tmp_var (v);
13844 TREE_ADDRESSABLE (v) = 1;
13845 gimplify_assign (v, v2, &assign_body);
13846 tree rhs = build_fold_addr_expr (v);
13847 gimple_seq_add_stmt (&assign_body,
13848 gimple_build_assign (new_var, rhs));
13849 }
13850 else
13851 gimplify_assign (new_var, v2, &assign_body);
13852
13853 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13854 gcc_assert (v2);
13855 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13856 gimple_seq_add_stmt (&assign_body,
13857 gimple_build_assign (v2, x));
13858 }
13859 else if (is_variable_sized (var))
13860 {
13861 tree pvar = DECL_VALUE_EXPR (var);
13862 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13863 pvar = TREE_OPERAND (pvar, 0);
13864 gcc_assert (DECL_P (pvar));
13865 new_var = lookup_decl (pvar, ctx);
13866 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13867 gimple_seq_add_stmt (&assign_body,
13868 gimple_build_assign (new_var, x));
13869 }
13870 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13871 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13872 && !omp_privatize_by_reference (var)
13873 && !omp_is_allocatable_or_ptr (var))
13874 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13875 {
13876 new_var = lookup_decl (var, ctx);
13877 new_var = DECL_VALUE_EXPR (new_var);
13878 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13879 new_var = TREE_OPERAND (new_var, 0);
13880 gcc_assert (DECL_P (new_var));
13881 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13882 gimple_seq_add_stmt (&assign_body,
13883 gimple_build_assign (new_var, x));
13884 }
13885 else
13886 {
13887 tree type = TREE_TYPE (var);
13888 new_var = lookup_decl (var, ctx);
13889 if (omp_privatize_by_reference (var))
13890 {
13891 type = TREE_TYPE (type);
13892 if (POINTER_TYPE_P (type)
13893 && TREE_CODE (type) != ARRAY_TYPE
13894 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13895 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13896 || (omp_privatize_by_reference (var)
13897 && omp_is_allocatable_or_ptr (var))))
13898 {
13899 tree v = create_tmp_var_raw (type, get_name (var));
13900 gimple_add_tmp_var (v);
13901 TREE_ADDRESSABLE (v) = 1;
13902 x = fold_convert (type, x);
13903 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13904 fb_rvalue);
13905 gimple_seq_add_stmt (&assign_body,
13906 gimple_build_assign (v, x));
13907 x = build_fold_addr_expr (v);
13908 do_optional_check = true;
13909 }
13910 }
13911 new_var = DECL_VALUE_EXPR (new_var);
13912 x = fold_convert (TREE_TYPE (new_var), x);
13913 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13914 gimple_seq_add_stmt (&assign_body,
13915 gimple_build_assign (new_var, x));
13916 }
13917 tree present;
13918 present = ((do_optional_check
13919 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13920 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13921 : NULL_TREE);
13922 if (present)
13923 {
13924 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13925 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13926 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13927 glabel *null_glabel = gimple_build_label (null_label);
13928 glabel *notnull_glabel = gimple_build_label (notnull_label);
13929 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13930 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13931 fb_rvalue);
13932 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13933 fb_rvalue);
13934 gcond *cond = gimple_build_cond_from_tree (present,
13935 notnull_label,
13936 null_label);
13937 gimple_seq_add_stmt (&new_body, cond);
13938 gimple_seq_add_stmt (&new_body, null_glabel);
13939 gimplify_assign (new_var, null_pointer_node, &new_body);
13940 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13941 gimple_seq_add_stmt (&new_body, notnull_glabel);
13942 gimple_seq_add_seq (&new_body, assign_body);
13943 gimple_seq_add_stmt (&new_body,
13944 gimple_build_label (opt_arg_label));
13945 }
13946 else
13947 gimple_seq_add_seq (&new_body, assign_body);
13948 break;
13949 }
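/* A sketch of the sequence emitted above for a Fortran OPTIONAL dummy
   argument in a use_device_ptr/is_device_ptr clause (label names are
   invented for illustration):

       if (present) goto notnull; else goto null;
     null:
       new_var = NULL;
       goto opt_arg;
     notnull:
       <assign_body: copy descriptor / install device address>
     opt_arg:

   i.e. an absent argument is simply privatized as a NULL pointer.  */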
13950 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
13951 so that any firstprivate vars referenced by OMP_CLAUSE_SIZE, if
13952 needed, have already been handled.  Likewise OMP_CLAUSE_PRIVATE for
13953 VLAs or references to VLAs.  */
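/* Illustrative example (assuming a C source; names made up): for

       int *p;
       #pragma omp target map(tofrom: p[0:n])

   the array-section map is followed by a GOMP_MAP_FIRSTPRIVATE_POINTER
   clause for P, and the loop below resets the privatized P to the
   received buffer address minus the recorded bias (OMP_CLAUSE_SIZE).  */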
13954 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13955 switch (OMP_CLAUSE_CODE (c))
13956 {
13957 tree var;
13958 default:
13959 break;
13960 case OMP_CLAUSE_MAP:
13961 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13962 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13963 {
13964 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13965 poly_int64 offset = 0;
13966 gcc_assert (prev);
13967 var = OMP_CLAUSE_DECL (c);
13968 if (DECL_P (var)
13969 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13970 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13971 ctx))
13972 && varpool_node::get_create (var)->offloadable)
13973 break;
13974 if (TREE_CODE (var) == INDIRECT_REF
13975 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13976 var = TREE_OPERAND (var, 0);
13977 if (TREE_CODE (var) == COMPONENT_REF)
13978 {
13979 var = get_addr_base_and_unit_offset (var, &offset);
13980 gcc_assert (var != NULL_TREE && DECL_P (var));
13981 }
13982 else if (DECL_SIZE (var)
13983 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13984 {
13985 tree var2 = DECL_VALUE_EXPR (var);
13986 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13987 var2 = TREE_OPERAND (var2, 0);
13988 gcc_assert (DECL_P (var2));
13989 var = var2;
13990 }
13991 tree new_var = lookup_decl (var, ctx), x;
13992 tree type = TREE_TYPE (new_var);
13993 bool is_ref;
13994 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13995 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13996 == COMPONENT_REF))
13997 {
13998 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13999 is_ref = true;
14000 new_var = build2 (MEM_REF, type,
14001 build_fold_addr_expr (new_var),
14002 build_int_cst (build_pointer_type (type),
14003 offset));
14004 }
14005 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
14006 {
14007 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
14008 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
14009 new_var = build2 (MEM_REF, type,
14010 build_fold_addr_expr (new_var),
14011 build_int_cst (build_pointer_type (type),
14012 offset));
14013 }
14014 else
14015 is_ref = omp_privatize_by_reference (var);
14016 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14017 is_ref = false;
14018 bool ref_to_array = false;
14019 bool ref_to_ptr = false;
14020 if (is_ref)
14021 {
14022 type = TREE_TYPE (type);
14023 if (TREE_CODE (type) == ARRAY_TYPE)
14024 {
14025 type = build_pointer_type (type);
14026 ref_to_array = true;
14027 }
14028 }
14029 else if (TREE_CODE (type) == ARRAY_TYPE)
14030 {
14031 tree decl2 = DECL_VALUE_EXPR (new_var);
14032 gcc_assert (TREE_CODE (decl2) == MEM_REF);
14033 decl2 = TREE_OPERAND (decl2, 0);
14034 gcc_assert (DECL_P (decl2));
14035 new_var = decl2;
14036 type = TREE_TYPE (new_var);
14037 }
14038 else if (TREE_CODE (type) == REFERENCE_TYPE
14039 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
14040 {
14041 type = TREE_TYPE (type);
14042 ref_to_ptr = true;
14043 }
14044 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
14045 x = fold_convert_loc (clause_loc, type, x);
14046 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
14047 {
14048 tree bias = OMP_CLAUSE_SIZE (c);
14049 if (DECL_P (bias))
14050 bias = lookup_decl (bias, ctx);
14051 bias = fold_convert_loc (clause_loc, sizetype, bias);
14052 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
14053 bias);
14054 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
14055 TREE_TYPE (x), x, bias);
14056 }
14057 if (ref_to_array)
14058 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14059 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14060 if ((is_ref && !ref_to_array)
14061 || ref_to_ptr)
14062 {
14063 tree t = create_tmp_var_raw (type, get_name (var));
14064 gimple_add_tmp_var (t);
14065 TREE_ADDRESSABLE (t) = 1;
14066 gimple_seq_add_stmt (&new_body,
14067 gimple_build_assign (t, x));
14068 x = build_fold_addr_expr_loc (clause_loc, t);
14069 }
14070 gimple_seq_add_stmt (&new_body,
14071 gimple_build_assign (new_var, x));
14072 prev = NULL_TREE;
14073 }
14074 else if (OMP_CLAUSE_CHAIN (c)
14075 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14076 == OMP_CLAUSE_MAP
14077 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14078 == GOMP_MAP_FIRSTPRIVATE_POINTER
14079 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14080 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14081 prev = c;
14082 break;
14083 case OMP_CLAUSE_PRIVATE:
14084 var = OMP_CLAUSE_DECL (c);
14085 if (is_variable_sized (var))
14086 {
14087 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14088 tree new_var = lookup_decl (var, ctx);
14089 tree pvar = DECL_VALUE_EXPR (var);
14090 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14091 pvar = TREE_OPERAND (pvar, 0);
14092 gcc_assert (DECL_P (pvar));
14093 tree new_pvar = lookup_decl (pvar, ctx);
14094 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14095 tree al = size_int (DECL_ALIGN (var));
14096 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14097 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14098 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14099 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14100 gimple_seq_add_stmt (&new_body,
14101 gimple_build_assign (new_pvar, x));
14102 }
14103 else if (omp_privatize_by_reference (var)
14104 && !is_gimple_omp_oacc (ctx->stmt))
14105 {
14106 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14107 tree new_var = lookup_decl (var, ctx);
14108 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14109 if (TREE_CONSTANT (x))
14110 break;
14111 else
14112 {
14113 tree atmp
14114 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
14115 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14116 tree al = size_int (TYPE_ALIGN (rtype));
14117 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14118 }
14119
14120 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14121 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14122 gimple_seq_add_stmt (&new_body,
14123 gimple_build_assign (new_var, x));
14124 }
14125 break;
14126 }
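/* E.g. for '#pragma omp target private(vla)' with 'int vla[n]', the
   OMP_CLAUSE_PRIVATE case above materializes the storage on the device
   roughly as (temporary name invented):

       D.1234 = __builtin_alloca_with_align (size, align);
       vla.0 = (int *) D.1234;

   where vla.0 is the pointer behind the VLA's DECL_VALUE_EXPR.  */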
14127
14128 gimple_seq fork_seq = NULL;
14129 gimple_seq join_seq = NULL;
14130
14131 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
14132 {
14133 /* If there are reductions on the offloaded region itself, treat
14134 them as a dummy GANG loop. */
14135 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14136
14137 gcall *private_marker = lower_oacc_private_marker (ctx);
14138
14139 if (private_marker)
14140 gimple_call_set_arg (private_marker, 2, level);
14141
14142 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
14143 false, NULL, private_marker, NULL, &fork_seq,
14144 &join_seq, ctx);
14145 }
14146
14147 gimple_seq_add_seq (&new_body, fork_seq);
14148 gimple_seq_add_seq (&new_body, tgt_body);
14149 gimple_seq_add_seq (&new_body, join_seq);
14150
14151 if (offloaded)
14152 {
14153 new_body = maybe_catch_exception (new_body);
14154 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14155 }
14156 gimple_omp_set_body (stmt, new_body);
14157 }
14158
14159 bind = gimple_build_bind (NULL, NULL,
14160 tgt_bind ? gimple_bind_block (tgt_bind)
14161 : NULL_TREE);
14162 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14163 gimple_bind_add_seq (bind, ilist);
14164 gimple_bind_add_stmt (bind, stmt);
14165 gimple_bind_add_seq (bind, olist);
14166
14167 pop_gimplify_context (NULL);
14168
14169 if (dep_bind)
14170 {
14171 gimple_bind_add_seq (dep_bind, dep_ilist);
14172 gimple_bind_add_stmt (dep_bind, bind);
14173 gimple_bind_add_seq (dep_bind, dep_olist);
14174 pop_gimplify_context (dep_bind);
14175 }
14176 }
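/* The end result of lowering a target construct is thus roughly

       <bind> {
         <ilist: data-mapping setup, .omp_data_* initialization>
         #pragma omp target ...   <with the new, lowered body>
         <olist: copy-back, clobbers>
       }

   optionally wrapped in DEP_BIND when depend clauses had to be
   instantiated first (a paraphrase of the code above, not extra
   semantics).  */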
14177
14178 /* Expand code for an OpenMP teams directive. */
14179
14180 static void
14181 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14182 {
14183 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14184 push_gimplify_context ();
14185
14186 tree block = make_node (BLOCK);
14187 gbind *bind = gimple_build_bind (NULL, NULL, block);
14188 gsi_replace (gsi_p, bind, true);
14189 gimple_seq bind_body = NULL;
14190 gimple_seq dlist = NULL;
14191 gimple_seq olist = NULL;
14192
14193 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14194 OMP_CLAUSE_NUM_TEAMS);
14195 tree num_teams_lower = NULL_TREE;
14196 if (num_teams == NULL_TREE)
14197 num_teams = build_int_cst (unsigned_type_node, 0);
14198 else
14199 {
14200 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14201 if (num_teams_lower)
14202 {
14203 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14204 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14205 fb_rvalue);
14206 }
14207 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14208 num_teams = fold_convert (unsigned_type_node, num_teams);
14209 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14210 }
14211 if (num_teams_lower == NULL_TREE)
14212 num_teams_lower = num_teams;
14213 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14214 OMP_CLAUSE_THREAD_LIMIT);
14215 if (thread_limit == NULL_TREE)
14216 thread_limit = build_int_cst (unsigned_type_node, 0);
14217 else
14218 {
14219 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14220 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14221 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14222 fb_rvalue);
14223 }
14224 location_t loc = gimple_location (teams_stmt);
14225 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14226 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14227 tree first = create_tmp_var (rettype);
14228 gimple_seq_add_stmt (&bind_body,
14229 gimple_build_assign (first, build_one_cst (rettype)));
14230 tree llabel = create_artificial_label (loc);
14231 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14232 gimple *call
14233 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14234 first);
14235 gimple_set_location (call, loc);
14236 tree temp = create_tmp_var (rettype);
14237 gimple_call_set_lhs (call, temp);
14238 gimple_seq_add_stmt (&bind_body, call);
14239
14240 tree tlabel = create_artificial_label (loc);
14241 tree flabel = create_artificial_label (loc);
14242 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14243 tlabel, flabel);
14244 gimple_seq_add_stmt (&bind_body, cond);
14245 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14246 gimple_seq_add_stmt (&bind_body,
14247 gimple_build_assign (first, build_zero_cst (rettype)));
14248
14249 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14250 &bind_body, &dlist, ctx, NULL);
14251 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14252 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14253 NULL, ctx);
14254 gimple_seq_add_stmt (&bind_body, teams_stmt);
14255
14256 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14257 gimple_omp_set_body (teams_stmt, NULL);
14258 gimple_seq_add_seq (&bind_body, olist);
14259 gimple_seq_add_seq (&bind_body, dlist);
14260 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14261 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14262 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14263 gimple_bind_set_body (bind, bind_body);
14264
14265 pop_gimplify_context (bind);
14266
14267 gimple_bind_append_vars (bind, ctx->block_vars);
14268 BLOCK_VARS (block) = ctx->block_vars;
14269 if (BLOCK_VARS (block))
14270 TREE_USED (block) = 1;
14271 }
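/* A rough sketch of the host-side loop constructed above (label and
   temporary names are invented; the exact GOMP_teams4 contract lives in
   libgomp):

       first = 1;
     retry:
       temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit, first);
       if (temp != 0) goto body; else goto done;
     body:
       first = 0;
       <lowered clauses and teams body>
       <OMP return>
       goto retry;
     done:

   so a nonzero return value asks for another iteration of the teams
   body, e.g. for host fallback execution.  */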
14272
14273 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14274 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14275 of OMP context, but with make_addressable_vars set. */
14276
14277 static tree
14278 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14279 void *data)
14280 {
14281 tree t = *tp;
14282
14283 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14284 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14285 && data == NULL
14286 && DECL_HAS_VALUE_EXPR_P (t))
14287 return t;
14288
14289 if (make_addressable_vars
14290 && DECL_P (t)
14291 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14292 return t;
14293
14294 /* If a global variable has been privatized, TREE_CONSTANT on
14295 ADDR_EXPR might be wrong. */
14296 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14297 recompute_tree_invariant_for_addr_expr (t);
14298
14299 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14300 return NULL_TREE;
14301 }
14302
14303 /* Data to be communicated between lower_omp_regimplify_operands and
14304 lower_omp_regimplify_operands_p. */
14305
14306 struct lower_omp_regimplify_operands_data
14307 {
14308 omp_context *ctx;
14309 vec<tree> *decls;
14310 };
14311
14312 /* Helper function for lower_omp_regimplify_operands. Find
14313 omp_member_access_dummy_var vars and temporarily adjust their
14314 DECL_VALUE_EXPRs if needed. */
14315
14316 static tree
14317 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14318 void *data)
14319 {
14320 tree t = omp_member_access_dummy_var (*tp);
14321 if (t)
14322 {
14323 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14324 lower_omp_regimplify_operands_data *ldata
14325 = (lower_omp_regimplify_operands_data *) wi->info;
14326 tree o = maybe_lookup_decl (t, ldata->ctx);
14327 if (o != t)
14328 {
14329 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14330 ldata->decls->safe_push (*tp);
14331 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14332 SET_DECL_VALUE_EXPR (*tp, v);
14333 }
14334 }
14335 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14336 return NULL_TREE;
14337 }
14338
14339 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14340 of omp_member_access_dummy_var vars during regimplification. */
14341
14342 static void
14343 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14344 gimple_stmt_iterator *gsi_p)
14345 {
14346 auto_vec<tree, 10> decls;
14347 if (ctx)
14348 {
14349 struct walk_stmt_info wi;
14350 memset (&wi, '\0', sizeof (wi));
14351 struct lower_omp_regimplify_operands_data data;
14352 data.ctx = ctx;
14353 data.decls = &decls;
14354 wi.info = &data;
14355 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14356 }
14357 gimple_regimplify_operands (stmt, gsi_p);
14358 while (!decls.is_empty ())
14359 {
14360 tree t = decls.pop ();
14361 tree v = decls.pop ();
14362 SET_DECL_VALUE_EXPR (t, v);
14363 }
14364 }
14365
14366 static void
14367 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14368 {
14369 gimple *stmt = gsi_stmt (*gsi_p);
14370 struct walk_stmt_info wi;
14371 gcall *call_stmt;
14372
14373 if (gimple_has_location (stmt))
14374 input_location = gimple_location (stmt);
14375
14376 if (make_addressable_vars)
14377 memset (&wi, '\0', sizeof (wi));
14378
14379 /* If we have issued syntax errors, avoid doing any heavy lifting.
14380 Just replace the OMP directives with a NOP to avoid
14381 confusing RTL expansion. */
14382 if (seen_error () && is_gimple_omp (stmt))
14383 {
14384 gsi_replace (gsi_p, gimple_build_nop (), true);
14385 return;
14386 }
14387
14388 switch (gimple_code (stmt))
14389 {
14390 case GIMPLE_COND:
14391 {
14392 gcond *cond_stmt = as_a <gcond *> (stmt);
14393 if ((ctx || make_addressable_vars)
14394 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14395 lower_omp_regimplify_p,
14396 ctx ? NULL : &wi, NULL)
14397 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14398 lower_omp_regimplify_p,
14399 ctx ? NULL : &wi, NULL)))
14400 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
14401 }
14402 break;
14403 case GIMPLE_CATCH:
14404 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
14405 break;
14406 case GIMPLE_EH_FILTER:
14407 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
14408 break;
14409 case GIMPLE_TRY:
14410 lower_omp (gimple_try_eval_ptr (stmt), ctx);
14411 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
14412 break;
14413 case GIMPLE_TRANSACTION:
14414 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
14415 ctx);
14416 break;
14417 case GIMPLE_BIND:
14418 if (ctx && is_gimple_omp_oacc (ctx->stmt))
14419 {
14420 tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
14421 oacc_privatization_scan_decl_chain (ctx, vars);
14422 }
14423 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
14424 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
14425 break;
14426 case GIMPLE_OMP_PARALLEL:
14427 case GIMPLE_OMP_TASK:
14428 ctx = maybe_lookup_ctx (stmt);
14429 gcc_assert (ctx);
14430 if (ctx->cancellable)
14431 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14432 lower_omp_taskreg (gsi_p, ctx);
14433 break;
14434 case GIMPLE_OMP_FOR:
14435 ctx = maybe_lookup_ctx (stmt);
14436 gcc_assert (ctx);
14437 if (ctx->cancellable)
14438 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14439 lower_omp_for (gsi_p, ctx);
14440 break;
14441 case GIMPLE_OMP_SECTIONS:
14442 ctx = maybe_lookup_ctx (stmt);
14443 gcc_assert (ctx);
14444 if (ctx->cancellable)
14445 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14446 lower_omp_sections (gsi_p, ctx);
14447 break;
14448 case GIMPLE_OMP_SCOPE:
14449 ctx = maybe_lookup_ctx (stmt);
14450 gcc_assert (ctx);
14451 lower_omp_scope (gsi_p, ctx);
14452 break;
14453 case GIMPLE_OMP_SINGLE:
14454 ctx = maybe_lookup_ctx (stmt);
14455 gcc_assert (ctx);
14456 lower_omp_single (gsi_p, ctx);
14457 break;
14458 case GIMPLE_OMP_MASTER:
14459 case GIMPLE_OMP_MASKED:
14460 ctx = maybe_lookup_ctx (stmt);
14461 gcc_assert (ctx);
14462 lower_omp_master (gsi_p, ctx);
14463 break;
14464 case GIMPLE_OMP_TASKGROUP:
14465 ctx = maybe_lookup_ctx (stmt);
14466 gcc_assert (ctx);
14467 lower_omp_taskgroup (gsi_p, ctx);
14468 break;
14469 case GIMPLE_OMP_ORDERED:
14470 ctx = maybe_lookup_ctx (stmt);
14471 gcc_assert (ctx);
14472 lower_omp_ordered (gsi_p, ctx);
14473 break;
14474 case GIMPLE_OMP_SCAN:
14475 ctx = maybe_lookup_ctx (stmt);
14476 gcc_assert (ctx);
14477 lower_omp_scan (gsi_p, ctx);
14478 break;
14479 case GIMPLE_OMP_CRITICAL:
14480 ctx = maybe_lookup_ctx (stmt);
14481 gcc_assert (ctx);
14482 lower_omp_critical (gsi_p, ctx);
14483 break;
14484 case GIMPLE_OMP_ATOMIC_LOAD:
14485 if ((ctx || make_addressable_vars)
14486 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14487 as_a <gomp_atomic_load *> (stmt)),
14488 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14489 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14490 break;
14491 case GIMPLE_OMP_TARGET:
14492 ctx = maybe_lookup_ctx (stmt);
14493 gcc_assert (ctx);
14494 lower_omp_target (gsi_p, ctx);
14495 break;
14496 case GIMPLE_OMP_TEAMS:
14497 ctx = maybe_lookup_ctx (stmt);
14498 gcc_assert (ctx);
14499 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
14500 lower_omp_taskreg (gsi_p, ctx);
14501 else
14502 lower_omp_teams (gsi_p, ctx);
14503 break;
14504 case GIMPLE_CALL:
14505 tree fndecl;
14506 call_stmt = as_a <gcall *> (stmt);
14507 fndecl = gimple_call_fndecl (call_stmt);
14508 if (fndecl
14509 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14510 switch (DECL_FUNCTION_CODE (fndecl))
14511 {
14512 case BUILT_IN_GOMP_BARRIER:
14513 if (ctx == NULL)
14514 break;
14515 /* FALLTHRU */
14516 case BUILT_IN_GOMP_CANCEL:
14517 case BUILT_IN_GOMP_CANCELLATION_POINT:
14518 omp_context *cctx;
14519 cctx = ctx;
14520 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
14521 cctx = cctx->outer;
14522 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14523 if (!cctx->cancellable)
14524 {
14525 if (DECL_FUNCTION_CODE (fndecl)
14526 == BUILT_IN_GOMP_CANCELLATION_POINT)
14527 {
14528 stmt = gimple_build_nop ();
14529 gsi_replace (gsi_p, stmt, false);
14530 }
14531 break;
14532 }
14533 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
14534 {
14535 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
14536 gimple_call_set_fndecl (call_stmt, fndecl);
14537 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14538 }
14539 tree lhs;
14540 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14541 gimple_call_set_lhs (call_stmt, lhs);
14542 tree fallthru_label;
14543 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14544 gimple *g;
14545 g = gimple_build_label (fallthru_label);
14546 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14547 g = gimple_build_cond (NE_EXPR, lhs,
14548 fold_convert (TREE_TYPE (lhs),
14549 boolean_false_node),
14550 cctx->cancel_label, fallthru_label);
14551 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14552 break;
14553 default:
14554 break;
14555 }
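/* Sketch of the cancellation rewrite above (temporary and label names
   invented): within a cancellable construct,

       GOMP_barrier ();

   becomes roughly

       lhs = GOMP_barrier_cancel ();
       if (lhs != 0) goto <cancel_label>; else goto <fallthru_label>;
     <fallthru_label>:

   GOMP_cancel and GOMP_cancellation_point get the same return-value
   test, just without the fndecl replacement.  */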
14556 goto regimplify;
14557
14558 case GIMPLE_ASSIGN:
14559 for (omp_context *up = ctx; up; up = up->outer)
14560 {
14561 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
14562 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
14563 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
14564 || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
14565 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
14566 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
14567 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
14568 && (gimple_omp_target_kind (up->stmt)
14569 == GF_OMP_TARGET_KIND_DATA)))
14570 continue;
14571 else if (!up->lastprivate_conditional_map)
14572 break;
14573 tree lhs = get_base_address (gimple_assign_lhs (stmt));
14574 if (TREE_CODE (lhs) == MEM_REF
14575 && DECL_P (TREE_OPERAND (lhs, 0))
14576 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14577 0))) == REFERENCE_TYPE)
14578 lhs = TREE_OPERAND (lhs, 0);
14579 if (DECL_P (lhs))
14580 if (tree *v = up->lastprivate_conditional_map->get (lhs))
14581 {
14582 tree clauses;
14583 if (up->combined_into_simd_safelen1)
14584 {
14585 up = up->outer;
14586 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
14587 up = up->outer;
14588 }
14589 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
14590 clauses = gimple_omp_for_clauses (up->stmt);
14591 else
14592 clauses = gimple_omp_sections_clauses (up->stmt);
14593 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
14594 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14595 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14596 OMP_CLAUSE__CONDTEMP_);
14597 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14598 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14599 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14600 }
14601 }
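/* Illustrative effect of the loop above: under
   'lastprivate (conditional: x)', every store

       x = value;

   gets an extra

       <x's tracking temp> = <current _condtemp_ iterator>;

   appended, so the expansion can later determine which conditional
   store executed last (the temp comes from lastprivate_conditional_map;
   names here are invented).  */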
14602 /* FALLTHRU */
14603
14604 default:
14605 regimplify:
14606 if ((ctx || make_addressable_vars)
14607 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14608 ctx ? NULL : &wi))
14609 {
14610 /* Just remove clobbers; this should happen only if we have
14611 "privatized" local addressable variables in SIMD regions.  The
14612 clobber isn't needed in that case, and gimplifying the address
14613 of the ARRAY_REF into a pointer and creating a MEM_REF-based
14614 clobber would create worse code than we get with the clobber
14615 dropped.  */
14616 if (gimple_clobber_p (stmt))
14617 {
14618 gsi_replace (gsi_p, gimple_build_nop (), true);
14619 break;
14620 }
14621 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14622 }
14623 break;
14624 }
14625 }
14626
14627 static void
14628 lower_omp (gimple_seq *body, omp_context *ctx)
14629 {
14630 location_t saved_location = input_location;
14631 gimple_stmt_iterator gsi;
14632 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14633 lower_omp_1 (&gsi, ctx);
14634 /* During gimplification, we haven't folded statements inside offloading
14635 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14636 if (target_nesting_level || taskreg_nesting_level)
14637 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14638 fold_stmt (&gsi);
14639 input_location = saved_location;
14640 }
14641
14642 /* Main entry point. */
14643
14644 static unsigned int
14645 execute_lower_omp (void)
14646 {
14647 gimple_seq body;
14648 int i;
14649 omp_context *ctx;
14650
14651 /* This pass always runs, to provide PROP_gimple_lomp.
14652 But often, there is nothing to do. */
14653 if (flag_openacc == 0 && flag_openmp == 0
14654 && flag_openmp_simd == 0)
14655 return 0;
14656
14657 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14658 delete_omp_context);
14659
14660 body = gimple_body (current_function_decl);
14661
14662 scan_omp (&body, NULL);
14663 gcc_assert (taskreg_nesting_level == 0);
14664 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14665 finish_taskreg_scan (ctx);
14666 taskreg_contexts.release ();
14667
14668 if (all_contexts->root)
14669 {
14670 if (make_addressable_vars)
14671 push_gimplify_context ();
14672 lower_omp (&body, NULL);
14673 if (make_addressable_vars)
14674 pop_gimplify_context (NULL);
14675 }
14676
14677 if (all_contexts)
14678 {
14679 splay_tree_delete (all_contexts);
14680 all_contexts = NULL;
14681 }
14682 BITMAP_FREE (make_addressable_vars);
14683 BITMAP_FREE (global_nonaddressable_vars);
14684
14685 /* If the current function is a method, remove the artificial dummy VAR_DECLs
14686 created for non-static data member privatization; they aren't needed for
14687 debug info or anything else, have already been replaced everywhere in the
14688 IL, and cause problems with LTO. */
14689 if (DECL_ARGUMENTS (current_function_decl)
14690 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14691 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14692 == POINTER_TYPE))
14693 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14694
14695 for (auto task_stmt : task_cpyfns)
14696 finalize_task_copyfn (task_stmt);
14697 task_cpyfns.release ();
14698 return 0;
14699 }
14700
14701 namespace {
14702
14703 const pass_data pass_data_lower_omp =
14704 {
14705 GIMPLE_PASS, /* type */
14706 "omplower", /* name */
14707 OPTGROUP_OMP, /* optinfo_flags */
14708 TV_NONE, /* tv_id */
14709 PROP_gimple_any, /* properties_required */
14710 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
14711 0, /* properties_destroyed */
14712 0, /* todo_flags_start */
14713 0, /* todo_flags_finish */
14714 };
14715
14716 class pass_lower_omp : public gimple_opt_pass
14717 {
14718 public:
14719 pass_lower_omp (gcc::context *ctxt)
14720 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14721 {}
14722
14723 /* opt_pass methods: */
14724 unsigned int execute (function *) final override
14725 {
14726 return execute_lower_omp ();
14727 }
14728
14729 }; // class pass_lower_omp
14730
14731 } // anon namespace
14732
14733 gimple_opt_pass *
14734 make_pass_lower_omp (gcc::context *ctxt)
14735 {
14736 return new pass_lower_omp (ctxt);
14737 }
14738 \f
14739 /* The following is a utility to diagnose structured block violations.
14740 It is not part of the "omplower" pass, as that's invoked too late. It
14741 should be invoked by the respective front ends after gimplification. */
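/* For example (illustrative only), this utility is what rejects code
   such as

       goto l;                     // jumps into the structured block
       #pragma omp parallel
       {
       l: ;
       }

   with "invalid entry to OpenMP structured block"; branches out of a
   construct are reported as invalid branches to/from it.  */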
14742
14743 static splay_tree all_labels;
14744
14745 /* Check for mismatched contexts and generate an error if needed. Return
14746 true if an error is detected. */
14747
14748 static bool
14749 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14750 gimple *branch_ctx, gimple *label_ctx)
14751 {
14752 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14753 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14754
14755 if (label_ctx == branch_ctx)
14756 return false;
14757
14758 const char* kind = NULL;
14759
14760 if (flag_openacc)
14761 {
14762 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
14763 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
14764 {
14765 gcc_checking_assert (kind == NULL);
14766 kind = "OpenACC";
14767 }
14768 }
14769 if (kind == NULL)
14770 {
14771 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14772 kind = "OpenMP";
14773 }
14774
14775 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14776 so we could traverse it and issue a correct "exit" or "enter" error
14777 message upon a structured block violation.
14778
14779 We built the context by tree_cons'ing up a list, but there is
14780 no easy counterpart in gimple tuples. It seems like far too much work
14781 for issuing exit/enter error messages. If someone really misses the
14782 distinct error message... patches welcome. */
14783
14784 #if 0
14785 /* Try to avoid confusing the user by producing an error message
14786 with correct "exit" or "enter" verbiage. We prefer "exit"
14787 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14788 if (branch_ctx == NULL)
14789 exit_p = false;
14790 else
14791 {
14792 while (label_ctx)
14793 {
14794 if (TREE_VALUE (label_ctx) == branch_ctx)
14795 {
14796 exit_p = false;
14797 break;
14798 }
14799 label_ctx = TREE_CHAIN (label_ctx);
14800 }
14801 }
14802
14803 if (exit_p)
14804 error ("invalid exit from %s structured block", kind);
14805 else
14806 error ("invalid entry to %s structured block", kind);
14807 #endif
14808
14809 /* If it's obvious we have an invalid entry, be specific about the error. */
14810 if (branch_ctx == NULL)
14811 error ("invalid entry to %s structured block", kind);
14812 else
14813 {
14814 /* Otherwise, be vague and lazy, but efficient. */
14815 error ("invalid branch to/from %s structured block", kind);
14816 }
14817
14818 gsi_replace (gsi_p, gimple_build_nop (), false);
14819 return true;
14820 }
14821
14822 /* Pass 1: Create a minimal tree of structured blocks, and record
14823 where each label is found. */
14824
14825 static tree
14826 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14827 struct walk_stmt_info *wi)
14828 {
14829 gimple *context = (gimple *) wi->info;
14830 gimple *inner_context;
14831 gimple *stmt = gsi_stmt (*gsi_p);
14832
14833 *handled_ops_p = true;
14834
14835 switch (gimple_code (stmt))
14836 {
14837 WALK_SUBSTMTS;
14838
14839 case GIMPLE_OMP_PARALLEL:
14840 case GIMPLE_OMP_TASK:
14841 case GIMPLE_OMP_SCOPE:
14842 case GIMPLE_OMP_SECTIONS:
14843 case GIMPLE_OMP_SINGLE:
14844 case GIMPLE_OMP_SECTION:
14845 case GIMPLE_OMP_MASTER:
14846 case GIMPLE_OMP_MASKED:
14847 case GIMPLE_OMP_ORDERED:
14848 case GIMPLE_OMP_SCAN:
14849 case GIMPLE_OMP_CRITICAL:
14850 case GIMPLE_OMP_TARGET:
14851 case GIMPLE_OMP_TEAMS:
14852 case GIMPLE_OMP_TASKGROUP:
14853 /* The minimal context here is just the current OMP construct. */
14854 inner_context = stmt;
14855 wi->info = inner_context;
14856 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14857 wi->info = context;
14858 break;
14859
14860 case GIMPLE_OMP_FOR:
14861 inner_context = stmt;
14862 wi->info = inner_context;
14863 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14864 walk them. */
14865 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14866 diagnose_sb_1, NULL, wi);
14867 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14868 wi->info = context;
14869 break;
14870
14871 case GIMPLE_LABEL:
14872 splay_tree_insert (all_labels,
14873 (splay_tree_key) gimple_label_label (
14874 as_a <glabel *> (stmt)),
14875 (splay_tree_value) context);
14876 break;
14877
14878 default:
14879 break;
14880 }
14881
14882 return NULL_TREE;
14883 }
14884
14885 /* Pass 2: Check each branch and see if its context differs from that of
14886 the destination label's context. */
14887
14888 static tree
14889 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14890 struct walk_stmt_info *wi)
14891 {
14892 gimple *context = (gimple *) wi->info;
14893 splay_tree_node n;
14894 gimple *stmt = gsi_stmt (*gsi_p);
14895
14896 *handled_ops_p = true;
14897
14898 switch (gimple_code (stmt))
14899 {
14900 WALK_SUBSTMTS;
14901
14902 case GIMPLE_OMP_PARALLEL:
14903 case GIMPLE_OMP_TASK:
14904 case GIMPLE_OMP_SCOPE:
14905 case GIMPLE_OMP_SECTIONS:
14906 case GIMPLE_OMP_SINGLE:
14907 case GIMPLE_OMP_SECTION:
14908 case GIMPLE_OMP_MASTER:
14909 case GIMPLE_OMP_MASKED:
14910 case GIMPLE_OMP_ORDERED:
14911 case GIMPLE_OMP_SCAN:
14912 case GIMPLE_OMP_CRITICAL:
14913 case GIMPLE_OMP_TARGET:
14914 case GIMPLE_OMP_TEAMS:
14915 case GIMPLE_OMP_TASKGROUP:
14916 wi->info = stmt;
14917 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14918 wi->info = context;
14919 break;
14920
14921 case GIMPLE_OMP_FOR:
14922 wi->info = stmt;
14923 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14924 walk them. */
14925 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
14926 diagnose_sb_2, NULL, wi);
14927 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
14928 wi->info = context;
14929 break;
14930
14931 case GIMPLE_COND:
14932 {
14933 gcond *cond_stmt = as_a <gcond *> (stmt);
14934 tree lab = gimple_cond_true_label (cond_stmt);
14935 if (lab)
14936 {
14937 n = splay_tree_lookup (all_labels,
14938 (splay_tree_key) lab);
14939 diagnose_sb_0 (gsi_p, context,
14940 n ? (gimple *) n->value : NULL);
14941 }
14942 lab = gimple_cond_false_label (cond_stmt);
14943 if (lab)
14944 {
14945 n = splay_tree_lookup (all_labels,
14946 (splay_tree_key) lab);
14947 diagnose_sb_0 (gsi_p, context,
14948 n ? (gimple *) n->value : NULL);
14949 }
14950 }
14951 break;
14952
14953 case GIMPLE_GOTO:
14954 {
14955 tree lab = gimple_goto_dest (stmt);
14956 if (TREE_CODE (lab) != LABEL_DECL)
14957 break;
14958
14959 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14960 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
14961 }
14962 break;
14963
14964 case GIMPLE_SWITCH:
14965 {
14966 gswitch *switch_stmt = as_a <gswitch *> (stmt);
14967 unsigned int i;
14968 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
14969 {
14970 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14971 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14972 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
14973 break;
14974 }
14975 }
14976 break;
14977
14978 case GIMPLE_RETURN:
14979 diagnose_sb_0 (gsi_p, context, NULL);
14980 break;
14981
14982 default:
14983 break;
14984 }
14985
14986 return NULL_TREE;
14987 }
14988
14989 static unsigned int
14990 diagnose_omp_structured_block_errors (void)
14991 {
14992 struct walk_stmt_info wi;
14993 gimple_seq body = gimple_body (current_function_decl);
14994
14995 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14996
14997 memset (&wi, 0, sizeof (wi));
14998 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14999
15000 memset (&wi, 0, sizeof (wi));
15001 wi.want_locations = true;
15002 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
15003
15004 gimple_set_body (current_function_decl, body);
15005
15006 splay_tree_delete (all_labels);
15007 all_labels = NULL;
15008
15009 return 0;
15010 }
15011
15012 namespace {
15013
15014 const pass_data pass_data_diagnose_omp_blocks =
15015 {
15016 GIMPLE_PASS, /* type */
15017 "*diagnose_omp_blocks", /* name */
15018 OPTGROUP_OMP, /* optinfo_flags */
15019 TV_NONE, /* tv_id */
15020 PROP_gimple_any, /* properties_required */
15021 0, /* properties_provided */
15022 0, /* properties_destroyed */
15023 0, /* todo_flags_start */
15024 0, /* todo_flags_finish */
15025 };
15026
15027 class pass_diagnose_omp_blocks : public gimple_opt_pass
15028 {
15029 public:
15030 pass_diagnose_omp_blocks (gcc::context *ctxt)
15031 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
15032 {}
15033
15034 /* opt_pass methods: */
15035 bool gate (function *) final override
15036 {
15037 return flag_openacc || flag_openmp || flag_openmp_simd;
15038 }
15039 unsigned int execute (function *) final override
15040 {
15041 return diagnose_omp_structured_block_errors ();
15042 }
15043
15044 }; // class pass_diagnose_omp_blocks
15045
15046 } // anon namespace
15047
15048 gimple_opt_pass *
15049 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
15050 {
15051 return new pass_diagnose_omp_blocks (ctxt);
15052 }
15053 \f
15054
15055 #include "gt-omp-low.h"