1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "tree-pass.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
62 #include "omp-offload.h"
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
75 /* Context structure. Used to store information about each parallel
76 directive in the code. */
80 /* This field must be at the beginning, as we do "inheritance": Some
81 callback functions for tree-inline.cc (e.g., omp_copy_decl)
82 receive a copy_body_data pointer that is up-casted to an
83 omp_context pointer. */
86 /* The tree of contexts corresponding to the encountered constructs. */
87 struct omp_context
*outer
;
90 /* Map variables to fields in a structure that allows communication
91 between sending and receiving threads. */
97 /* These are used just by task contexts, if task firstprivate fn is
98 needed. srecord_type is used to communicate from the thread
99 that encountered the task construct to task firstprivate fn,
100 record_type is allocated by GOMP_task, initialized by task firstprivate
101 fn and passed to the task body fn. */
102 splay_tree sfield_map
;
105 /* A chain of variables to add to the top-level block surrounding the
106 construct. In the case of a parallel, this is in the child function. */
109 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110 barriers should jump to during omplower pass. */
113 /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
117 /* For task reductions registered in this context, a vector containing
118 the length of the private copies block (if constant, otherwise NULL)
119 and then offsets (if constant, otherwise NULL) for each entry. */
120 vec
<tree
> task_reductions
;
122 /* A hash map from the reduction clauses to the registered array
124 hash_map
<tree
, unsigned> *task_reduction_map
;
126 /* And a hash map from the lastprivate(conditional:) variables to their
127 corresponding tracking loop iteration variables. */
128 hash_map
<tree
, tree
> *lastprivate_conditional_map
;
130 /* And a hash map from the allocate variables to their corresponding
132 hash_map
<tree
, tree
> *allocate_map
;
134 /* A tree_list of the reduction clauses in this context. This is
135 only used for checking the consistency of OpenACC reduction
136 clauses in scan_omp_for and is not guaranteed to contain a valid
137 value outside of this function. */
138 tree local_reduction_clauses
;
140 /* A tree_list of the reduction clauses in outer contexts. This is
141 only used for checking the consistency of OpenACC reduction
142 clauses in scan_omp_for and is not guaranteed to contain a valid
143 value outside of this function. */
144 tree outer_reduction_clauses
;
146 /* Nesting depth of this context. Used to beautify error messages re
147 invalid gotos. The outermost ctx is depth 1, with depth 0 being
148 reserved for the main body of the function. */
151 /* True if this parallel directive is nested within another. */
154 /* True if this construct can be cancelled. */
157 /* True if lower_omp_1 should look up lastprivate conditional in parent
159 bool combined_into_simd_safelen1
;
161 /* True if there is nested scan context with inclusive clause. */
164 /* True if there is nested scan context with exclusive clause. */
167 /* True in the second simd loop of for simd with inscan reductions. */
168 bool for_simd_scan_phase
;
170 /* True if there is order(concurrent) clause on the construct. */
171 bool order_concurrent
;
173 /* True if there is bind clause on the construct (i.e. a loop construct). */
176 /* Only used for omp target contexts. True if a teams construct is
177 strictly nested in it. */
180 /* Only used for omp target contexts. True if an OpenMP construct other
181 than teams is strictly nested in it. */
182 bool nonteams_nested_p
;
184 /* Candidates for adjusting OpenACC privatization level. */
185 vec
<tree
> oacc_privatization_candidates
;
188 static splay_tree all_contexts
;
189 static int taskreg_nesting_level
;
190 static int target_nesting_level
;
191 static bitmap make_addressable_vars
;
192 static bitmap global_nonaddressable_vars
;
193 static vec
<omp_context
*> taskreg_contexts
;
194 static vec
<gomp_task
*> task_cpyfns
;
196 static void scan_omp (gimple_seq
*, omp_context
*);
197 static tree
scan_omp_1_op (tree
*, int *, void *);
198 static bool omp_maybe_offloaded_ctx (omp_context
*ctx
);
200 #define WALK_SUBSTMTS \
204 case GIMPLE_EH_FILTER: \
205 case GIMPLE_TRANSACTION: \
206 /* The sub-statements for these should be walked. */ \
207 *handled_ops_p = false; \
210 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
211 (This doesn't include OpenACC 'kernels' decomposed parts.) */
214 is_oacc_parallel_or_serial (omp_context
*ctx
)
216 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
217 return ((outer_type
== GIMPLE_OMP_TARGET
)
218 && ((gimple_omp_target_kind (ctx
->stmt
)
219 == GF_OMP_TARGET_KIND_OACC_PARALLEL
)
220 || (gimple_omp_target_kind (ctx
->stmt
)
221 == GF_OMP_TARGET_KIND_OACC_SERIAL
)));
224 /* Return whether CTX represents an OpenACC 'kernels' construct.
225 (This doesn't include OpenACC 'kernels' decomposed parts.) */
228 is_oacc_kernels (omp_context
*ctx
)
230 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
231 return ((outer_type
== GIMPLE_OMP_TARGET
)
232 && (gimple_omp_target_kind (ctx
->stmt
)
233 == GF_OMP_TARGET_KIND_OACC_KERNELS
));
236 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
239 is_oacc_kernels_decomposed_part (omp_context
*ctx
)
241 enum gimple_code outer_type
= gimple_code (ctx
->stmt
);
242 return ((outer_type
== GIMPLE_OMP_TARGET
)
243 && ((gimple_omp_target_kind (ctx
->stmt
)
244 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
)
245 || (gimple_omp_target_kind (ctx
->stmt
)
246 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
)
247 || (gimple_omp_target_kind (ctx
->stmt
)
248 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
)));
251 /* Return true if STMT corresponds to an OpenMP target region. */
253 is_omp_target (gimple
*stmt
)
255 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
)
257 int kind
= gimple_omp_target_kind (stmt
);
258 return (kind
== GF_OMP_TARGET_KIND_REGION
259 || kind
== GF_OMP_TARGET_KIND_DATA
260 || kind
== GF_OMP_TARGET_KIND_ENTER_DATA
261 || kind
== GF_OMP_TARGET_KIND_EXIT_DATA
);
266 /* If DECL is the artificial dummy VAR_DECL created for non-static
267 data member privatization, return the underlying "this" parameter,
268 otherwise return NULL. */
271 omp_member_access_dummy_var (tree decl
)
274 || !DECL_ARTIFICIAL (decl
)
275 || !DECL_IGNORED_P (decl
)
276 || !DECL_HAS_VALUE_EXPR_P (decl
)
277 || !lang_hooks
.decls
.omp_disregard_value_expr (decl
, false))
280 tree v
= DECL_VALUE_EXPR (decl
);
281 if (TREE_CODE (v
) != COMPONENT_REF
)
285 switch (TREE_CODE (v
))
291 case POINTER_PLUS_EXPR
:
292 v
= TREE_OPERAND (v
, 0);
295 if (DECL_CONTEXT (v
) == current_function_decl
296 && DECL_ARTIFICIAL (v
)
297 && TREE_CODE (TREE_TYPE (v
)) == POINTER_TYPE
)
305 /* Helper for unshare_and_remap, called through walk_tree. */
308 unshare_and_remap_1 (tree
*tp
, int *walk_subtrees
, void *data
)
310 tree
*pair
= (tree
*) data
;
313 *tp
= unshare_expr (pair
[1]);
316 else if (IS_TYPE_OR_DECL_P (*tp
))
321 /* Return unshare_expr (X) with all occurrences of FROM
325 unshare_and_remap (tree x
, tree from
, tree to
)
327 tree pair
[2] = { from
, to
};
328 x
= unshare_expr (x
);
329 walk_tree (&x
, unshare_and_remap_1
, pair
, NULL
);
333 /* Convenience function for calling scan_omp_1_op on tree operands. */
336 scan_omp_op (tree
*tp
, omp_context
*ctx
)
338 struct walk_stmt_info wi
;
340 memset (&wi
, 0, sizeof (wi
));
342 wi
.want_locations
= true;
344 return walk_tree (tp
, scan_omp_1_op
, &wi
, NULL
);
347 static void lower_omp (gimple_seq
*, omp_context
*);
348 static tree
lookup_decl_in_outer_ctx (tree
, omp_context
*);
349 static tree
maybe_lookup_decl_in_outer_ctx (tree
, omp_context
*);
351 /* Return true if CTX is for an omp parallel. */
354 is_parallel_ctx (omp_context
*ctx
)
356 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
;
360 /* Return true if CTX is for an omp task. */
363 is_task_ctx (omp_context
*ctx
)
365 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TASK
;
369 /* Return true if CTX is for an omp taskloop. */
372 is_taskloop_ctx (omp_context
*ctx
)
374 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
375 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
;
379 /* Return true if CTX is for a host omp teams. */
382 is_host_teams_ctx (omp_context
*ctx
)
384 return gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
385 && gimple_omp_teams_host (as_a
<gomp_teams
*> (ctx
->stmt
));
388 /* Return true if CTX is for an omp parallel or omp task or host omp teams
389 (the last one is strictly not a task region in OpenMP speak, but we
390 need to treat it similarly). */
393 is_taskreg_ctx (omp_context
*ctx
)
395 return is_parallel_ctx (ctx
) || is_task_ctx (ctx
) || is_host_teams_ctx (ctx
);
398 /* Return true if EXPR is variable sized. */
401 is_variable_sized (const_tree expr
)
403 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
406 /* Lookup variables. The "maybe" form
407 allows for the variable form to not have been entered, otherwise we
408 assert that the variable must have been entered. */
411 lookup_decl (tree var
, omp_context
*ctx
)
413 tree
*n
= ctx
->cb
.decl_map
->get (var
);
418 maybe_lookup_decl (const_tree var
, omp_context
*ctx
)
420 tree
*n
= ctx
->cb
.decl_map
->get (const_cast<tree
> (var
));
421 return n
? *n
: NULL_TREE
;
425 lookup_field (tree var
, omp_context
*ctx
)
428 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) var
);
429 return (tree
) n
->value
;
433 lookup_sfield (splay_tree_key key
, omp_context
*ctx
)
436 n
= splay_tree_lookup (ctx
->sfield_map
437 ? ctx
->sfield_map
: ctx
->field_map
, key
);
438 return (tree
) n
->value
;
442 lookup_sfield (tree var
, omp_context
*ctx
)
444 return lookup_sfield ((splay_tree_key
) var
, ctx
);
448 maybe_lookup_field (splay_tree_key key
, omp_context
*ctx
)
451 n
= splay_tree_lookup (ctx
->field_map
, key
);
452 return n
? (tree
) n
->value
: NULL_TREE
;
456 maybe_lookup_field (tree var
, omp_context
*ctx
)
458 return maybe_lookup_field ((splay_tree_key
) var
, ctx
);
461 /* Return true if DECL should be copied by pointer. SHARED_CTX is
462 the parallel context if DECL is to be shared. */
465 use_pointer_for_field (tree decl
, omp_context
*shared_ctx
)
467 if (AGGREGATE_TYPE_P (TREE_TYPE (decl
))
468 || TYPE_ATOMIC (TREE_TYPE (decl
)))
471 /* We can only use copy-in/copy-out semantics for shared variables
472 when we know the value is not accessible from an outer scope. */
475 gcc_assert (!is_gimple_omp_oacc (shared_ctx
->stmt
));
477 /* ??? Trivially accessible from anywhere. But why would we even
478 be passing an address in this case? Should we simply assert
479 this to be false, or should we have a cleanup pass that removes
480 these from the list of mappings? */
481 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
)))
484 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
485 without analyzing the expression whether or not its location
486 is accessible to anyone else. In the case of nested parallel
487 regions it certainly may be. */
488 if (TREE_CODE (decl
) != RESULT_DECL
&& DECL_HAS_VALUE_EXPR_P (decl
))
491 /* Do not use copy-in/copy-out for variables that have their
493 if (is_global_var (decl
))
495 /* For file scope vars, track whether we've seen them as
496 non-addressable initially and in that case, keep the same
497 answer for the duration of the pass, even when they are made
498 addressable later on e.g. through reduction expansion. Global
499 variables which weren't addressable before the pass will not
500 have their privatized copies address taken. See PR91216. */
501 if (!TREE_ADDRESSABLE (decl
))
503 if (!global_nonaddressable_vars
)
504 global_nonaddressable_vars
= BITMAP_ALLOC (NULL
);
505 bitmap_set_bit (global_nonaddressable_vars
, DECL_UID (decl
));
507 else if (!global_nonaddressable_vars
508 || !bitmap_bit_p (global_nonaddressable_vars
,
512 else if (TREE_ADDRESSABLE (decl
))
515 /* lower_send_shared_vars only uses copy-in, but not copy-out
517 if (TREE_READONLY (decl
)
518 || ((TREE_CODE (decl
) == RESULT_DECL
519 || TREE_CODE (decl
) == PARM_DECL
)
520 && DECL_BY_REFERENCE (decl
)))
523 /* Disallow copy-in/out in nested parallel if
524 decl is shared in outer parallel, otherwise
525 each thread could store the shared variable
526 in its own copy-in location, making the
527 variable no longer really shared. */
528 if (shared_ctx
->is_nested
)
532 for (up
= shared_ctx
->outer
; up
; up
= up
->outer
)
533 if ((is_taskreg_ctx (up
)
534 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
535 && is_gimple_omp_offloaded (up
->stmt
)))
536 && maybe_lookup_decl (decl
, up
))
543 if (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
)
545 for (c
= gimple_omp_target_clauses (up
->stmt
);
546 c
; c
= OMP_CLAUSE_CHAIN (c
))
547 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
548 && OMP_CLAUSE_DECL (c
) == decl
)
552 for (c
= gimple_omp_taskreg_clauses (up
->stmt
);
553 c
; c
= OMP_CLAUSE_CHAIN (c
))
554 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
555 && OMP_CLAUSE_DECL (c
) == decl
)
559 goto maybe_mark_addressable_and_ret
;
563 /* For tasks avoid using copy-in/out. As tasks can be
564 deferred or executed in different thread, when GOMP_task
565 returns, the task hasn't necessarily terminated. */
566 if (is_task_ctx (shared_ctx
))
569 maybe_mark_addressable_and_ret
:
570 outer
= maybe_lookup_decl_in_outer_ctx (decl
, shared_ctx
);
571 if (is_gimple_reg (outer
) && !omp_member_access_dummy_var (outer
))
573 /* Taking address of OUTER in lower_send_shared_vars
574 might need regimplification of everything that uses the
576 if (!make_addressable_vars
)
577 make_addressable_vars
= BITMAP_ALLOC (NULL
);
578 bitmap_set_bit (make_addressable_vars
, DECL_UID (outer
));
579 TREE_ADDRESSABLE (outer
) = 1;
588 /* Construct a new automatic decl similar to VAR. */
591 omp_copy_decl_2 (tree var
, tree name
, tree type
, omp_context
*ctx
)
593 tree copy
= copy_var_decl (var
, name
, type
);
595 DECL_CONTEXT (copy
) = current_function_decl
;
599 DECL_CHAIN (copy
) = ctx
->block_vars
;
600 ctx
->block_vars
= copy
;
605 /* If VAR is listed in make_addressable_vars, it wasn't
606 originally addressable, but was only later made so.
607 We don't need to take address of privatizations
609 if (TREE_ADDRESSABLE (var
)
610 && ((make_addressable_vars
611 && bitmap_bit_p (make_addressable_vars
, DECL_UID (var
)))
612 || (global_nonaddressable_vars
613 && bitmap_bit_p (global_nonaddressable_vars
, DECL_UID (var
)))))
614 TREE_ADDRESSABLE (copy
) = 0;
620 omp_copy_decl_1 (tree var
, omp_context
*ctx
)
622 return omp_copy_decl_2 (var
, DECL_NAME (var
), TREE_TYPE (var
), ctx
);
625 /* Build tree nodes to access the field for VAR on the receiver side. */
628 build_receiver_ref (tree var
, bool by_ref
, omp_context
*ctx
)
630 tree x
, field
= lookup_field (var
, ctx
);
632 /* If the receiver record type was remapped in the child function,
633 remap the field into the new record type. */
634 x
= maybe_lookup_field (field
, ctx
);
638 x
= build_simple_mem_ref (ctx
->receiver_decl
);
639 TREE_THIS_NOTRAP (x
) = 1;
640 x
= omp_build_component_ref (x
, field
);
643 x
= build_simple_mem_ref (x
);
644 TREE_THIS_NOTRAP (x
) = 1;
650 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
651 of a parallel, this is a component reference; for workshare constructs
652 this is some variable. */
655 build_outer_var_ref (tree var
, omp_context
*ctx
,
656 enum omp_clause_code code
= OMP_CLAUSE_ERROR
)
659 omp_context
*outer
= ctx
->outer
;
660 for (; outer
; outer
= outer
->outer
)
662 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TASKGROUP
)
664 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCOPE
665 && !maybe_lookup_decl (var
, outer
))
670 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
672 else if (is_variable_sized (var
))
674 x
= TREE_OPERAND (DECL_VALUE_EXPR (var
), 0);
675 x
= build_outer_var_ref (x
, ctx
, code
);
676 x
= build_simple_mem_ref (x
);
678 else if (is_taskreg_ctx (ctx
))
680 bool by_ref
= use_pointer_for_field (var
, NULL
);
681 x
= build_receiver_ref (var
, by_ref
, ctx
);
683 else if ((gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
684 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
686 || code
== OMP_CLAUSE_ALLOCATE
687 || (code
== OMP_CLAUSE_PRIVATE
688 && (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
689 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
690 || gimple_code (ctx
->stmt
) == GIMPLE_OMP_SINGLE
)))
692 /* #pragma omp simd isn't a worksharing construct, and can reference
693 even private vars in its linear etc. clauses.
694 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
695 to private vars in all worksharing constructs. */
697 if (outer
&& is_taskreg_ctx (outer
))
698 x
= lookup_decl (var
, outer
);
700 x
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
704 else if (code
== OMP_CLAUSE_LASTPRIVATE
&& is_taskloop_ctx (ctx
))
708 = splay_tree_lookup (outer
->field_map
,
709 (splay_tree_key
) &DECL_UID (var
));
712 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, outer
)))
715 x
= lookup_decl (var
, outer
);
719 tree field
= (tree
) n
->value
;
720 /* If the receiver record type was remapped in the child function,
721 remap the field into the new record type. */
722 x
= maybe_lookup_field (field
, outer
);
726 x
= build_simple_mem_ref (outer
->receiver_decl
);
727 x
= omp_build_component_ref (x
, field
);
728 if (use_pointer_for_field (var
, outer
))
729 x
= build_simple_mem_ref (x
);
733 x
= lookup_decl (var
, outer
);
734 else if (omp_privatize_by_reference (var
))
735 /* This can happen with orphaned constructs. If var is reference, it is
736 possible it is shared and as such valid. */
738 else if (omp_member_access_dummy_var (var
))
745 tree t
= omp_member_access_dummy_var (var
);
748 x
= DECL_VALUE_EXPR (var
);
749 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
751 x
= unshare_and_remap (x
, t
, o
);
753 x
= unshare_expr (x
);
757 if (omp_privatize_by_reference (var
))
758 x
= build_simple_mem_ref (x
);
763 /* Build tree nodes to access the field for VAR on the sender side. */
766 build_sender_ref (splay_tree_key key
, omp_context
*ctx
)
768 tree field
= lookup_sfield (key
, ctx
);
769 return omp_build_component_ref (ctx
->sender_decl
, field
);
773 build_sender_ref (tree var
, omp_context
*ctx
)
775 return build_sender_ref ((splay_tree_key
) var
, ctx
);
778 /* Add a new field for VAR inside the structure CTX->SENDER_DECL. If
779 BASE_POINTERS_RESTRICT, declare the field with restrict. */
782 install_var_field (tree var
, bool by_ref
, int mask
, omp_context
*ctx
)
784 tree field
, type
, sfield
= NULL_TREE
;
785 splay_tree_key key
= (splay_tree_key
) var
;
787 if ((mask
& 16) != 0)
789 key
= (splay_tree_key
) &DECL_NAME (var
);
790 gcc_checking_assert (key
!= (splay_tree_key
) var
);
794 key
= (splay_tree_key
) &DECL_UID (var
);
795 gcc_checking_assert (key
!= (splay_tree_key
) var
);
797 gcc_assert ((mask
& 1) == 0
798 || !splay_tree_lookup (ctx
->field_map
, key
));
799 gcc_assert ((mask
& 2) == 0 || !ctx
->sfield_map
800 || !splay_tree_lookup (ctx
->sfield_map
, key
));
801 gcc_assert ((mask
& 3) == 3
802 || !is_gimple_omp_oacc (ctx
->stmt
));
804 type
= TREE_TYPE (var
);
805 if ((mask
& 16) != 0)
806 type
= lang_hooks
.decls
.omp_array_data (var
, true);
808 /* Prevent redeclaring the var in the split-off function with a restrict
809 pointer type. Note that we only clear type itself, restrict qualifiers in
810 the pointed-to type will be ignored by points-to analysis. */
811 if (POINTER_TYPE_P (type
)
812 && TYPE_RESTRICT (type
))
813 type
= build_qualified_type (type
, TYPE_QUALS (type
) & ~TYPE_QUAL_RESTRICT
);
817 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
818 type
= build_pointer_type (build_pointer_type (type
));
821 type
= build_pointer_type (type
);
822 else if ((mask
& (32 | 3)) == 1
823 && omp_privatize_by_reference (var
))
824 type
= TREE_TYPE (type
);
826 field
= build_decl (DECL_SOURCE_LOCATION (var
),
827 FIELD_DECL
, DECL_NAME (var
), type
);
829 /* Remember what variable this field was created for. This does have a
830 side effect of making dwarf2out ignore this member, so for helpful
831 debugging we clear it later in delete_omp_context. */
832 DECL_ABSTRACT_ORIGIN (field
) = var
;
833 if ((mask
& 16) == 0 && type
== TREE_TYPE (var
))
835 SET_DECL_ALIGN (field
, DECL_ALIGN (var
));
836 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (var
);
837 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (var
);
840 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
844 insert_field_into_struct (ctx
->record_type
, field
);
845 if (ctx
->srecord_type
)
847 sfield
= build_decl (DECL_SOURCE_LOCATION (var
),
848 FIELD_DECL
, DECL_NAME (var
), type
);
849 DECL_ABSTRACT_ORIGIN (sfield
) = var
;
850 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
851 DECL_USER_ALIGN (sfield
) = DECL_USER_ALIGN (field
);
852 TREE_THIS_VOLATILE (sfield
) = TREE_THIS_VOLATILE (field
);
853 insert_field_into_struct (ctx
->srecord_type
, sfield
);
858 if (ctx
->srecord_type
== NULL_TREE
)
862 ctx
->srecord_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
863 ctx
->sfield_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
864 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= TREE_CHAIN (t
))
866 sfield
= build_decl (DECL_SOURCE_LOCATION (t
),
867 FIELD_DECL
, DECL_NAME (t
), TREE_TYPE (t
));
868 DECL_ABSTRACT_ORIGIN (sfield
) = DECL_ABSTRACT_ORIGIN (t
);
869 insert_field_into_struct (ctx
->srecord_type
, sfield
);
870 splay_tree_insert (ctx
->sfield_map
,
871 (splay_tree_key
) DECL_ABSTRACT_ORIGIN (t
),
872 (splay_tree_value
) sfield
);
876 insert_field_into_struct ((mask
& 1) ? ctx
->record_type
877 : ctx
->srecord_type
, field
);
881 splay_tree_insert (ctx
->field_map
, key
, (splay_tree_value
) field
);
882 if ((mask
& 2) && ctx
->sfield_map
)
883 splay_tree_insert (ctx
->sfield_map
, key
, (splay_tree_value
) sfield
);
887 install_var_local (tree var
, omp_context
*ctx
)
889 tree new_var
= omp_copy_decl_1 (var
, ctx
);
890 insert_decl_map (&ctx
->cb
, var
, new_var
);
894 /* Adjust the replacement for DECL in CTX for the new context. This means
895 copying the DECL_VALUE_EXPR, and fixing up the type. */
898 fixup_remapped_decl (tree decl
, omp_context
*ctx
, bool private_debug
)
902 new_decl
= lookup_decl (decl
, ctx
);
904 TREE_TYPE (new_decl
) = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
906 if ((!TREE_CONSTANT (DECL_SIZE (new_decl
)) || private_debug
)
907 && DECL_HAS_VALUE_EXPR_P (decl
))
909 tree ve
= DECL_VALUE_EXPR (decl
);
910 walk_tree (&ve
, copy_tree_body_r
, &ctx
->cb
, NULL
);
911 SET_DECL_VALUE_EXPR (new_decl
, ve
);
912 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
915 if (!TREE_CONSTANT (DECL_SIZE (new_decl
)))
917 size
= remap_decl (DECL_SIZE (decl
), &ctx
->cb
);
918 if (size
== error_mark_node
)
919 size
= TYPE_SIZE (TREE_TYPE (new_decl
));
920 DECL_SIZE (new_decl
) = size
;
922 size
= remap_decl (DECL_SIZE_UNIT (decl
), &ctx
->cb
);
923 if (size
== error_mark_node
)
924 size
= TYPE_SIZE_UNIT (TREE_TYPE (new_decl
));
925 DECL_SIZE_UNIT (new_decl
) = size
;
929 /* The callback for remap_decl. Search all containing contexts for a
930 mapping of the variable; this avoids having to duplicate the splay
931 tree ahead of time. We know a mapping doesn't already exist in the
932 given context. Create new mappings to implement default semantics. */
935 omp_copy_decl (tree var
, copy_body_data
*cb
)
937 omp_context
*ctx
= (omp_context
*) cb
;
940 if (TREE_CODE (var
) == LABEL_DECL
)
942 if (FORCED_LABEL (var
) || DECL_NONLOCAL (var
))
944 new_var
= create_artificial_label (DECL_SOURCE_LOCATION (var
));
945 DECL_CONTEXT (new_var
) = current_function_decl
;
946 insert_decl_map (&ctx
->cb
, var
, new_var
);
950 while (!is_taskreg_ctx (ctx
))
955 new_var
= maybe_lookup_decl (var
, ctx
);
960 if (is_global_var (var
) || decl_function_context (var
) != ctx
->cb
.src_fn
)
963 return error_mark_node
;
966 /* Create a new context, with OUTER_CTX being the surrounding context. */
969 new_omp_context (gimple
*stmt
, omp_context
*outer_ctx
)
971 omp_context
*ctx
= XCNEW (omp_context
);
973 splay_tree_insert (all_contexts
, (splay_tree_key
) stmt
,
974 (splay_tree_value
) ctx
);
979 ctx
->outer
= outer_ctx
;
980 ctx
->cb
= outer_ctx
->cb
;
981 ctx
->cb
.block
= NULL
;
982 ctx
->depth
= outer_ctx
->depth
+ 1;
986 ctx
->cb
.src_fn
= current_function_decl
;
987 ctx
->cb
.dst_fn
= current_function_decl
;
988 ctx
->cb
.src_node
= cgraph_node::get (current_function_decl
);
989 gcc_checking_assert (ctx
->cb
.src_node
);
990 ctx
->cb
.dst_node
= ctx
->cb
.src_node
;
991 ctx
->cb
.src_cfun
= cfun
;
992 ctx
->cb
.copy_decl
= omp_copy_decl
;
993 ctx
->cb
.eh_lp_nr
= 0;
994 ctx
->cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
995 ctx
->cb
.adjust_array_error_bounds
= true;
996 ctx
->cb
.dont_remap_vla_if_no_change
= true;
1000 ctx
->cb
.decl_map
= new hash_map
<tree
, tree
>;
1005 static gimple_seq
maybe_catch_exception (gimple_seq
);
1007 /* Finalize task copyfn. */
1010 finalize_task_copyfn (gomp_task
*task_stmt
)
1012 struct function
*child_cfun
;
1014 gimple_seq seq
= NULL
, new_seq
;
1017 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
1018 if (child_fn
== NULL_TREE
)
1021 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
1022 DECL_STRUCT_FUNCTION (child_fn
)->curr_properties
= cfun
->curr_properties
;
1024 push_cfun (child_cfun
);
1025 bind
= gimplify_body (child_fn
, false);
1026 gimple_seq_add_stmt (&seq
, bind
);
1027 new_seq
= maybe_catch_exception (seq
);
1030 bind
= gimple_build_bind (NULL
, new_seq
, NULL
);
1032 gimple_seq_add_stmt (&seq
, bind
);
1034 gimple_set_body (child_fn
, seq
);
1037 /* Inform the callgraph about the new function. */
1038 cgraph_node
*node
= cgraph_node::get_create (child_fn
);
1039 node
->parallelized_function
= 1;
1040 cgraph_node::add_new_function (child_fn
, false);
1043 /* Destroy a omp_context data structures. Called through the splay tree
1044 value delete callback. */
1047 delete_omp_context (splay_tree_value value
)
1049 omp_context
*ctx
= (omp_context
*) value
;
1051 delete ctx
->cb
.decl_map
;
1054 splay_tree_delete (ctx
->field_map
);
1055 if (ctx
->sfield_map
)
1056 splay_tree_delete (ctx
->sfield_map
);
1058 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1059 it produces corrupt debug information. */
1060 if (ctx
->record_type
)
1063 for (t
= TYPE_FIELDS (ctx
->record_type
); t
; t
= DECL_CHAIN (t
))
1064 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1066 if (ctx
->srecord_type
)
1069 for (t
= TYPE_FIELDS (ctx
->srecord_type
); t
; t
= DECL_CHAIN (t
))
1070 DECL_ABSTRACT_ORIGIN (t
) = NULL
;
1073 if (ctx
->task_reduction_map
)
1075 ctx
->task_reductions
.release ();
1076 delete ctx
->task_reduction_map
;
1079 delete ctx
->lastprivate_conditional_map
;
1080 delete ctx
->allocate_map
;
1085 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1089 fixup_child_record_type (omp_context
*ctx
)
1091 tree f
, type
= ctx
->record_type
;
1093 if (!ctx
->receiver_decl
)
1095 /* ??? It isn't sufficient to just call remap_type here, because
1096 variably_modified_type_p doesn't work the way we expect for
1097 record types. Testing each field for whether it needs remapping
1098 and creating a new record by hand works, however. */
1099 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
1100 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
1104 tree name
, new_fields
= NULL
;
1106 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
1107 name
= DECL_NAME (TYPE_NAME (ctx
->record_type
));
1108 name
= build_decl (DECL_SOURCE_LOCATION (ctx
->receiver_decl
),
1109 TYPE_DECL
, name
, type
);
1110 TYPE_NAME (type
) = name
;
1112 for (f
= TYPE_FIELDS (ctx
->record_type
); f
; f
= DECL_CHAIN (f
))
1114 tree new_f
= copy_node (f
);
1115 DECL_CONTEXT (new_f
) = type
;
1116 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &ctx
->cb
);
1117 DECL_CHAIN (new_f
) = new_fields
;
1118 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &ctx
->cb
, NULL
);
1119 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
,
1121 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
1125 /* Arrange to be able to look up the receiver field
1126 given the sender field. */
1127 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) f
,
1128 (splay_tree_value
) new_f
);
1130 TYPE_FIELDS (type
) = nreverse (new_fields
);
1134 /* In a target region we never modify any of the pointers in *.omp_data_i,
1135 so attempt to help the optimizers. */
1136 if (is_gimple_omp_offloaded (ctx
->stmt
))
1137 type
= build_qualified_type (type
, TYPE_QUAL_CONST
);
1139 TREE_TYPE (ctx
->receiver_decl
)
1140 = build_qualified_type (build_reference_type (type
), TYPE_QUAL_RESTRICT
);
1143 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1144 specified by CLAUSES. */
1147 scan_sharing_clauses (tree clauses
, omp_context
*ctx
)
1150 bool scan_array_reductions
= false;
1152 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
1154 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1155 /* omp_default_mem_alloc is 1 */
1156 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1157 || OMP_CLAUSE_ALLOCATE_ALIGN (c
) != NULL_TREE
))
1159 /* The allocate clauses that appear on a target construct or on
1160 constructs in a target region must specify an allocator expression
1161 unless a requires directive with the dynamic_allocators clause
1162 is present in the same compilation unit. */
1163 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
1164 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
) == 0)
1165 && omp_maybe_offloaded_ctx (ctx
))
1166 error_at (OMP_CLAUSE_LOCATION (c
), "%<allocate%> clause must"
1167 " specify an allocator here");
1168 if (ctx
->allocate_map
== NULL
)
1169 ctx
->allocate_map
= new hash_map
<tree
, tree
>;
1170 tree val
= integer_zero_node
;
1171 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
1172 val
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
1173 if (OMP_CLAUSE_ALLOCATE_ALIGN (c
))
1174 val
= build_tree_list (val
, OMP_CLAUSE_ALLOCATE_ALIGN (c
));
1175 ctx
->allocate_map
->put (OMP_CLAUSE_DECL (c
), val
);
1178 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1182 switch (OMP_CLAUSE_CODE (c
))
1184 case OMP_CLAUSE_PRIVATE
:
1185 decl
= OMP_CLAUSE_DECL (c
);
1186 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
1188 else if (!is_variable_sized (decl
))
1189 install_var_local (decl
, ctx
);
1192 case OMP_CLAUSE_SHARED
:
1193 decl
= OMP_CLAUSE_DECL (c
);
1194 if (ctx
->allocate_map
&& ctx
->allocate_map
->get (decl
))
1195 ctx
->allocate_map
->remove (decl
);
1196 /* Ignore shared directives in teams construct inside of
1197 target construct. */
1198 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1199 && !is_host_teams_ctx (ctx
))
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 tree odecl
= maybe_lookup_decl_in_outer_ctx (decl
, ctx
);
1204 if (is_global_var (odecl
))
1206 insert_decl_map (&ctx
->cb
, decl
, odecl
);
1209 gcc_assert (is_taskreg_ctx (ctx
));
1210 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl
))
1211 || !is_variable_sized (decl
));
1212 /* Global variables don't need to be copied,
1213 the receiver side will use them directly. */
1214 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1216 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1218 use_pointer_for_field (decl
, ctx
);
1221 by_ref
= use_pointer_for_field (decl
, NULL
);
1222 if ((! TREE_READONLY (decl
) && !OMP_CLAUSE_SHARED_READONLY (c
))
1223 || TREE_ADDRESSABLE (decl
)
1225 || omp_privatize_by_reference (decl
))
1227 by_ref
= use_pointer_for_field (decl
, ctx
);
1228 install_var_field (decl
, by_ref
, 3, ctx
);
1229 install_var_local (decl
, ctx
);
1232 /* We don't need to copy const scalar vars back. */
1233 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_FIRSTPRIVATE
);
1236 case OMP_CLAUSE_REDUCTION
:
1237 /* Collect 'reduction' clauses on OpenACC compute construct. */
1238 if (is_gimple_omp_oacc (ctx
->stmt
)
1239 && is_gimple_omp_offloaded (ctx
->stmt
))
1241 /* No 'reduction' clauses on OpenACC 'kernels'. */
1242 gcc_checking_assert (!is_oacc_kernels (ctx
));
1243 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1244 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
1246 ctx
->local_reduction_clauses
1247 = tree_cons (NULL
, c
, ctx
->local_reduction_clauses
);
1251 case OMP_CLAUSE_IN_REDUCTION
:
1252 decl
= OMP_CLAUSE_DECL (c
);
1253 if (ctx
->allocate_map
1254 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1255 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
1256 || OMP_CLAUSE_REDUCTION_TASK (c
)))
1257 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1258 || is_task_ctx (ctx
)))
1261 if (ctx
->allocate_map
->get (decl
))
1262 ctx
->allocate_map
->remove (decl
);
1264 if (TREE_CODE (decl
) == MEM_REF
)
1266 tree t
= TREE_OPERAND (decl
, 0);
1267 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
1268 t
= TREE_OPERAND (t
, 0);
1269 if (TREE_CODE (t
) == INDIRECT_REF
1270 || TREE_CODE (t
) == ADDR_EXPR
)
1271 t
= TREE_OPERAND (t
, 0);
1272 if (is_omp_target (ctx
->stmt
))
1274 if (is_variable_sized (t
))
1276 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
));
1277 t
= DECL_VALUE_EXPR (t
);
1278 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
1279 t
= TREE_OPERAND (t
, 0);
1280 gcc_assert (DECL_P (t
));
1284 scan_omp_op (&at
, ctx
->outer
);
1285 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1286 splay_tree_insert (ctx
->field_map
,
1287 (splay_tree_key
) &DECL_CONTEXT (t
),
1288 (splay_tree_value
) nt
);
1290 splay_tree_insert (ctx
->field_map
,
1291 (splay_tree_key
) &DECL_CONTEXT (at
),
1292 (splay_tree_value
) nt
);
1295 install_var_local (t
, ctx
);
1296 if (is_taskreg_ctx (ctx
)
1297 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t
, ctx
))
1298 || (is_task_ctx (ctx
)
1299 && (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
1300 || (TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1301 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t
)))
1302 == POINTER_TYPE
)))))
1303 && !is_variable_sized (t
)
1304 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
1305 || (!OMP_CLAUSE_REDUCTION_TASK (c
)
1306 && !is_task_ctx (ctx
))))
1308 by_ref
= use_pointer_for_field (t
, NULL
);
1309 if (is_task_ctx (ctx
)
1310 && TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
1311 && TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == POINTER_TYPE
)
1313 install_var_field (t
, false, 1, ctx
);
1314 install_var_field (t
, by_ref
, 2, ctx
);
1317 install_var_field (t
, by_ref
, 3, ctx
);
1321 if (is_omp_target (ctx
->stmt
))
1325 scan_omp_op (&at
, ctx
->outer
);
1326 tree nt
= omp_copy_decl_1 (at
, ctx
->outer
);
1327 splay_tree_insert (ctx
->field_map
,
1328 (splay_tree_key
) &DECL_CONTEXT (decl
),
1329 (splay_tree_value
) nt
);
1331 splay_tree_insert (ctx
->field_map
,
1332 (splay_tree_key
) &DECL_CONTEXT (at
),
1333 (splay_tree_value
) nt
);
1336 if (is_task_ctx (ctx
)
1337 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1338 && OMP_CLAUSE_REDUCTION_TASK (c
)
1339 && is_parallel_ctx (ctx
)))
1341 /* Global variables don't need to be copied,
1342 the receiver side will use them directly. */
1343 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1345 by_ref
= use_pointer_for_field (decl
, ctx
);
1346 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
1347 install_var_field (decl
, by_ref
, 3, ctx
);
1349 install_var_local (decl
, ctx
);
1352 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1353 && OMP_CLAUSE_REDUCTION_TASK (c
))
1355 install_var_local (decl
, ctx
);
1360 case OMP_CLAUSE_LASTPRIVATE
:
1361 /* Let the corresponding firstprivate clause create
1363 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1367 case OMP_CLAUSE_FIRSTPRIVATE
:
1368 case OMP_CLAUSE_LINEAR
:
1369 decl
= OMP_CLAUSE_DECL (c
);
1371 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1372 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1373 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1374 && is_gimple_omp_offloaded (ctx
->stmt
))
1376 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1377 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
1378 && lang_hooks
.decls
.omp_array_data (decl
, true)))
1380 by_ref
= !omp_privatize_by_reference (decl
);
1381 install_var_field (decl
, by_ref
, 3, ctx
);
1383 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1385 if (TREE_CODE (decl
) == INDIRECT_REF
)
1386 decl
= TREE_OPERAND (decl
, 0);
1387 install_var_field (decl
, true, 3, ctx
);
1389 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1390 install_var_field (decl
, true, 3, ctx
);
1392 install_var_field (decl
, false, 3, ctx
);
1394 if (is_variable_sized (decl
))
1396 if (is_task_ctx (ctx
))
1398 if (ctx
->allocate_map
1399 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
1402 if (ctx
->allocate_map
->get (decl
))
1403 ctx
->allocate_map
->remove (decl
);
1405 install_var_field (decl
, false, 1, ctx
);
1409 else if (is_taskreg_ctx (ctx
))
1412 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
));
1413 by_ref
= use_pointer_for_field (decl
, NULL
);
1415 if (is_task_ctx (ctx
)
1416 && (global
|| by_ref
|| omp_privatize_by_reference (decl
)))
1418 if (ctx
->allocate_map
1419 && ctx
->allocate_map
->get (decl
))
1420 install_var_field (decl
, by_ref
, 32 | 1, ctx
);
1422 install_var_field (decl
, false, 1, ctx
);
1424 install_var_field (decl
, by_ref
, 2, ctx
);
1427 install_var_field (decl
, by_ref
, 3, ctx
);
1429 install_var_local (decl
, ctx
);
1430 /* For descr arrays on target: firstprivatize data + attach ptr. */
1431 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1432 && is_gimple_omp_offloaded (ctx
->stmt
)
1433 && !is_gimple_omp_oacc (ctx
->stmt
)
1434 && lang_hooks
.decls
.omp_array_data (decl
, true))
1436 install_var_field (decl
, false, 16 | 3, ctx
);
1437 install_var_field (decl
, true, 8 | 3, ctx
);
1441 case OMP_CLAUSE_USE_DEVICE_PTR
:
1442 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1443 decl
= OMP_CLAUSE_DECL (c
);
1445 /* Fortran array descriptors. */
1446 if (lang_hooks
.decls
.omp_array_data (decl
, true))
1447 install_var_field (decl
, false, 19, ctx
);
1448 else if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
1449 && !omp_privatize_by_reference (decl
)
1450 && !omp_is_allocatable_or_ptr (decl
))
1451 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1452 install_var_field (decl
, true, 11, ctx
);
1454 install_var_field (decl
, false, 11, ctx
);
1455 if (DECL_SIZE (decl
)
1456 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1458 tree decl2
= DECL_VALUE_EXPR (decl
);
1459 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1460 decl2
= TREE_OPERAND (decl2
, 0);
1461 gcc_assert (DECL_P (decl2
));
1462 install_var_local (decl2
, ctx
);
1464 install_var_local (decl
, ctx
);
1467 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1468 decl
= OMP_CLAUSE_DECL (c
);
1469 while (TREE_CODE (decl
) == INDIRECT_REF
1470 || TREE_CODE (decl
) == ARRAY_REF
)
1471 decl
= TREE_OPERAND (decl
, 0);
1474 case OMP_CLAUSE_IS_DEVICE_PTR
:
1475 decl
= OMP_CLAUSE_DECL (c
);
1478 case OMP_CLAUSE__LOOPTEMP_
:
1479 case OMP_CLAUSE__REDUCTEMP_
:
1480 gcc_assert (is_taskreg_ctx (ctx
));
1481 decl
= OMP_CLAUSE_DECL (c
);
1482 install_var_field (decl
, false, 3, ctx
);
1483 install_var_local (decl
, ctx
);
1486 case OMP_CLAUSE_COPYPRIVATE
:
1487 case OMP_CLAUSE_COPYIN
:
1488 decl
= OMP_CLAUSE_DECL (c
);
1489 by_ref
= use_pointer_for_field (decl
, NULL
);
1490 install_var_field (decl
, by_ref
, 3, ctx
);
1493 case OMP_CLAUSE_FINAL
:
1495 case OMP_CLAUSE_NUM_THREADS
:
1496 case OMP_CLAUSE_NUM_TEAMS
:
1497 case OMP_CLAUSE_THREAD_LIMIT
:
1498 case OMP_CLAUSE_DEVICE
:
1499 case OMP_CLAUSE_SCHEDULE
:
1500 case OMP_CLAUSE_DIST_SCHEDULE
:
1501 case OMP_CLAUSE_DEPEND
:
1502 case OMP_CLAUSE_PRIORITY
:
1503 case OMP_CLAUSE_GRAINSIZE
:
1504 case OMP_CLAUSE_NUM_TASKS
:
1505 case OMP_CLAUSE_NUM_GANGS
:
1506 case OMP_CLAUSE_NUM_WORKERS
:
1507 case OMP_CLAUSE_VECTOR_LENGTH
:
1508 case OMP_CLAUSE_DETACH
:
1509 case OMP_CLAUSE_FILTER
:
1511 scan_omp_op (&OMP_CLAUSE_OPERAND (c
, 0), ctx
->outer
);
1515 case OMP_CLAUSE_FROM
:
1516 case OMP_CLAUSE_MAP
:
1518 scan_omp_op (&OMP_CLAUSE_SIZE (c
), ctx
->outer
);
1519 decl
= OMP_CLAUSE_DECL (c
);
1520 /* If requested, make 'decl' addressable. */
1521 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1522 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
))
1524 gcc_checking_assert (DECL_P (decl
));
1526 bool decl_addressable
= TREE_ADDRESSABLE (decl
);
1527 if (!decl_addressable
)
1529 if (!make_addressable_vars
)
1530 make_addressable_vars
= BITMAP_ALLOC (NULL
);
1531 bitmap_set_bit (make_addressable_vars
, DECL_UID (decl
));
1532 TREE_ADDRESSABLE (decl
) = 1;
1535 if (dump_enabled_p ())
1537 location_t loc
= OMP_CLAUSE_LOCATION (c
);
1538 const dump_user_location_t d_u_loc
1539 = dump_user_location_t::from_location_t (loc
);
1540 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1542 # pragma GCC diagnostic push
1543 # pragma GCC diagnostic ignored "-Wformat"
1545 if (!decl_addressable
)
1546 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1548 " made addressable\n",
1551 dump_printf_loc (MSG_NOTE
, d_u_loc
,
1553 " already made addressable\n",
1556 # pragma GCC diagnostic pop
1561 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c
) = 0;
1563 /* Global variables with "omp declare target" attribute
1564 don't need to be copied, the receiver side will use them
1565 directly. However, global variables with "omp declare target link"
1566 attribute need to be copied. Or when ALWAYS modifier is used. */
1567 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1569 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1570 && (OMP_CLAUSE_MAP_KIND (c
)
1571 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
1572 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
1573 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
)
1574 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1575 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TO
1576 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_FROM
1577 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_TOFROM
1578 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
1579 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1580 && varpool_node::get_create (decl
)->offloadable
1581 && !lookup_attribute ("omp declare target link",
1582 DECL_ATTRIBUTES (decl
)))
1584 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1585 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
)
1587 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1588 not offloaded; there is nothing to map for those. */
1589 if (!is_gimple_omp_offloaded (ctx
->stmt
)
1590 && !POINTER_TYPE_P (TREE_TYPE (decl
))
1591 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
1594 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1596 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1597 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1598 && is_omp_target (ctx
->stmt
))
1600 /* If this is an offloaded region, an attach operation should
1601 only exist when the pointer variable is mapped in a prior
1603 If we had an error, we may not have attempted to sort clauses
1604 properly, so avoid the test. */
1605 if (is_gimple_omp_offloaded (ctx
->stmt
)
1608 (maybe_lookup_decl (decl
, ctx
)
1609 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1610 && lookup_attribute ("omp declare target",
1611 DECL_ATTRIBUTES (decl
))));
1613 /* By itself, attach/detach is generated as part of pointer
1614 variable mapping and should not create new variables in the
1615 offloaded region, however sender refs for it must be created
1616 for its address to be passed to the runtime. */
1618 = build_decl (OMP_CLAUSE_LOCATION (c
),
1619 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1620 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1621 insert_field_into_struct (ctx
->record_type
, field
);
1622 /* To not clash with a map of the pointer variable itself,
1623 attach/detach maps have their field looked up by the *clause*
1624 tree expression, not the decl. */
1625 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1626 (splay_tree_key
) c
));
1627 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) c
,
1628 (splay_tree_value
) field
);
1631 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1632 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
1633 || (OMP_CLAUSE_MAP_KIND (c
)
1634 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
1636 if (TREE_CODE (decl
) == COMPONENT_REF
1637 || (TREE_CODE (decl
) == INDIRECT_REF
1638 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
1639 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1641 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
1642 == POINTER_TYPE
)))))
1644 if (DECL_SIZE (decl
)
1645 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1647 tree decl2
= DECL_VALUE_EXPR (decl
);
1648 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1649 decl2
= TREE_OPERAND (decl2
, 0);
1650 gcc_assert (DECL_P (decl2
));
1651 install_var_local (decl2
, ctx
);
1653 install_var_local (decl
, ctx
);
1658 if (DECL_SIZE (decl
)
1659 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1661 tree decl2
= DECL_VALUE_EXPR (decl
);
1662 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1663 decl2
= TREE_OPERAND (decl2
, 0);
1664 gcc_assert (DECL_P (decl2
));
1665 install_var_field (decl2
, true, 3, ctx
);
1666 install_var_local (decl2
, ctx
);
1667 install_var_local (decl
, ctx
);
1671 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
1672 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1673 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
1674 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1675 install_var_field (decl
, true, 7, ctx
);
1677 install_var_field (decl
, true, 3, ctx
);
1678 if (is_gimple_omp_offloaded (ctx
->stmt
)
1679 && !(is_gimple_omp_oacc (ctx
->stmt
)
1680 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
1681 install_var_local (decl
, ctx
);
1686 tree base
= get_base_address (decl
);
1687 tree nc
= OMP_CLAUSE_CHAIN (c
);
1690 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
1691 && OMP_CLAUSE_DECL (nc
) == base
1692 && OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
1693 && integer_zerop (OMP_CLAUSE_SIZE (nc
)))
1695 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
) = 1;
1696 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc
) = 1;
1702 scan_omp_op (&OMP_CLAUSE_DECL (c
), ctx
->outer
);
1703 decl
= OMP_CLAUSE_DECL (c
);
1705 gcc_assert (!splay_tree_lookup (ctx
->field_map
,
1706 (splay_tree_key
) decl
));
1708 = build_decl (OMP_CLAUSE_LOCATION (c
),
1709 FIELD_DECL
, NULL_TREE
, ptr_type_node
);
1710 SET_DECL_ALIGN (field
, TYPE_ALIGN (ptr_type_node
));
1711 insert_field_into_struct (ctx
->record_type
, field
);
1712 splay_tree_insert (ctx
->field_map
, (splay_tree_key
) decl
,
1713 (splay_tree_value
) field
);
1718 case OMP_CLAUSE_ORDER
:
1719 ctx
->order_concurrent
= true;
1722 case OMP_CLAUSE_BIND
:
1726 case OMP_CLAUSE_NOWAIT
:
1727 case OMP_CLAUSE_ORDERED
:
1728 case OMP_CLAUSE_COLLAPSE
:
1729 case OMP_CLAUSE_UNTIED
:
1730 case OMP_CLAUSE_MERGEABLE
:
1731 case OMP_CLAUSE_PROC_BIND
:
1732 case OMP_CLAUSE_SAFELEN
:
1733 case OMP_CLAUSE_SIMDLEN
:
1734 case OMP_CLAUSE_THREADS
:
1735 case OMP_CLAUSE_SIMD
:
1736 case OMP_CLAUSE_NOGROUP
:
1737 case OMP_CLAUSE_DEFAULTMAP
:
1738 case OMP_CLAUSE_ASYNC
:
1739 case OMP_CLAUSE_WAIT
:
1740 case OMP_CLAUSE_GANG
:
1741 case OMP_CLAUSE_WORKER
:
1742 case OMP_CLAUSE_VECTOR
:
1743 case OMP_CLAUSE_INDEPENDENT
:
1744 case OMP_CLAUSE_AUTO
:
1745 case OMP_CLAUSE_SEQ
:
1746 case OMP_CLAUSE_TILE
:
1747 case OMP_CLAUSE__SIMT_
:
1748 case OMP_CLAUSE_DEFAULT
:
1749 case OMP_CLAUSE_NONTEMPORAL
:
1750 case OMP_CLAUSE_IF_PRESENT
:
1751 case OMP_CLAUSE_FINALIZE
:
1752 case OMP_CLAUSE_TASK_REDUCTION
:
1753 case OMP_CLAUSE_ALLOCATE
:
1756 case OMP_CLAUSE_ALIGNED
:
1757 decl
= OMP_CLAUSE_DECL (c
);
1758 if (is_global_var (decl
)
1759 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1760 install_var_local (decl
, ctx
);
1763 case OMP_CLAUSE__CONDTEMP_
:
1764 decl
= OMP_CLAUSE_DECL (c
);
1765 if (is_parallel_ctx (ctx
))
1767 install_var_field (decl
, false, 3, ctx
);
1768 install_var_local (decl
, ctx
);
1770 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
1771 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
1772 && !OMP_CLAUSE__CONDTEMP__ITER (c
))
1773 install_var_local (decl
, ctx
);
1776 case OMP_CLAUSE__CACHE_
:
1777 case OMP_CLAUSE_NOHOST
:
1783 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1785 switch (OMP_CLAUSE_CODE (c
))
1787 case OMP_CLAUSE_LASTPRIVATE
:
1788 /* Let the corresponding firstprivate clause create
1790 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1791 scan_array_reductions
= true;
1792 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
1796 case OMP_CLAUSE_FIRSTPRIVATE
:
1797 case OMP_CLAUSE_PRIVATE
:
1798 case OMP_CLAUSE_LINEAR
:
1799 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
1800 case OMP_CLAUSE_IS_DEVICE_PTR
:
1801 decl
= OMP_CLAUSE_DECL (c
);
1802 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1804 while (TREE_CODE (decl
) == INDIRECT_REF
1805 || TREE_CODE (decl
) == ARRAY_REF
)
1806 decl
= TREE_OPERAND (decl
, 0);
1809 if (is_variable_sized (decl
))
1811 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1812 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IS_DEVICE_PTR
1813 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
1814 && is_gimple_omp_offloaded (ctx
->stmt
))
1816 tree decl2
= DECL_VALUE_EXPR (decl
);
1817 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1818 decl2
= TREE_OPERAND (decl2
, 0);
1819 gcc_assert (DECL_P (decl2
));
1820 install_var_local (decl2
, ctx
);
1821 fixup_remapped_decl (decl2
, ctx
, false);
1823 install_var_local (decl
, ctx
);
1825 fixup_remapped_decl (decl
, ctx
,
1826 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
1827 && OMP_CLAUSE_PRIVATE_DEBUG (c
));
1828 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
1829 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
1830 scan_array_reductions
= true;
1833 case OMP_CLAUSE_REDUCTION
:
1834 case OMP_CLAUSE_IN_REDUCTION
:
1835 decl
= OMP_CLAUSE_DECL (c
);
1836 if (TREE_CODE (decl
) != MEM_REF
&& !is_omp_target (ctx
->stmt
))
1838 if (is_variable_sized (decl
))
1839 install_var_local (decl
, ctx
);
1840 fixup_remapped_decl (decl
, ctx
, false);
1842 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1843 scan_array_reductions
= true;
1846 case OMP_CLAUSE_TASK_REDUCTION
:
1847 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1848 scan_array_reductions
= true;
1851 case OMP_CLAUSE_SHARED
:
1852 /* Ignore shared directives in teams construct inside of
1853 target construct. */
1854 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
1855 && !is_host_teams_ctx (ctx
))
1857 decl
= OMP_CLAUSE_DECL (c
);
1858 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
1860 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
1862 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
,
1865 bool by_ref
= use_pointer_for_field (decl
, ctx
);
1866 install_var_field (decl
, by_ref
, 11, ctx
);
1869 fixup_remapped_decl (decl
, ctx
, false);
1872 case OMP_CLAUSE_MAP
:
1873 if (!is_gimple_omp_offloaded (ctx
->stmt
))
1875 decl
= OMP_CLAUSE_DECL (c
);
1877 && ((OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
1878 && (OMP_CLAUSE_MAP_KIND (c
)
1879 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
1880 || TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
1881 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
))
1882 && varpool_node::get_create (decl
)->offloadable
)
1884 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
1885 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
1886 && is_omp_target (ctx
->stmt
)
1887 && !is_gimple_omp_offloaded (ctx
->stmt
))
1891 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
1892 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
1893 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1894 && !COMPLETE_TYPE_P (TREE_TYPE (decl
)))
1896 tree new_decl
= lookup_decl (decl
, ctx
);
1897 TREE_TYPE (new_decl
)
1898 = remap_type (TREE_TYPE (decl
), &ctx
->cb
);
1900 else if (DECL_SIZE (decl
)
1901 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
1903 tree decl2
= DECL_VALUE_EXPR (decl
);
1904 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
1905 decl2
= TREE_OPERAND (decl2
, 0);
1906 gcc_assert (DECL_P (decl2
));
1907 fixup_remapped_decl (decl2
, ctx
, false);
1908 fixup_remapped_decl (decl
, ctx
, true);
1911 fixup_remapped_decl (decl
, ctx
, false);
1915 case OMP_CLAUSE_COPYPRIVATE
:
1916 case OMP_CLAUSE_COPYIN
:
1917 case OMP_CLAUSE_DEFAULT
:
1919 case OMP_CLAUSE_NUM_THREADS
:
1920 case OMP_CLAUSE_NUM_TEAMS
:
1921 case OMP_CLAUSE_THREAD_LIMIT
:
1922 case OMP_CLAUSE_DEVICE
:
1923 case OMP_CLAUSE_SCHEDULE
:
1924 case OMP_CLAUSE_DIST_SCHEDULE
:
1925 case OMP_CLAUSE_NOWAIT
:
1926 case OMP_CLAUSE_ORDERED
:
1927 case OMP_CLAUSE_COLLAPSE
:
1928 case OMP_CLAUSE_UNTIED
:
1929 case OMP_CLAUSE_FINAL
:
1930 case OMP_CLAUSE_MERGEABLE
:
1931 case OMP_CLAUSE_PROC_BIND
:
1932 case OMP_CLAUSE_SAFELEN
:
1933 case OMP_CLAUSE_SIMDLEN
:
1934 case OMP_CLAUSE_ALIGNED
:
1935 case OMP_CLAUSE_DEPEND
:
1936 case OMP_CLAUSE_DETACH
:
1937 case OMP_CLAUSE_ALLOCATE
:
1938 case OMP_CLAUSE__LOOPTEMP_
:
1939 case OMP_CLAUSE__REDUCTEMP_
:
1941 case OMP_CLAUSE_FROM
:
1942 case OMP_CLAUSE_PRIORITY
:
1943 case OMP_CLAUSE_GRAINSIZE
:
1944 case OMP_CLAUSE_NUM_TASKS
:
1945 case OMP_CLAUSE_THREADS
:
1946 case OMP_CLAUSE_SIMD
:
1947 case OMP_CLAUSE_NOGROUP
:
1948 case OMP_CLAUSE_DEFAULTMAP
:
1949 case OMP_CLAUSE_ORDER
:
1950 case OMP_CLAUSE_BIND
:
1951 case OMP_CLAUSE_USE_DEVICE_PTR
:
1952 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1953 case OMP_CLAUSE_NONTEMPORAL
:
1954 case OMP_CLAUSE_ASYNC
:
1955 case OMP_CLAUSE_WAIT
:
1956 case OMP_CLAUSE_NUM_GANGS
:
1957 case OMP_CLAUSE_NUM_WORKERS
:
1958 case OMP_CLAUSE_VECTOR_LENGTH
:
1959 case OMP_CLAUSE_GANG
:
1960 case OMP_CLAUSE_WORKER
:
1961 case OMP_CLAUSE_VECTOR
:
1962 case OMP_CLAUSE_INDEPENDENT
:
1963 case OMP_CLAUSE_AUTO
:
1964 case OMP_CLAUSE_SEQ
:
1965 case OMP_CLAUSE_TILE
:
1966 case OMP_CLAUSE__SIMT_
:
1967 case OMP_CLAUSE_IF_PRESENT
:
1968 case OMP_CLAUSE_FINALIZE
:
1969 case OMP_CLAUSE_FILTER
:
1970 case OMP_CLAUSE__CONDTEMP_
:
1973 case OMP_CLAUSE__CACHE_
:
1974 case OMP_CLAUSE_NOHOST
:
1980 gcc_checking_assert (!scan_array_reductions
1981 || !is_gimple_omp_oacc (ctx
->stmt
));
1982 if (scan_array_reductions
)
1984 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
1985 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
1986 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
1987 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
1988 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
1990 omp_context
*rctx
= ctx
;
1991 if (is_omp_target (ctx
->stmt
))
1993 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
), rctx
);
1994 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), rctx
);
1996 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
1997 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
1998 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
1999 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
2000 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
2001 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
2005 /* Create a new name for omp child function. Returns an identifier. */
2008 create_omp_child_function_name (bool task_copy
)
2010 return clone_function_name_numbered (current_function_decl
,
2011 task_copy
? "_omp_cpyfn" : "_omp_fn");
2014 /* Return true if CTX may belong to offloaded code: either if current function
2015 is offloaded, or any enclosing context corresponds to a target region. */
2018 omp_maybe_offloaded_ctx (omp_context
*ctx
)
2020 if (cgraph_node::get (current_function_decl
)->offloadable
)
2022 for (; ctx
; ctx
= ctx
->outer
)
2023 if (is_gimple_omp_offloaded (ctx
->stmt
))
2028 /* Build a decl for the omp child function. It'll not contain a body
2029 yet, just the bare decl. */
2032 create_omp_child_function (omp_context
*ctx
, bool task_copy
)
2034 tree decl
, type
, name
, t
;
2036 name
= create_omp_child_function_name (task_copy
);
2038 type
= build_function_type_list (void_type_node
, ptr_type_node
,
2039 ptr_type_node
, NULL_TREE
);
2041 type
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2043 decl
= build_decl (gimple_location (ctx
->stmt
), FUNCTION_DECL
, name
, type
);
2045 gcc_checking_assert (!is_gimple_omp_oacc (ctx
->stmt
)
2048 ctx
->cb
.dst_fn
= decl
;
2050 gimple_omp_task_set_copy_fn (ctx
->stmt
, decl
);
2052 TREE_STATIC (decl
) = 1;
2053 TREE_USED (decl
) = 1;
2054 DECL_ARTIFICIAL (decl
) = 1;
2055 DECL_IGNORED_P (decl
) = 0;
2056 TREE_PUBLIC (decl
) = 0;
2057 DECL_UNINLINABLE (decl
) = 1;
2058 DECL_EXTERNAL (decl
) = 0;
2059 DECL_CONTEXT (decl
) = NULL_TREE
;
2060 DECL_INITIAL (decl
) = make_node (BLOCK
);
2061 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl
)) = decl
;
2062 DECL_ATTRIBUTES (decl
) = DECL_ATTRIBUTES (current_function_decl
);
2063 /* Remove omp declare simd attribute from the new attributes. */
2064 if (tree a
= lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl
)))
2066 while (tree a2
= lookup_attribute ("omp declare simd", TREE_CHAIN (a
)))
2069 for (tree
*p
= &DECL_ATTRIBUTES (decl
); *p
!= a
;)
2070 if (is_attribute_p ("omp declare simd", get_attribute_name (*p
)))
2071 *p
= TREE_CHAIN (*p
);
2074 tree chain
= TREE_CHAIN (*p
);
2075 *p
= copy_node (*p
);
2076 p
= &TREE_CHAIN (*p
);
2080 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
2081 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl
);
2082 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
2083 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl
);
2084 DECL_FUNCTION_VERSIONED (decl
)
2085 = DECL_FUNCTION_VERSIONED (current_function_decl
);
2087 if (omp_maybe_offloaded_ctx (ctx
))
2089 cgraph_node::get_create (decl
)->offloadable
= 1;
2090 if (ENABLE_OFFLOADING
)
2091 g
->have_offload
= true;
2094 if (cgraph_node::get_create (decl
)->offloadable
)
2096 const char *target_attr
= (is_gimple_omp_offloaded (ctx
->stmt
)
2097 ? "omp target entrypoint"
2098 : "omp declare target");
2099 if (lookup_attribute ("omp declare target",
2100 DECL_ATTRIBUTES (current_function_decl
)))
2102 if (is_gimple_omp_offloaded (ctx
->stmt
))
2103 DECL_ATTRIBUTES (decl
)
2104 = remove_attribute ("omp declare target",
2105 copy_list (DECL_ATTRIBUTES (decl
)));
2110 && is_gimple_omp_offloaded (ctx
->stmt
)
2111 && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl
)) == NULL_TREE
)
2112 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("noclone"),
2113 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2115 DECL_ATTRIBUTES (decl
)
2116 = tree_cons (get_identifier (target_attr
),
2117 NULL_TREE
, DECL_ATTRIBUTES (decl
));
2120 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2121 RESULT_DECL
, NULL_TREE
, void_type_node
);
2122 DECL_ARTIFICIAL (t
) = 1;
2123 DECL_IGNORED_P (t
) = 1;
2124 DECL_CONTEXT (t
) = decl
;
2125 DECL_RESULT (decl
) = t
;
2127 tree data_name
= get_identifier (".omp_data_i");
2128 t
= build_decl (DECL_SOURCE_LOCATION (decl
), PARM_DECL
, data_name
,
2130 DECL_ARTIFICIAL (t
) = 1;
2131 DECL_NAMELESS (t
) = 1;
2132 DECL_ARG_TYPE (t
) = ptr_type_node
;
2133 DECL_CONTEXT (t
) = current_function_decl
;
2135 TREE_READONLY (t
) = 1;
2136 DECL_ARGUMENTS (decl
) = t
;
2138 ctx
->receiver_decl
= t
;
2141 t
= build_decl (DECL_SOURCE_LOCATION (decl
),
2142 PARM_DECL
, get_identifier (".omp_data_o"),
2144 DECL_ARTIFICIAL (t
) = 1;
2145 DECL_NAMELESS (t
) = 1;
2146 DECL_ARG_TYPE (t
) = ptr_type_node
;
2147 DECL_CONTEXT (t
) = current_function_decl
;
2149 TREE_ADDRESSABLE (t
) = 1;
2150 DECL_CHAIN (t
) = DECL_ARGUMENTS (decl
);
2151 DECL_ARGUMENTS (decl
) = t
;
2154 /* Allocate memory for the function structure. The call to
2155 allocate_struct_function clobbers CFUN, so we need to restore
2157 push_struct_function (decl
);
2158 cfun
->function_end_locus
= gimple_location (ctx
->stmt
);
2159 init_tree_ssa (cfun
);
2163 /* Callback for walk_gimple_seq. Check if combined parallel
2164 contains gimple_omp_for_combined_into_p OMP_FOR. */
2167 omp_find_combined_for (gimple_stmt_iterator
*gsi_p
,
2168 bool *handled_ops_p
,
2169 struct walk_stmt_info
*wi
)
2171 gimple
*stmt
= gsi_stmt (*gsi_p
);
2173 *handled_ops_p
= true;
2174 switch (gimple_code (stmt
))
2178 case GIMPLE_OMP_FOR
:
2179 if (gimple_omp_for_combined_into_p (stmt
)
2180 && gimple_omp_for_kind (stmt
)
2181 == *(const enum gf_mask
*) (wi
->info
))
2184 return integer_zero_node
;
2193 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2196 add_taskreg_looptemp_clauses (enum gf_mask msk
, gimple
*stmt
,
2197 omp_context
*outer_ctx
)
2199 struct walk_stmt_info wi
;
2201 memset (&wi
, 0, sizeof (wi
));
2203 wi
.info
= (void *) &msk
;
2204 walk_gimple_seq (gimple_omp_body (stmt
), omp_find_combined_for
, NULL
, &wi
);
2205 if (wi
.info
!= (void *) &msk
)
2207 gomp_for
*for_stmt
= as_a
<gomp_for
*> ((gimple
*) wi
.info
);
2208 struct omp_for_data fd
;
2209 omp_extract_for_data (for_stmt
, &fd
, NULL
);
2210 /* We need two temporaries with fd.loop.v type (istart/iend)
2211 and then (fd.collapse - 1) temporaries with the same
2212 type for count2 ... countN-1 vars if not constant. */
2213 size_t count
= 2, i
;
2214 tree type
= fd
.iter_type
;
2216 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
2218 count
+= fd
.collapse
- 1;
2219 /* If there are lastprivate clauses on the inner
2220 GIMPLE_OMP_FOR, add one more temporaries for the total number
2221 of iterations (product of count1 ... countN-1). */
2222 if (omp_find_clause (gimple_omp_for_clauses (for_stmt
),
2223 OMP_CLAUSE_LASTPRIVATE
)
2224 || (msk
== GF_OMP_FOR_KIND_FOR
2225 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2226 OMP_CLAUSE_LASTPRIVATE
)))
2228 tree temp
= create_tmp_var (type
);
2229 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2230 OMP_CLAUSE__LOOPTEMP_
);
2231 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2232 OMP_CLAUSE_DECL (c
) = temp
;
2233 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2234 gimple_omp_taskreg_set_clauses (stmt
, c
);
2237 && fd
.last_nonrect
== fd
.first_nonrect
+ 1)
2238 if (tree v
= gimple_omp_for_index (for_stmt
, fd
.last_nonrect
))
2239 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
2241 v
= gimple_omp_for_index (for_stmt
, fd
.first_nonrect
);
2242 tree type2
= TREE_TYPE (v
);
2244 for (i
= 0; i
< 3; i
++)
2246 tree temp
= create_tmp_var (type2
);
2247 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
2248 OMP_CLAUSE__LOOPTEMP_
);
2249 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2250 OMP_CLAUSE_DECL (c
) = temp
;
2251 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2252 gimple_omp_taskreg_set_clauses (stmt
, c
);
2256 for (i
= 0; i
< count
; i
++)
2258 tree temp
= create_tmp_var (type
);
2259 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
2260 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2261 OMP_CLAUSE_DECL (c
) = temp
;
2262 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2263 gimple_omp_taskreg_set_clauses (stmt
, c
);
2266 if (msk
== GF_OMP_FOR_KIND_TASKLOOP
2267 && omp_find_clause (gimple_omp_task_clauses (stmt
),
2268 OMP_CLAUSE_REDUCTION
))
2270 tree type
= build_pointer_type (pointer_sized_int_node
);
2271 tree temp
= create_tmp_var (type
);
2272 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2273 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2274 OMP_CLAUSE_DECL (c
) = temp
;
2275 OMP_CLAUSE_CHAIN (c
) = gimple_omp_task_clauses (stmt
);
2276 gimple_omp_task_set_clauses (stmt
, c
);
2280 /* Scan an OpenMP parallel directive. */
2283 scan_omp_parallel (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2287 gomp_parallel
*stmt
= as_a
<gomp_parallel
*> (gsi_stmt (*gsi
));
2289 /* Ignore parallel directives with empty bodies, unless there
2290 are copyin clauses. */
2292 && empty_body_p (gimple_omp_body (stmt
))
2293 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2294 OMP_CLAUSE_COPYIN
) == NULL
)
2296 gsi_replace (gsi
, gimple_build_nop (), false);
2300 if (gimple_omp_parallel_combined_p (stmt
))
2301 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR
, stmt
, outer_ctx
);
2302 for (tree c
= omp_find_clause (gimple_omp_parallel_clauses (stmt
),
2303 OMP_CLAUSE_REDUCTION
);
2304 c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
), OMP_CLAUSE_REDUCTION
))
2305 if (OMP_CLAUSE_REDUCTION_TASK (c
))
2307 tree type
= build_pointer_type (pointer_sized_int_node
);
2308 tree temp
= create_tmp_var (type
);
2309 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
2311 insert_decl_map (&outer_ctx
->cb
, temp
, temp
);
2312 OMP_CLAUSE_DECL (c
) = temp
;
2313 OMP_CLAUSE_CHAIN (c
) = gimple_omp_parallel_clauses (stmt
);
2314 gimple_omp_parallel_set_clauses (stmt
, c
);
2317 else if (OMP_CLAUSE_CHAIN (c
) == NULL_TREE
)
2320 ctx
= new_omp_context (stmt
, outer_ctx
);
2321 taskreg_contexts
.safe_push (ctx
);
2322 if (taskreg_nesting_level
> 1)
2323 ctx
->is_nested
= true;
2324 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2325 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2326 name
= create_tmp_var_name (".omp_data_s");
2327 name
= build_decl (gimple_location (stmt
),
2328 TYPE_DECL
, name
, ctx
->record_type
);
2329 DECL_ARTIFICIAL (name
) = 1;
2330 DECL_NAMELESS (name
) = 1;
2331 TYPE_NAME (ctx
->record_type
) = name
;
2332 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2333 create_omp_child_function (ctx
, false);
2334 gimple_omp_parallel_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2336 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt
), ctx
);
2337 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2339 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2340 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2343 /* Scan an OpenMP task directive. */
2346 scan_omp_task (gimple_stmt_iterator
*gsi
, omp_context
*outer_ctx
)
2350 gomp_task
*stmt
= as_a
<gomp_task
*> (gsi_stmt (*gsi
));
2352 /* Ignore task directives with empty bodies, unless they have depend
2355 && gimple_omp_body (stmt
)
2356 && empty_body_p (gimple_omp_body (stmt
))
2357 && !omp_find_clause (gimple_omp_task_clauses (stmt
), OMP_CLAUSE_DEPEND
))
2359 gsi_replace (gsi
, gimple_build_nop (), false);
2363 if (gimple_omp_task_taskloop_p (stmt
))
2364 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP
, stmt
, outer_ctx
);
2366 ctx
= new_omp_context (stmt
, outer_ctx
);
2368 if (gimple_omp_task_taskwait_p (stmt
))
2370 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2374 taskreg_contexts
.safe_push (ctx
);
2375 if (taskreg_nesting_level
> 1)
2376 ctx
->is_nested
= true;
2377 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
2378 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
2379 name
= create_tmp_var_name (".omp_data_s");
2380 name
= build_decl (gimple_location (stmt
),
2381 TYPE_DECL
, name
, ctx
->record_type
);
2382 DECL_ARTIFICIAL (name
) = 1;
2383 DECL_NAMELESS (name
) = 1;
2384 TYPE_NAME (ctx
->record_type
) = name
;
2385 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
2386 create_omp_child_function (ctx
, false);
2387 gimple_omp_task_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
2389 scan_sharing_clauses (gimple_omp_task_clauses (stmt
), ctx
);
2391 if (ctx
->srecord_type
)
2393 name
= create_tmp_var_name (".omp_data_a");
2394 name
= build_decl (gimple_location (stmt
),
2395 TYPE_DECL
, name
, ctx
->srecord_type
);
2396 DECL_ARTIFICIAL (name
) = 1;
2397 DECL_NAMELESS (name
) = 1;
2398 TYPE_NAME (ctx
->srecord_type
) = name
;
2399 TYPE_ARTIFICIAL (ctx
->srecord_type
) = 1;
2400 create_omp_child_function (ctx
, true);
2403 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2405 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
2407 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
2408 t
= build_int_cst (long_integer_type_node
, 0);
2409 gimple_omp_task_set_arg_size (stmt
, t
);
2410 t
= build_int_cst (long_integer_type_node
, 1);
2411 gimple_omp_task_set_arg_align (stmt
, t
);
2415 /* Helper function for finish_taskreg_scan, called through walk_tree.
2416 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2417 tree, replace it in the expression. */
2420 finish_taskreg_remap (tree
*tp
, int *walk_subtrees
, void *data
)
2424 omp_context
*ctx
= (omp_context
*) data
;
2425 tree t
= maybe_lookup_decl_in_outer_ctx (*tp
, ctx
);
2428 if (DECL_HAS_VALUE_EXPR_P (t
))
2429 t
= unshare_expr (DECL_VALUE_EXPR (t
));
2434 else if (IS_TYPE_OR_DECL_P (*tp
))
2439 /* If any decls have been made addressable during scan_omp,
2440 adjust their fields if needed, and layout record types
2441 of parallel/task constructs. */
2444 finish_taskreg_scan (omp_context
*ctx
)
2446 if (ctx
->record_type
== NULL_TREE
)
2449 /* If any make_addressable_vars were needed, verify all
2450 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2451 statements if use_pointer_for_field hasn't changed
2452 because of that. If it did, update field types now. */
2453 if (make_addressable_vars
)
2457 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2458 c
; c
= OMP_CLAUSE_CHAIN (c
))
2459 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
2460 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
2462 tree decl
= OMP_CLAUSE_DECL (c
);
2464 /* Global variables don't need to be copied,
2465 the receiver side will use them directly. */
2466 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl
, ctx
)))
2468 if (!bitmap_bit_p (make_addressable_vars
, DECL_UID (decl
))
2469 || !use_pointer_for_field (decl
, ctx
))
2471 tree field
= lookup_field (decl
, ctx
);
2472 if (TREE_CODE (TREE_TYPE (field
)) == POINTER_TYPE
2473 && TREE_TYPE (TREE_TYPE (field
)) == TREE_TYPE (decl
))
2475 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
2476 TREE_THIS_VOLATILE (field
) = 0;
2477 DECL_USER_ALIGN (field
) = 0;
2478 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
2479 if (TYPE_ALIGN (ctx
->record_type
) < DECL_ALIGN (field
))
2480 SET_TYPE_ALIGN (ctx
->record_type
, DECL_ALIGN (field
));
2481 if (ctx
->srecord_type
)
2483 tree sfield
= lookup_sfield (decl
, ctx
);
2484 TREE_TYPE (sfield
) = TREE_TYPE (field
);
2485 TREE_THIS_VOLATILE (sfield
) = 0;
2486 DECL_USER_ALIGN (sfield
) = 0;
2487 SET_DECL_ALIGN (sfield
, DECL_ALIGN (field
));
2488 if (TYPE_ALIGN (ctx
->srecord_type
) < DECL_ALIGN (sfield
))
2489 SET_TYPE_ALIGN (ctx
->srecord_type
, DECL_ALIGN (sfield
));
2494 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_PARALLEL
)
2496 tree clauses
= gimple_omp_parallel_clauses (ctx
->stmt
);
2497 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2500 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2501 expects to find it at the start of data. */
2502 tree f
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2503 tree
*p
= &TYPE_FIELDS (ctx
->record_type
);
2507 *p
= DECL_CHAIN (*p
);
2511 p
= &DECL_CHAIN (*p
);
2512 DECL_CHAIN (f
) = TYPE_FIELDS (ctx
->record_type
);
2513 TYPE_FIELDS (ctx
->record_type
) = f
;
2515 layout_type (ctx
->record_type
);
2516 fixup_child_record_type (ctx
);
2518 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
2520 layout_type (ctx
->record_type
);
2521 fixup_child_record_type (ctx
);
2525 location_t loc
= gimple_location (ctx
->stmt
);
2526 tree
*p
, vla_fields
= NULL_TREE
, *q
= &vla_fields
;
2528 = omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
2530 /* Move VLA fields to the end. */
2531 p
= &TYPE_FIELDS (ctx
->record_type
);
2533 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p
))
2534 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p
))))
2537 *p
= TREE_CHAIN (*p
);
2538 TREE_CHAIN (*q
) = NULL_TREE
;
2539 q
= &TREE_CHAIN (*q
);
2542 p
= &DECL_CHAIN (*p
);
2544 if (gimple_omp_task_taskloop_p (ctx
->stmt
))
2546 /* Move fields corresponding to first and second _looptemp_
2547 clause first. There are filled by GOMP_taskloop
2548 and thus need to be in specific positions. */
2549 tree clauses
= gimple_omp_task_clauses (ctx
->stmt
);
2550 tree c1
= omp_find_clause (clauses
, OMP_CLAUSE__LOOPTEMP_
);
2551 tree c2
= omp_find_clause (OMP_CLAUSE_CHAIN (c1
),
2552 OMP_CLAUSE__LOOPTEMP_
);
2553 tree c3
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
2554 tree f1
= lookup_field (OMP_CLAUSE_DECL (c1
), ctx
);
2555 tree f2
= lookup_field (OMP_CLAUSE_DECL (c2
), ctx
);
2556 tree f3
= c3
? lookup_field (OMP_CLAUSE_DECL (c3
), ctx
) : NULL_TREE
;
2557 p
= &TYPE_FIELDS (ctx
->record_type
);
2559 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2560 *p
= DECL_CHAIN (*p
);
2562 p
= &DECL_CHAIN (*p
);
2563 DECL_CHAIN (f1
) = f2
;
2566 DECL_CHAIN (f2
) = f3
;
2567 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->record_type
);
2570 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->record_type
);
2571 TYPE_FIELDS (ctx
->record_type
) = f1
;
2572 if (ctx
->srecord_type
)
2574 f1
= lookup_sfield (OMP_CLAUSE_DECL (c1
), ctx
);
2575 f2
= lookup_sfield (OMP_CLAUSE_DECL (c2
), ctx
);
2577 f3
= lookup_sfield (OMP_CLAUSE_DECL (c3
), ctx
);
2578 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2580 if (*p
== f1
|| *p
== f2
|| *p
== f3
)
2581 *p
= DECL_CHAIN (*p
);
2583 p
= &DECL_CHAIN (*p
);
2584 DECL_CHAIN (f1
) = f2
;
2585 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2588 DECL_CHAIN (f2
) = f3
;
2589 DECL_CHAIN (f3
) = TYPE_FIELDS (ctx
->srecord_type
);
2592 DECL_CHAIN (f2
) = TYPE_FIELDS (ctx
->srecord_type
);
2593 TYPE_FIELDS (ctx
->srecord_type
) = f1
;
2600 /* Look for a firstprivate clause with the detach event handle. */
2601 for (c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
2602 c
; c
= OMP_CLAUSE_CHAIN (c
))
2604 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
2606 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c
), ctx
)
2607 == OMP_CLAUSE_DECL (detach_clause
))
2612 field
= lookup_field (OMP_CLAUSE_DECL (c
), ctx
);
2614 /* Move field corresponding to the detach clause first.
2615 This is filled by GOMP_task and needs to be in a
2616 specific position. */
2617 p
= &TYPE_FIELDS (ctx
->record_type
);
2620 *p
= DECL_CHAIN (*p
);
2622 p
= &DECL_CHAIN (*p
);
2623 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->record_type
);
2624 TYPE_FIELDS (ctx
->record_type
) = field
;
2625 if (ctx
->srecord_type
)
2627 field
= lookup_sfield (OMP_CLAUSE_DECL (c
), ctx
);
2628 p
= &TYPE_FIELDS (ctx
->srecord_type
);
2631 *p
= DECL_CHAIN (*p
);
2633 p
= &DECL_CHAIN (*p
);
2634 DECL_CHAIN (field
) = TYPE_FIELDS (ctx
->srecord_type
);
2635 TYPE_FIELDS (ctx
->srecord_type
) = field
;
2638 layout_type (ctx
->record_type
);
2639 fixup_child_record_type (ctx
);
2640 if (ctx
->srecord_type
)
2641 layout_type (ctx
->srecord_type
);
2642 tree t
= fold_convert_loc (loc
, long_integer_type_node
,
2643 TYPE_SIZE_UNIT (ctx
->record_type
));
2644 if (TREE_CODE (t
) != INTEGER_CST
)
2646 t
= unshare_expr (t
);
2647 walk_tree (&t
, finish_taskreg_remap
, ctx
, NULL
);
2649 gimple_omp_task_set_arg_size (ctx
->stmt
, t
);
2650 t
= build_int_cst (long_integer_type_node
,
2651 TYPE_ALIGN_UNIT (ctx
->record_type
));
2652 gimple_omp_task_set_arg_align (ctx
->stmt
, t
);
2656 /* Find the enclosing offload context. */
2658 static omp_context
*
2659 enclosing_target_ctx (omp_context
*ctx
)
2661 for (; ctx
; ctx
= ctx
->outer
)
2662 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
)
2668 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2670 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2673 ctx_in_oacc_kernels_region (omp_context
*ctx
)
2675 for (;ctx
!= NULL
; ctx
= ctx
->outer
)
2677 gimple
*stmt
= ctx
->stmt
;
2678 if (gimple_code (stmt
) == GIMPLE_OMP_TARGET
2679 && gimple_omp_target_kind (stmt
) == GF_OMP_TARGET_KIND_OACC_KERNELS
)
2686 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2687 (This doesn't include OpenACC 'kernels' decomposed parts.)
2688 Until kernels handling moves to use the same loop indirection
2689 scheme as parallel, we need to do this checking early. */
2692 check_oacc_kernel_gwv (gomp_for
*stmt
, omp_context
*ctx
)
2694 bool checking
= true;
2695 unsigned outer_mask
= 0;
2696 unsigned this_mask
= 0;
2697 bool has_seq
= false, has_auto
= false;
2700 outer_mask
= check_oacc_kernel_gwv (NULL
, ctx
->outer
);
2704 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
)
2706 stmt
= as_a
<gomp_for
*> (ctx
->stmt
);
2709 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2711 switch (OMP_CLAUSE_CODE (c
))
2713 case OMP_CLAUSE_GANG
:
2714 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_GANG
);
2716 case OMP_CLAUSE_WORKER
:
2717 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_WORKER
);
2719 case OMP_CLAUSE_VECTOR
:
2720 this_mask
|= GOMP_DIM_MASK (GOMP_DIM_VECTOR
);
2722 case OMP_CLAUSE_SEQ
:
2725 case OMP_CLAUSE_AUTO
:
2735 if (has_seq
&& (this_mask
|| has_auto
))
2736 error_at (gimple_location (stmt
), "%<seq%> overrides other"
2737 " OpenACC loop specifiers");
2738 else if (has_auto
&& this_mask
)
2739 error_at (gimple_location (stmt
), "%<auto%> conflicts with other"
2740 " OpenACC loop specifiers");
2742 if (this_mask
& outer_mask
)
2743 error_at (gimple_location (stmt
), "inner loop uses same"
2744 " OpenACC parallelism as containing loop");
2747 return outer_mask
| this_mask
;
2750 /* Scan a GIMPLE_OMP_FOR. */
2752 static omp_context
*
2753 scan_omp_for (gomp_for
*stmt
, omp_context
*outer_ctx
)
2757 tree clauses
= gimple_omp_for_clauses (stmt
);
2759 ctx
= new_omp_context (stmt
, outer_ctx
);
2761 if (is_gimple_omp_oacc (stmt
))
2763 omp_context
*tgt
= enclosing_target_ctx (outer_ctx
);
2765 if (!(tgt
&& is_oacc_kernels (tgt
)))
2766 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2769 switch (OMP_CLAUSE_CODE (c
))
2771 case OMP_CLAUSE_GANG
:
2772 c_op0
= OMP_CLAUSE_GANG_EXPR (c
);
2775 case OMP_CLAUSE_WORKER
:
2776 c_op0
= OMP_CLAUSE_WORKER_EXPR (c
);
2779 case OMP_CLAUSE_VECTOR
:
2780 c_op0
= OMP_CLAUSE_VECTOR_EXPR (c
);
2789 /* By construction, this is impossible for OpenACC 'kernels'
2790 decomposed parts. */
2791 gcc_assert (!(tgt
&& is_oacc_kernels_decomposed_part (tgt
)));
2793 error_at (OMP_CLAUSE_LOCATION (c
),
2794 "argument not permitted on %qs clause",
2795 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
2797 inform (gimple_location (tgt
->stmt
),
2798 "enclosing parent compute construct");
2799 else if (oacc_get_fn_attrib (current_function_decl
))
2800 inform (DECL_SOURCE_LOCATION (current_function_decl
),
2801 "enclosing routine");
2807 if (tgt
&& is_oacc_kernels (tgt
))
2808 check_oacc_kernel_gwv (stmt
, ctx
);
2810 /* Collect all variables named in reductions on this loop. Ensure
2811 that, if this loop has a reduction on some variable v, and there is
2812 a reduction on v somewhere in an outer context, then there is a
2813 reduction on v on all intervening loops as well. */
2814 tree local_reduction_clauses
= NULL
;
2815 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
2817 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
2818 local_reduction_clauses
2819 = tree_cons (NULL
, c
, local_reduction_clauses
);
2821 if (ctx
->outer_reduction_clauses
== NULL
&& ctx
->outer
!= NULL
)
2822 ctx
->outer_reduction_clauses
2823 = chainon (unshare_expr (ctx
->outer
->local_reduction_clauses
),
2824 ctx
->outer
->outer_reduction_clauses
);
2825 tree outer_reduction_clauses
= ctx
->outer_reduction_clauses
;
2826 tree local_iter
= local_reduction_clauses
;
2827 for (; local_iter
; local_iter
= TREE_CHAIN (local_iter
))
2829 tree local_clause
= TREE_VALUE (local_iter
);
2830 tree local_var
= OMP_CLAUSE_DECL (local_clause
);
2831 tree_code local_op
= OMP_CLAUSE_REDUCTION_CODE (local_clause
);
2832 bool have_outer_reduction
= false;
2833 tree ctx_iter
= outer_reduction_clauses
;
2834 for (; ctx_iter
; ctx_iter
= TREE_CHAIN (ctx_iter
))
2836 tree outer_clause
= TREE_VALUE (ctx_iter
);
2837 tree outer_var
= OMP_CLAUSE_DECL (outer_clause
);
2838 tree_code outer_op
= OMP_CLAUSE_REDUCTION_CODE (outer_clause
);
2839 if (outer_var
== local_var
&& outer_op
!= local_op
)
2841 warning_at (OMP_CLAUSE_LOCATION (local_clause
), 0,
2842 "conflicting reduction operations for %qE",
2844 inform (OMP_CLAUSE_LOCATION (outer_clause
),
2845 "location of the previous reduction for %qE",
2848 if (outer_var
== local_var
)
2850 have_outer_reduction
= true;
2854 if (have_outer_reduction
)
2856 /* There is a reduction on outer_var both on this loop and on
2857 some enclosing loop. Walk up the context tree until such a
2858 loop with a reduction on outer_var is found, and complain
2859 about all intervening loops that do not have such a
2861 struct omp_context
*curr_loop
= ctx
->outer
;
2863 while (curr_loop
!= NULL
)
2865 tree curr_iter
= curr_loop
->local_reduction_clauses
;
2866 for (; curr_iter
; curr_iter
= TREE_CHAIN (curr_iter
))
2868 tree curr_clause
= TREE_VALUE (curr_iter
);
2869 tree curr_var
= OMP_CLAUSE_DECL (curr_clause
);
2870 if (curr_var
== local_var
)
2877 warning_at (gimple_location (curr_loop
->stmt
), 0,
2878 "nested loop in reduction needs "
2879 "reduction clause for %qE",
2883 curr_loop
= curr_loop
->outer
;
2887 ctx
->local_reduction_clauses
= local_reduction_clauses
;
2888 ctx
->outer_reduction_clauses
2889 = chainon (unshare_expr (ctx
->local_reduction_clauses
),
2890 ctx
->outer_reduction_clauses
);
2892 if (tgt
&& is_oacc_kernels (tgt
))
2894 /* Strip out reductions, as they are not handled yet. */
2895 tree
*prev_ptr
= &clauses
;
2897 while (tree probe
= *prev_ptr
)
2899 tree
*next_ptr
= &OMP_CLAUSE_CHAIN (probe
);
2901 if (OMP_CLAUSE_CODE (probe
) == OMP_CLAUSE_REDUCTION
)
2902 *prev_ptr
= *next_ptr
;
2904 prev_ptr
= next_ptr
;
2907 gimple_omp_for_set_clauses (stmt
, clauses
);
2911 scan_sharing_clauses (clauses
, ctx
);
2913 scan_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
2914 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2916 scan_omp_op (gimple_omp_for_index_ptr (stmt
, i
), ctx
);
2917 scan_omp_op (gimple_omp_for_initial_ptr (stmt
, i
), ctx
);
2918 scan_omp_op (gimple_omp_for_final_ptr (stmt
, i
), ctx
);
2919 scan_omp_op (gimple_omp_for_incr_ptr (stmt
, i
), ctx
);
2921 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
2925 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2928 scan_omp_simd (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2929 omp_context
*outer_ctx
)
2931 gbind
*bind
= gimple_build_bind (NULL
, NULL
, NULL
);
2932 gsi_replace (gsi
, bind
, false);
2933 gimple_seq seq
= NULL
;
2934 gimple
*g
= gimple_build_call_internal (IFN_GOMP_USE_SIMT
, 0);
2935 tree cond
= create_tmp_var_raw (integer_type_node
);
2936 DECL_CONTEXT (cond
) = current_function_decl
;
2937 DECL_SEEN_IN_BIND_EXPR_P (cond
) = 1;
2938 gimple_bind_set_vars (bind
, cond
);
2939 gimple_call_set_lhs (g
, cond
);
2940 gimple_seq_add_stmt (&seq
, g
);
2941 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
2942 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
2943 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
2944 g
= gimple_build_cond (NE_EXPR
, cond
, integer_zero_node
, lab1
, lab2
);
2945 gimple_seq_add_stmt (&seq
, g
);
2946 g
= gimple_build_label (lab1
);
2947 gimple_seq_add_stmt (&seq
, g
);
2948 gimple_seq new_seq
= copy_gimple_seq_and_replace_locals (stmt
);
2949 gomp_for
*new_stmt
= as_a
<gomp_for
*> (new_seq
);
2950 tree clause
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE__SIMT_
);
2951 OMP_CLAUSE_CHAIN (clause
) = gimple_omp_for_clauses (new_stmt
);
2952 gimple_omp_for_set_clauses (new_stmt
, clause
);
2953 gimple_seq_add_stmt (&seq
, new_stmt
);
2954 g
= gimple_build_goto (lab3
);
2955 gimple_seq_add_stmt (&seq
, g
);
2956 g
= gimple_build_label (lab2
);
2957 gimple_seq_add_stmt (&seq
, g
);
2958 gimple_seq_add_stmt (&seq
, stmt
);
2959 g
= gimple_build_label (lab3
);
2960 gimple_seq_add_stmt (&seq
, g
);
2961 gimple_bind_set_body (bind
, seq
);
2963 scan_omp_for (new_stmt
, outer_ctx
);
2964 scan_omp_for (stmt
, outer_ctx
)->simt_stmt
= new_stmt
;
2967 static tree
omp_find_scan (gimple_stmt_iterator
*, bool *,
2968 struct walk_stmt_info
*);
2969 static omp_context
*maybe_lookup_ctx (gimple
*);
2971 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2972 for scan phase loop. */
2975 scan_omp_simd_scan (gimple_stmt_iterator
*gsi
, gomp_for
*stmt
,
2976 omp_context
*outer_ctx
)
2978 /* The only change between inclusive and exclusive scan will be
2979 within the first simd loop, so just use inclusive in the
2980 worksharing loop. */
2981 outer_ctx
->scan_inclusive
= true;
2982 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_INCLUSIVE
);
2983 OMP_CLAUSE_DECL (c
) = integer_zero_node
;
2985 gomp_scan
*input_stmt
= gimple_build_omp_scan (NULL
, NULL_TREE
);
2986 gomp_scan
*scan_stmt
= gimple_build_omp_scan (NULL
, c
);
2987 gsi_replace (gsi
, input_stmt
, false);
2988 gimple_seq input_body
= NULL
;
2989 gimple_seq_add_stmt (&input_body
, stmt
);
2990 gsi_insert_after (gsi
, scan_stmt
, GSI_NEW_STMT
);
2992 gimple_stmt_iterator input1_gsi
= gsi_none ();
2993 struct walk_stmt_info wi
;
2994 memset (&wi
, 0, sizeof (wi
));
2996 wi
.info
= (void *) &input1_gsi
;
2997 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), omp_find_scan
, NULL
, &wi
);
2998 gcc_assert (!gsi_end_p (input1_gsi
));
3000 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
3001 gsi_next (&input1_gsi
);
3002 gimple
*scan_stmt1
= gsi_stmt (input1_gsi
);
3003 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
3004 c
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (scan_stmt1
));
3005 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3006 std::swap (input_stmt1
, scan_stmt1
);
3008 gimple_seq input_body1
= gimple_omp_body (input_stmt1
);
3009 gimple_omp_set_body (input_stmt1
, NULL
);
3011 gimple_seq scan_body
= copy_gimple_seq_and_replace_locals (stmt
);
3012 gomp_for
*new_stmt
= as_a
<gomp_for
*> (scan_body
);
3014 gimple_omp_set_body (input_stmt1
, input_body1
);
3015 gimple_omp_set_body (scan_stmt1
, NULL
);
3017 gimple_stmt_iterator input2_gsi
= gsi_none ();
3018 memset (&wi
, 0, sizeof (wi
));
3020 wi
.info
= (void *) &input2_gsi
;
3021 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt
), omp_find_scan
,
3023 gcc_assert (!gsi_end_p (input2_gsi
));
3025 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
3026 gsi_next (&input2_gsi
);
3027 gimple
*scan_stmt2
= gsi_stmt (input2_gsi
);
3028 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
3029 if (c
&& OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_EXCLUSIVE
)
3030 std::swap (input_stmt2
, scan_stmt2
);
3032 gimple_omp_set_body (input_stmt2
, NULL
);
3034 gimple_omp_set_body (input_stmt
, input_body
);
3035 gimple_omp_set_body (scan_stmt
, scan_body
);
3037 omp_context
*ctx
= new_omp_context (input_stmt
, outer_ctx
);
3038 scan_omp (gimple_omp_body_ptr (input_stmt
), ctx
);
3040 ctx
= new_omp_context (scan_stmt
, outer_ctx
);
3041 scan_omp (gimple_omp_body_ptr (scan_stmt
), ctx
);
3043 maybe_lookup_ctx (new_stmt
)->for_simd_scan_phase
= true;
3046 /* Scan an OpenMP sections directive. */
3049 scan_omp_sections (gomp_sections
*stmt
, omp_context
*outer_ctx
)
3053 ctx
= new_omp_context (stmt
, outer_ctx
);
3054 scan_sharing_clauses (gimple_omp_sections_clauses (stmt
), ctx
);
3055 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3058 /* Scan an OpenMP single directive. */
3061 scan_omp_single (gomp_single
*stmt
, omp_context
*outer_ctx
)
3066 ctx
= new_omp_context (stmt
, outer_ctx
);
3067 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3068 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3069 name
= create_tmp_var_name (".omp_copy_s");
3070 name
= build_decl (gimple_location (stmt
),
3071 TYPE_DECL
, name
, ctx
->record_type
);
3072 TYPE_NAME (ctx
->record_type
) = name
;
3074 scan_sharing_clauses (gimple_omp_single_clauses (stmt
), ctx
);
3075 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3077 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3078 ctx
->record_type
= NULL
;
3080 layout_type (ctx
->record_type
);
3083 /* Scan a GIMPLE_OMP_TARGET. */
3086 scan_omp_target (gomp_target
*stmt
, omp_context
*outer_ctx
)
3090 bool offloaded
= is_gimple_omp_offloaded (stmt
);
3091 tree clauses
= gimple_omp_target_clauses (stmt
);
3093 ctx
= new_omp_context (stmt
, outer_ctx
);
3094 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3095 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3096 name
= create_tmp_var_name (".omp_data_t");
3097 name
= build_decl (gimple_location (stmt
),
3098 TYPE_DECL
, name
, ctx
->record_type
);
3099 DECL_ARTIFICIAL (name
) = 1;
3100 DECL_NAMELESS (name
) = 1;
3101 TYPE_NAME (ctx
->record_type
) = name
;
3102 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3106 create_omp_child_function (ctx
, false);
3107 gimple_omp_target_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3110 scan_sharing_clauses (clauses
, ctx
);
3111 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3113 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3114 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3117 TYPE_FIELDS (ctx
->record_type
)
3118 = nreverse (TYPE_FIELDS (ctx
->record_type
));
3121 unsigned int align
= DECL_ALIGN (TYPE_FIELDS (ctx
->record_type
));
3122 for (tree field
= TYPE_FIELDS (ctx
->record_type
);
3124 field
= DECL_CHAIN (field
))
3125 gcc_assert (DECL_ALIGN (field
) == align
);
3127 layout_type (ctx
->record_type
);
3129 fixup_child_record_type (ctx
);
3132 if (ctx
->teams_nested_p
&& ctx
->nonteams_nested_p
)
3134 error_at (gimple_location (stmt
),
3135 "%<target%> construct with nested %<teams%> construct "
3136 "contains directives outside of the %<teams%> construct");
3137 gimple_omp_set_body (stmt
, gimple_build_bind (NULL
, NULL
, NULL
));
3141 /* Scan an OpenMP teams directive. */
3144 scan_omp_teams (gomp_teams
*stmt
, omp_context
*outer_ctx
)
3146 omp_context
*ctx
= new_omp_context (stmt
, outer_ctx
);
3148 if (!gimple_omp_teams_host (stmt
))
3150 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3151 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3154 taskreg_contexts
.safe_push (ctx
);
3155 gcc_assert (taskreg_nesting_level
== 1);
3156 ctx
->field_map
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
3157 ctx
->record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
3158 tree name
= create_tmp_var_name (".omp_data_s");
3159 name
= build_decl (gimple_location (stmt
),
3160 TYPE_DECL
, name
, ctx
->record_type
);
3161 DECL_ARTIFICIAL (name
) = 1;
3162 DECL_NAMELESS (name
) = 1;
3163 TYPE_NAME (ctx
->record_type
) = name
;
3164 TYPE_ARTIFICIAL (ctx
->record_type
) = 1;
3165 create_omp_child_function (ctx
, false);
3166 gimple_omp_teams_set_child_fn (stmt
, ctx
->cb
.dst_fn
);
3168 scan_sharing_clauses (gimple_omp_teams_clauses (stmt
), ctx
);
3169 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
3171 if (TYPE_FIELDS (ctx
->record_type
) == NULL
)
3172 ctx
->record_type
= ctx
->receiver_decl
= NULL
;
3175 /* Check nesting restrictions. */
3177 check_omp_nesting_restrictions (gimple
*stmt
, omp_context
*ctx
)
3181 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3182 inside an OpenACC CTX. */
3183 if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3184 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
)
3185 /* ..., except for the atomic codes that OpenACC shares with OpenMP. */
3187 else if (!(is_gimple_omp (stmt
)
3188 && is_gimple_omp_oacc (stmt
)))
3190 if (oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3192 error_at (gimple_location (stmt
),
3193 "non-OpenACC construct inside of OpenACC routine");
3197 for (omp_context
*octx
= ctx
; octx
!= NULL
; octx
= octx
->outer
)
3198 if (is_gimple_omp (octx
->stmt
)
3199 && is_gimple_omp_oacc (octx
->stmt
))
3201 error_at (gimple_location (stmt
),
3202 "non-OpenACC construct inside of OpenACC region");
3209 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
3210 && gimple_omp_target_kind (ctx
->stmt
) == GF_OMP_TARGET_KIND_REGION
)
3212 c
= omp_find_clause (gimple_omp_target_clauses (ctx
->stmt
),
3214 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3216 error_at (gimple_location (stmt
),
3217 "OpenMP constructs are not allowed in target region "
3218 "with %<ancestor%>");
3222 if (gimple_code (stmt
) == GIMPLE_OMP_TEAMS
&& !ctx
->teams_nested_p
)
3223 ctx
->teams_nested_p
= true;
3225 ctx
->nonteams_nested_p
= true;
3227 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
3229 && gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
3231 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
3232 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
3236 if (ctx
->order_concurrent
3237 && (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
3238 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3239 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
))
3241 error_at (gimple_location (stmt
),
3242 "OpenMP constructs other than %<parallel%>, %<loop%>"
3243 " or %<simd%> may not be nested inside a region with"
3244 " the %<order(concurrent)%> clause");
3247 if (gimple_code (stmt
) == GIMPLE_OMP_ORDERED
)
3249 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3250 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3252 if (omp_find_clause (c
, OMP_CLAUSE_THREADS
)
3253 && (ctx
->outer
== NULL
3254 || !gimple_omp_for_combined_into_p (ctx
->stmt
)
3255 || gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
3256 || (gimple_omp_for_kind (ctx
->outer
->stmt
)
3257 != GF_OMP_FOR_KIND_FOR
)
3258 || !gimple_omp_for_combined_p (ctx
->outer
->stmt
)))
3260 error_at (gimple_location (stmt
),
3261 "%<ordered simd threads%> must be closely "
3262 "nested inside of %<%s simd%> region",
3263 lang_GNU_Fortran () ? "do" : "for");
3269 else if (gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_LOAD
3270 || gimple_code (stmt
) == GIMPLE_OMP_ATOMIC_STORE
3271 || gimple_code (stmt
) == GIMPLE_OMP_SCAN
)
3273 else if (gimple_code (stmt
) == GIMPLE_OMP_FOR
3274 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
3276 error_at (gimple_location (stmt
),
3277 "OpenMP constructs other than "
3278 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3279 "not be nested inside %<simd%> region");
3282 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
)
3284 if ((gimple_code (stmt
) != GIMPLE_OMP_FOR
3285 || (gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_DISTRIBUTE
3286 && omp_find_clause (gimple_omp_for_clauses (stmt
),
3287 OMP_CLAUSE_BIND
) == NULL_TREE
))
3288 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
)
3290 error_at (gimple_location (stmt
),
3291 "only %<distribute%>, %<parallel%> or %<loop%> "
3292 "regions are allowed to be strictly nested inside "
3293 "%<teams%> region");
3297 else if (ctx
->order_concurrent
3298 && gimple_code (stmt
) != GIMPLE_OMP_PARALLEL
3299 && (gimple_code (stmt
) != GIMPLE_OMP_FOR
3300 || gimple_omp_for_kind (stmt
) != GF_OMP_FOR_KIND_SIMD
)
3301 && gimple_code (stmt
) != GIMPLE_OMP_SCAN
)
3304 error_at (gimple_location (stmt
),
3305 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3306 "%<simd%> may not be nested inside a %<loop%> region");
3308 error_at (gimple_location (stmt
),
3309 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3310 "%<simd%> may not be nested inside a region with "
3311 "the %<order(concurrent)%> clause");
3315 switch (gimple_code (stmt
))
3317 case GIMPLE_OMP_FOR
:
3318 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
)
3320 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
3322 if (ctx
!= NULL
&& gimple_code (ctx
->stmt
) != GIMPLE_OMP_TEAMS
)
3324 error_at (gimple_location (stmt
),
3325 "%<distribute%> region must be strictly nested "
3326 "inside %<teams%> construct");
3331 /* We split taskloop into task and nested taskloop in it. */
3332 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3334 /* For now, hope this will change and loop bind(parallel) will not
3335 be allowed in lots of contexts. */
3336 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
3337 && omp_find_clause (gimple_omp_for_clauses (stmt
), OMP_CLAUSE_BIND
))
3339 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_OACC_LOOP
)
3344 switch (gimple_code (ctx
->stmt
))
3346 case GIMPLE_OMP_FOR
:
3347 ok
= (gimple_omp_for_kind (ctx
->stmt
)
3348 == GF_OMP_FOR_KIND_OACC_LOOP
);
3351 case GIMPLE_OMP_TARGET
:
3352 switch (gimple_omp_target_kind (ctx
->stmt
))
3354 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3355 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3356 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3357 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3358 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3369 else if (oacc_get_fn_attrib (current_function_decl
))
3373 error_at (gimple_location (stmt
),
3374 "OpenACC loop directive must be associated with"
3375 " an OpenACC compute region");
3381 if (is_gimple_call (stmt
)
3382 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3383 == BUILT_IN_GOMP_CANCEL
3384 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3385 == BUILT_IN_GOMP_CANCELLATION_POINT
))
3387 const char *bad
= NULL
;
3388 const char *kind
= NULL
;
3389 const char *construct
3390 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3391 == BUILT_IN_GOMP_CANCEL
)
3393 : "cancellation point";
3396 error_at (gimple_location (stmt
), "orphaned %qs construct",
3400 switch (tree_fits_shwi_p (gimple_call_arg (stmt
, 0))
3401 ? tree_to_shwi (gimple_call_arg (stmt
, 0))
3405 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_PARALLEL
)
3407 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3408 == BUILT_IN_GOMP_CANCEL
3409 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3410 ctx
->cancellable
= true;
3414 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3415 || gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
)
3417 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3418 == BUILT_IN_GOMP_CANCEL
3419 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3421 ctx
->cancellable
= true;
3422 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3424 warning_at (gimple_location (stmt
), 0,
3425 "%<cancel for%> inside "
3426 "%<nowait%> for construct");
3427 if (omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3428 OMP_CLAUSE_ORDERED
))
3429 warning_at (gimple_location (stmt
), 0,
3430 "%<cancel for%> inside "
3431 "%<ordered%> for construct");
3436 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTIONS
3437 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SECTION
)
3439 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3440 == BUILT_IN_GOMP_CANCEL
3441 && !integer_zerop (gimple_call_arg (stmt
, 1)))
3443 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
3445 ctx
->cancellable
= true;
3446 if (omp_find_clause (gimple_omp_sections_clauses
3449 warning_at (gimple_location (stmt
), 0,
3450 "%<cancel sections%> inside "
3451 "%<nowait%> sections construct");
3455 gcc_assert (ctx
->outer
3456 && gimple_code (ctx
->outer
->stmt
)
3457 == GIMPLE_OMP_SECTIONS
);
3458 ctx
->outer
->cancellable
= true;
3459 if (omp_find_clause (gimple_omp_sections_clauses
3462 warning_at (gimple_location (stmt
), 0,
3463 "%<cancel sections%> inside "
3464 "%<nowait%> sections construct");
3470 if (!is_task_ctx (ctx
)
3471 && (!is_taskloop_ctx (ctx
)
3472 || ctx
->outer
== NULL
3473 || !is_task_ctx (ctx
->outer
)))
3477 for (omp_context
*octx
= ctx
->outer
;
3478 octx
; octx
= octx
->outer
)
3480 switch (gimple_code (octx
->stmt
))
3482 case GIMPLE_OMP_TASKGROUP
:
3484 case GIMPLE_OMP_TARGET
:
3485 if (gimple_omp_target_kind (octx
->stmt
)
3486 != GF_OMP_TARGET_KIND_REGION
)
3489 case GIMPLE_OMP_PARALLEL
:
3490 case GIMPLE_OMP_TEAMS
:
3491 error_at (gimple_location (stmt
),
3492 "%<%s taskgroup%> construct not closely "
3493 "nested inside of %<taskgroup%> region",
3496 case GIMPLE_OMP_TASK
:
3497 if (gimple_omp_task_taskloop_p (octx
->stmt
)
3499 && is_taskloop_ctx (octx
->outer
))
3502 = gimple_omp_for_clauses (octx
->outer
->stmt
);
3503 if (!omp_find_clause (clauses
, OMP_CLAUSE_NOGROUP
))
3512 ctx
->cancellable
= true;
3517 error_at (gimple_location (stmt
), "invalid arguments");
3522 error_at (gimple_location (stmt
),
3523 "%<%s %s%> construct not closely nested inside of %qs",
3524 construct
, kind
, bad
);
3529 case GIMPLE_OMP_SECTIONS
:
3530 case GIMPLE_OMP_SINGLE
:
3531 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3532 switch (gimple_code (ctx
->stmt
))
3534 case GIMPLE_OMP_FOR
:
3535 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3536 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3539 case GIMPLE_OMP_SECTIONS
:
3540 case GIMPLE_OMP_SINGLE
:
3541 case GIMPLE_OMP_ORDERED
:
3542 case GIMPLE_OMP_MASTER
:
3543 case GIMPLE_OMP_MASKED
:
3544 case GIMPLE_OMP_TASK
:
3545 case GIMPLE_OMP_CRITICAL
:
3546 if (is_gimple_call (stmt
))
3548 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt
))
3549 != BUILT_IN_GOMP_BARRIER
)
3551 error_at (gimple_location (stmt
),
3552 "barrier region may not be closely nested inside "
3553 "of work-sharing, %<loop%>, %<critical%>, "
3554 "%<ordered%>, %<master%>, %<masked%>, explicit "
3555 "%<task%> or %<taskloop%> region");
3558 error_at (gimple_location (stmt
),
3559 "work-sharing region may not be closely nested inside "
3560 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3561 "%<master%>, %<masked%>, explicit %<task%> or "
3562 "%<taskloop%> region");
3564 case GIMPLE_OMP_PARALLEL
:
3565 case GIMPLE_OMP_TEAMS
:
3567 case GIMPLE_OMP_TARGET
:
3568 if (gimple_omp_target_kind (ctx
->stmt
)
3569 == GF_OMP_TARGET_KIND_REGION
)
3576 case GIMPLE_OMP_MASTER
:
3577 case GIMPLE_OMP_MASKED
:
3578 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3579 switch (gimple_code (ctx
->stmt
))
3581 case GIMPLE_OMP_FOR
:
3582 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3583 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3586 case GIMPLE_OMP_SECTIONS
:
3587 case GIMPLE_OMP_SINGLE
:
3588 case GIMPLE_OMP_TASK
:
3589 error_at (gimple_location (stmt
),
3590 "%qs region may not be closely nested inside "
3591 "of work-sharing, %<loop%>, explicit %<task%> or "
3592 "%<taskloop%> region",
3593 gimple_code (stmt
) == GIMPLE_OMP_MASTER
3594 ? "master" : "masked");
3596 case GIMPLE_OMP_PARALLEL
:
3597 case GIMPLE_OMP_TEAMS
:
3599 case GIMPLE_OMP_TARGET
:
3600 if (gimple_omp_target_kind (ctx
->stmt
)
3601 == GF_OMP_TARGET_KIND_REGION
)
3608 case GIMPLE_OMP_SCOPE
:
3609 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3610 switch (gimple_code (ctx
->stmt
))
3612 case GIMPLE_OMP_FOR
:
3613 if (gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_FOR
3614 && gimple_omp_for_kind (ctx
->stmt
) != GF_OMP_FOR_KIND_TASKLOOP
)
3617 case GIMPLE_OMP_SECTIONS
:
3618 case GIMPLE_OMP_SINGLE
:
3619 case GIMPLE_OMP_TASK
:
3620 case GIMPLE_OMP_CRITICAL
:
3621 case GIMPLE_OMP_ORDERED
:
3622 case GIMPLE_OMP_MASTER
:
3623 case GIMPLE_OMP_MASKED
:
3624 error_at (gimple_location (stmt
),
3625 "%<scope%> region may not be closely nested inside "
3626 "of work-sharing, %<loop%>, explicit %<task%>, "
3627 "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3628 "or %<masked%> region");
3630 case GIMPLE_OMP_PARALLEL
:
3631 case GIMPLE_OMP_TEAMS
:
3633 case GIMPLE_OMP_TARGET
:
3634 if (gimple_omp_target_kind (ctx
->stmt
)
3635 == GF_OMP_TARGET_KIND_REGION
)
3642 case GIMPLE_OMP_TASK
:
3643 for (c
= gimple_omp_task_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3644 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3646 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3647 error_at (OMP_CLAUSE_LOCATION (c
),
3648 "%<%s(%s)%> is only allowed in %<omp ordered%>",
3649 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross",
3650 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3654 case GIMPLE_OMP_ORDERED
:
3655 for (c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3656 c
; c
= OMP_CLAUSE_CHAIN (c
))
3658 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
)
3660 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
3662 error_at (OMP_CLAUSE_LOCATION (c
),
3663 "invalid depend kind in omp %<ordered%> %<depend%>");
3666 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREADS
3667 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SIMD
);
3672 /* Look for containing ordered(N) loop. */
3674 || gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
3676 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3677 OMP_CLAUSE_ORDERED
)) == NULL_TREE
)
3679 error_at (OMP_CLAUSE_LOCATION (c
),
3680 "%<ordered%> construct with %<depend%> clause "
3681 "must be closely nested inside an %<ordered%> loop");
3685 c
= gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
));
3686 if (omp_find_clause (c
, OMP_CLAUSE_SIMD
))
3688 /* ordered simd must be closely nested inside of simd region,
3689 and simd region must not encounter constructs other than
3690 ordered simd, therefore ordered simd may be either orphaned,
3691 or ctx->stmt must be simd. The latter case is handled already
3695 error_at (gimple_location (stmt
),
3696 "%<ordered%> %<simd%> must be closely nested inside "
3701 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3702 switch (gimple_code (ctx
->stmt
))
3704 case GIMPLE_OMP_CRITICAL
:
3705 case GIMPLE_OMP_TASK
:
3706 case GIMPLE_OMP_ORDERED
:
3707 ordered_in_taskloop
:
3708 error_at (gimple_location (stmt
),
3709 "%<ordered%> region may not be closely nested inside "
3710 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3711 "%<taskloop%> region");
3713 case GIMPLE_OMP_FOR
:
3714 if (gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_TASKLOOP
)
3715 goto ordered_in_taskloop
;
3717 o
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3718 OMP_CLAUSE_ORDERED
);
3721 error_at (gimple_location (stmt
),
3722 "%<ordered%> region must be closely nested inside "
3723 "a loop region with an %<ordered%> clause");
3726 if (!gimple_omp_ordered_standalone_p (stmt
))
3728 if (OMP_CLAUSE_ORDERED_DOACROSS (o
))
3730 error_at (gimple_location (stmt
),
3731 "%<ordered%> construct without %<doacross%> or "
3732 "%<depend%> clauses must not have the same "
3733 "binding region as %<ordered%> construct with "
3737 else if (OMP_CLAUSE_ORDERED_EXPR (o
))
3740 = omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
3741 OMP_CLAUSE_COLLAPSE
);
3743 o_n
= tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o
));
3744 HOST_WIDE_INT c_n
= 1;
3746 c_n
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co
));
3749 error_at (gimple_location (stmt
),
3750 "%<ordered%> construct without %<doacross%> "
3751 "or %<depend%> clauses binds to loop where "
3752 "%<collapse%> argument %wd is different from "
3753 "%<ordered%> argument %wd", c_n
, o_n
);
3759 case GIMPLE_OMP_TARGET
:
3760 if (gimple_omp_target_kind (ctx
->stmt
)
3761 != GF_OMP_TARGET_KIND_REGION
)
3764 case GIMPLE_OMP_PARALLEL
:
3765 case GIMPLE_OMP_TEAMS
:
3766 error_at (gimple_location (stmt
),
3767 "%<ordered%> region must be closely nested inside "
3768 "a loop region with an %<ordered%> clause");
3774 case GIMPLE_OMP_CRITICAL
:
3777 = gimple_omp_critical_name (as_a
<gomp_critical
*> (stmt
));
3778 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3779 if (gomp_critical
*other_crit
3780 = dyn_cast
<gomp_critical
*> (ctx
->stmt
))
3781 if (this_stmt_name
== gimple_omp_critical_name (other_crit
))
3783 error_at (gimple_location (stmt
),
3784 "%<critical%> region may not be nested inside "
3785 "a %<critical%> region with the same name");
3790 case GIMPLE_OMP_TEAMS
:
3793 else if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
3794 || (gimple_omp_target_kind (ctx
->stmt
)
3795 != GF_OMP_TARGET_KIND_REGION
))
3797 /* Teams construct can appear either strictly nested inside of
3798 target construct with no intervening stmts, or can be encountered
3799 only by initial task (so must not appear inside any OpenMP
3801 error_at (gimple_location (stmt
),
3802 "%<teams%> construct must be closely nested inside of "
3803 "%<target%> construct or not nested in any OpenMP "
3808 case GIMPLE_OMP_TARGET
:
3809 for (c
= gimple_omp_target_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
3810 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
)
3812 enum omp_clause_doacross_kind kind
= OMP_CLAUSE_DOACROSS_KIND (c
);
3813 error_at (OMP_CLAUSE_LOCATION (c
),
3814 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3815 kind
== OMP_CLAUSE_DOACROSS_SOURCE
? "source" : "sink");
3818 if (is_gimple_omp_offloaded (stmt
)
3819 && oacc_get_fn_attrib (cfun
->decl
) != NULL
)
3821 error_at (gimple_location (stmt
),
3822 "OpenACC region inside of OpenACC routine, nested "
3823 "parallelism not supported yet");
3826 for (; ctx
!= NULL
; ctx
= ctx
->outer
)
3828 if (gimple_code (ctx
->stmt
) != GIMPLE_OMP_TARGET
)
3830 if (is_gimple_omp (stmt
)
3831 && is_gimple_omp_oacc (stmt
)
3832 && is_gimple_omp (ctx
->stmt
))
3834 error_at (gimple_location (stmt
),
3835 "OpenACC construct inside of non-OpenACC region");
3841 const char *stmt_name
, *ctx_stmt_name
;
3842 switch (gimple_omp_target_kind (stmt
))
3844 case GF_OMP_TARGET_KIND_REGION
: stmt_name
= "target"; break;
3845 case GF_OMP_TARGET_KIND_DATA
: stmt_name
= "target data"; break;
3846 case GF_OMP_TARGET_KIND_UPDATE
: stmt_name
= "target update"; break;
3847 case GF_OMP_TARGET_KIND_ENTER_DATA
:
3848 stmt_name
= "target enter data"; break;
3849 case GF_OMP_TARGET_KIND_EXIT_DATA
:
3850 stmt_name
= "target exit data"; break;
3851 case GF_OMP_TARGET_KIND_OACC_PARALLEL
: stmt_name
= "parallel"; break;
3852 case GF_OMP_TARGET_KIND_OACC_KERNELS
: stmt_name
= "kernels"; break;
3853 case GF_OMP_TARGET_KIND_OACC_SERIAL
: stmt_name
= "serial"; break;
3854 case GF_OMP_TARGET_KIND_OACC_DATA
: stmt_name
= "data"; break;
3855 case GF_OMP_TARGET_KIND_OACC_UPDATE
: stmt_name
= "update"; break;
3856 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
3857 stmt_name
= "enter data"; break;
3858 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
3859 stmt_name
= "exit data"; break;
3860 case GF_OMP_TARGET_KIND_OACC_DECLARE
: stmt_name
= "declare"; break;
3861 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
: stmt_name
= "host_data";
3863 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3864 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3865 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3866 /* OpenACC 'kernels' decomposed parts. */
3867 stmt_name
= "kernels"; break;
3868 default: gcc_unreachable ();
3870 switch (gimple_omp_target_kind (ctx
->stmt
))
3872 case GF_OMP_TARGET_KIND_REGION
: ctx_stmt_name
= "target"; break;
3873 case GF_OMP_TARGET_KIND_DATA
: ctx_stmt_name
= "target data"; break;
3874 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
3875 ctx_stmt_name
= "parallel"; break;
3876 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
3877 ctx_stmt_name
= "kernels"; break;
3878 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
3879 ctx_stmt_name
= "serial"; break;
3880 case GF_OMP_TARGET_KIND_OACC_DATA
: ctx_stmt_name
= "data"; break;
3881 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
3882 ctx_stmt_name
= "host_data"; break;
3883 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
3884 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
3885 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
3886 /* OpenACC 'kernels' decomposed parts. */
3887 ctx_stmt_name
= "kernels"; break;
3888 default: gcc_unreachable ();
3891 /* OpenACC/OpenMP mismatch? */
3892 if (is_gimple_omp_oacc (stmt
)
3893 != is_gimple_omp_oacc (ctx
->stmt
))
3895 error_at (gimple_location (stmt
),
3896 "%s %qs construct inside of %s %qs region",
3897 (is_gimple_omp_oacc (stmt
)
3898 ? "OpenACC" : "OpenMP"), stmt_name
,
3899 (is_gimple_omp_oacc (ctx
->stmt
)
3900 ? "OpenACC" : "OpenMP"), ctx_stmt_name
);
3903 if (is_gimple_omp_offloaded (ctx
->stmt
))
3905 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3906 if (is_gimple_omp_oacc (ctx
->stmt
))
3908 error_at (gimple_location (stmt
),
3909 "%qs construct inside of %qs region",
3910 stmt_name
, ctx_stmt_name
);
3915 if ((gimple_omp_target_kind (ctx
->stmt
)
3916 == GF_OMP_TARGET_KIND_REGION
)
3917 && (gimple_omp_target_kind (stmt
)
3918 == GF_OMP_TARGET_KIND_REGION
))
3920 c
= omp_find_clause (gimple_omp_target_clauses (stmt
),
3922 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
3925 warning_at (gimple_location (stmt
), 0,
3926 "%qs construct inside of %qs region",
3927 stmt_name
, ctx_stmt_name
);
3939 /* Helper function scan_omp.
3941 Callback for walk_tree or operators in walk_gimple_stmt used to
3942 scan for OMP directives in TP. */
3945 scan_omp_1_op (tree
*tp
, int *walk_subtrees
, void *data
)
3947 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
3948 omp_context
*ctx
= (omp_context
*) wi
->info
;
3951 switch (TREE_CODE (t
))
3959 tree repl
= remap_decl (t
, &ctx
->cb
);
3960 gcc_checking_assert (TREE_CODE (repl
) != ERROR_MARK
);
3966 if (ctx
&& TYPE_P (t
))
3967 *tp
= remap_type (t
, &ctx
->cb
);
3968 else if (!DECL_P (t
))
3973 tree tem
= remap_type (TREE_TYPE (t
), &ctx
->cb
);
3974 if (tem
!= TREE_TYPE (t
))
3976 if (TREE_CODE (t
) == INTEGER_CST
)
3977 *tp
= wide_int_to_tree (tem
, wi::to_wide (t
));
3979 TREE_TYPE (t
) = tem
;
3989 /* Return true if FNDECL is a setjmp or a longjmp. */
3992 setjmp_or_longjmp_p (const_tree fndecl
)
3994 if (fndecl_built_in_p (fndecl
, BUILT_IN_SETJMP
)
3995 || fndecl_built_in_p (fndecl
, BUILT_IN_LONGJMP
))
3998 tree declname
= DECL_NAME (fndecl
);
4000 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4001 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4002 || !TREE_PUBLIC (fndecl
))
4005 const char *name
= IDENTIFIER_POINTER (declname
);
4006 return !strcmp (name
, "setjmp") || !strcmp (name
, "longjmp");
4009 /* Return true if FNDECL is an omp_* runtime API call. */
4012 omp_runtime_api_call (const_tree fndecl
)
4014 tree declname
= DECL_NAME (fndecl
);
4016 || (DECL_CONTEXT (fndecl
) != NULL_TREE
4017 && TREE_CODE (DECL_CONTEXT (fndecl
)) != TRANSLATION_UNIT_DECL
)
4018 || !TREE_PUBLIC (fndecl
))
4021 const char *name
= IDENTIFIER_POINTER (declname
);
4022 if (!startswith (name
, "omp_"))
4025 static const char *omp_runtime_apis
[] =
4027 /* This array has 3 sections. First omp_* calls that don't
4028 have any suffixes. */
4037 "target_associate_ptr",
4038 "target_disassociate_ptr",
4040 "target_is_accessible",
4041 "target_is_present",
4043 "target_memcpy_async",
4044 "target_memcpy_rect",
4045 "target_memcpy_rect_async",
4047 /* Now omp_* calls that are available as omp_* and omp_*_; however, the
4048 DECL_NAME is always omp_* without tailing underscore. */
4050 "destroy_allocator",
4052 "destroy_nest_lock",
4056 "get_affinity_format",
4058 "get_default_allocator",
4059 "get_default_device",
4062 "get_initial_device",
4064 "get_max_active_levels",
4065 "get_max_task_priority",
4074 "get_partition_num_places",
4077 "get_supported_active_levels",
4079 "get_teams_thread_limit",
4088 "is_initial_device",
4090 "pause_resource_all",
4091 "set_affinity_format",
4092 "set_default_allocator",
4100 /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
4101 as DECL_NAME only omp_* and omp_*_8 appear. */
4103 "get_ancestor_thread_num",
4105 "get_partition_place_nums",
4106 "get_place_num_procs",
4107 "get_place_proc_ids",
4110 "set_default_device",
4112 "set_max_active_levels",
4117 "set_teams_thread_limit"
4121 for (unsigned i
= 0; i
< ARRAY_SIZE (omp_runtime_apis
); i
++)
4123 if (omp_runtime_apis
[i
] == NULL
)
4128 size_t len
= strlen (omp_runtime_apis
[i
]);
4129 if (strncmp (name
+ 4, omp_runtime_apis
[i
], len
) == 0
4130 && (name
[4 + len
] == '\0'
4131 || (mode
> 1 && strcmp (name
+ 4 + len
, "_8") == 0)))
4137 /* Helper function for scan_omp.
4139 Callback for walk_gimple_stmt used to scan for OMP directives in
4140 the current statement in GSI. */
4143 scan_omp_1_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
4144 struct walk_stmt_info
*wi
)
4146 gimple
*stmt
= gsi_stmt (*gsi
);
4147 omp_context
*ctx
= (omp_context
*) wi
->info
;
4149 if (gimple_has_location (stmt
))
4150 input_location
= gimple_location (stmt
);
4152 /* Check the nesting restrictions. */
4153 bool remove
= false;
4154 if (is_gimple_omp (stmt
))
4155 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4156 else if (is_gimple_call (stmt
))
4158 tree fndecl
= gimple_call_fndecl (stmt
);
4162 && gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4163 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
4164 && setjmp_or_longjmp_p (fndecl
)
4168 error_at (gimple_location (stmt
),
4169 "setjmp/longjmp inside %<simd%> construct");
4171 else if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4172 switch (DECL_FUNCTION_CODE (fndecl
))
4174 case BUILT_IN_GOMP_BARRIER
:
4175 case BUILT_IN_GOMP_CANCEL
:
4176 case BUILT_IN_GOMP_CANCELLATION_POINT
:
4177 case BUILT_IN_GOMP_TASKYIELD
:
4178 case BUILT_IN_GOMP_TASKWAIT
:
4179 case BUILT_IN_GOMP_TASKGROUP_START
:
4180 case BUILT_IN_GOMP_TASKGROUP_END
:
4181 remove
= !check_omp_nesting_restrictions (stmt
, ctx
);
4188 omp_context
*octx
= ctx
;
4189 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SCAN
&& ctx
->outer
)
4191 if (octx
->order_concurrent
&& omp_runtime_api_call (fndecl
))
4194 error_at (gimple_location (stmt
),
4195 "OpenMP runtime API call %qD in a region with "
4196 "%<order(concurrent)%> clause", fndecl
);
4198 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
4199 && omp_runtime_api_call (fndecl
)
4200 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4201 != strlen ("omp_get_num_teams"))
4202 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4203 "omp_get_num_teams") != 0)
4204 && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl
))
4205 != strlen ("omp_get_team_num"))
4206 || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl
)),
4207 "omp_get_team_num") != 0))
4210 error_at (gimple_location (stmt
),
4211 "OpenMP runtime API call %qD strictly nested in a "
4212 "%<teams%> region", fndecl
);
4214 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TARGET
4215 && (gimple_omp_target_kind (ctx
->stmt
)
4216 == GF_OMP_TARGET_KIND_REGION
)
4217 && omp_runtime_api_call (fndecl
))
4219 tree tgt_clauses
= gimple_omp_target_clauses (ctx
->stmt
);
4220 tree c
= omp_find_clause (tgt_clauses
, OMP_CLAUSE_DEVICE
);
4221 if (c
&& OMP_CLAUSE_DEVICE_ANCESTOR (c
))
4222 error_at (gimple_location (stmt
),
4223 "OpenMP runtime API call %qD in a region with "
4224 "%<device(ancestor)%> clause", fndecl
);
4231 stmt
= gimple_build_nop ();
4232 gsi_replace (gsi
, stmt
, false);
4235 *handled_ops_p
= true;
4237 switch (gimple_code (stmt
))
4239 case GIMPLE_OMP_PARALLEL
:
4240 taskreg_nesting_level
++;
4241 scan_omp_parallel (gsi
, ctx
);
4242 taskreg_nesting_level
--;
4245 case GIMPLE_OMP_TASK
:
4246 taskreg_nesting_level
++;
4247 scan_omp_task (gsi
, ctx
);
4248 taskreg_nesting_level
--;
4251 case GIMPLE_OMP_FOR
:
4252 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4253 == GF_OMP_FOR_KIND_SIMD
)
4254 && gimple_omp_for_combined_into_p (stmt
)
4255 && gimple_code (ctx
->stmt
) != GIMPLE_OMP_SCAN
)
4257 tree clauses
= gimple_omp_for_clauses (as_a
<gomp_for
*> (stmt
));
4258 tree c
= omp_find_clause (clauses
, OMP_CLAUSE_REDUCTION
);
4259 if (c
&& OMP_CLAUSE_REDUCTION_INSCAN (c
) && !seen_error ())
4261 scan_omp_simd_scan (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4265 if ((gimple_omp_for_kind (as_a
<gomp_for
*> (stmt
))
4266 == GF_OMP_FOR_KIND_SIMD
)
4267 && omp_maybe_offloaded_ctx (ctx
)
4268 && omp_max_simt_vf ()
4269 && gimple_omp_for_collapse (stmt
) == 1)
4270 scan_omp_simd (gsi
, as_a
<gomp_for
*> (stmt
), ctx
);
4272 scan_omp_for (as_a
<gomp_for
*> (stmt
), ctx
);
4275 case GIMPLE_OMP_SCOPE
:
4276 ctx
= new_omp_context (stmt
, ctx
);
4277 scan_sharing_clauses (gimple_omp_scope_clauses (stmt
), ctx
);
4278 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4281 case GIMPLE_OMP_SECTIONS
:
4282 scan_omp_sections (as_a
<gomp_sections
*> (stmt
), ctx
);
4285 case GIMPLE_OMP_SINGLE
:
4286 scan_omp_single (as_a
<gomp_single
*> (stmt
), ctx
);
4289 case GIMPLE_OMP_SCAN
:
4290 if (tree clauses
= gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)))
4292 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_INCLUSIVE
)
4293 ctx
->scan_inclusive
= true;
4294 else if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_EXCLUSIVE
)
4295 ctx
->scan_exclusive
= true;
4298 case GIMPLE_OMP_SECTION
:
4299 case GIMPLE_OMP_MASTER
:
4300 case GIMPLE_OMP_ORDERED
:
4301 case GIMPLE_OMP_CRITICAL
:
4302 ctx
= new_omp_context (stmt
, ctx
);
4303 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4306 case GIMPLE_OMP_MASKED
:
4307 ctx
= new_omp_context (stmt
, ctx
);
4308 scan_sharing_clauses (gimple_omp_masked_clauses (stmt
), ctx
);
4309 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4312 case GIMPLE_OMP_TASKGROUP
:
4313 ctx
= new_omp_context (stmt
, ctx
);
4314 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt
), ctx
);
4315 scan_omp (gimple_omp_body_ptr (stmt
), ctx
);
4318 case GIMPLE_OMP_TARGET
:
4319 if (is_gimple_omp_offloaded (stmt
))
4321 taskreg_nesting_level
++;
4322 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4323 taskreg_nesting_level
--;
4326 scan_omp_target (as_a
<gomp_target
*> (stmt
), ctx
);
4329 case GIMPLE_OMP_TEAMS
:
4330 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
4332 taskreg_nesting_level
++;
4333 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4334 taskreg_nesting_level
--;
4337 scan_omp_teams (as_a
<gomp_teams
*> (stmt
), ctx
);
4344 *handled_ops_p
= false;
4346 for (var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
4348 var
= DECL_CHAIN (var
))
4349 insert_decl_map (&ctx
->cb
, var
, var
);
4353 *handled_ops_p
= false;
4361 /* Scan all the statements starting at the current statement. CTX
4362 contains context information about the OMP directives and
4363 clauses found during the scan. */
4366 scan_omp (gimple_seq
*body_p
, omp_context
*ctx
)
4368 location_t saved_location
;
4369 struct walk_stmt_info wi
;
4371 memset (&wi
, 0, sizeof (wi
));
4373 wi
.want_locations
= true;
4375 saved_location
= input_location
;
4376 walk_gimple_seq_mod (body_p
, scan_omp_1_stmt
, scan_omp_1_op
, &wi
);
4377 input_location
= saved_location
;
4380 /* Re-gimplification and code generation routines. */
4382 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4383 of BIND if in a method. */
4386 maybe_remove_omp_member_access_dummy_vars (gbind
*bind
)
4388 if (DECL_ARGUMENTS (current_function_decl
)
4389 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
4390 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
4393 tree vars
= gimple_bind_vars (bind
);
4394 for (tree
*pvar
= &vars
; *pvar
; )
4395 if (omp_member_access_dummy_var (*pvar
))
4396 *pvar
= DECL_CHAIN (*pvar
);
4398 pvar
= &DECL_CHAIN (*pvar
);
4399 gimple_bind_set_vars (bind
, vars
);
4403 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4404 block and its subblocks. */
4407 remove_member_access_dummy_vars (tree block
)
4409 for (tree
*pvar
= &BLOCK_VARS (block
); *pvar
; )
4410 if (omp_member_access_dummy_var (*pvar
))
4411 *pvar
= DECL_CHAIN (*pvar
);
4413 pvar
= &DECL_CHAIN (*pvar
);
4415 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
4416 remove_member_access_dummy_vars (block
);
4419 /* If a context was created for STMT when it was scanned, return it. */
4421 static omp_context
*
4422 maybe_lookup_ctx (gimple
*stmt
)
4425 n
= splay_tree_lookup (all_contexts
, (splay_tree_key
) stmt
);
4426 return n
? (omp_context
*) n
->value
: NULL
;
4430 /* Find the mapping for DECL in CTX or the immediately enclosing
4431 context that has a mapping for DECL.
4433 If CTX is a nested parallel directive, we may have to use the decl
4434 mappings created in CTX's parent context. Suppose that we have the
4435 following parallel nesting (variable UIDs showed for clarity):
4438 #omp parallel shared(iD.1562) -> outer parallel
4439 iD.1562 = iD.1562 + 1;
4441 #omp parallel shared (iD.1562) -> inner parallel
4442 iD.1562 = iD.1562 - 1;
4444 Each parallel structure will create a distinct .omp_data_s structure
4445 for copying iD.1562 in/out of the directive:
4447 outer parallel .omp_data_s.1.i -> iD.1562
4448 inner parallel .omp_data_s.2.i -> iD.1562
4450 A shared variable mapping will produce a copy-out operation before
4451 the parallel directive and a copy-in operation after it. So, in
4452 this case we would have:
4455 .omp_data_o.1.i = iD.1562;
4456 #omp parallel shared(iD.1562) -> outer parallel
4457 .omp_data_i.1 = &.omp_data_o.1
4458 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4460 .omp_data_o.2.i = iD.1562; -> **
4461 #omp parallel shared(iD.1562) -> inner parallel
4462 .omp_data_i.2 = &.omp_data_o.2
4463 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4466 ** This is a problem. The symbol iD.1562 cannot be referenced
4467 inside the body of the outer parallel region. But since we are
4468 emitting this copy operation while expanding the inner parallel
4469 directive, we need to access the CTX structure of the outer
4470 parallel directive to get the correct mapping:
4472 .omp_data_o.2.i = .omp_data_i.1->i
4474 Since there may be other workshare or parallel directives enclosing
4475 the parallel directive, it may be necessary to walk up the context
4476 parent chain. This is not a problem in general because nested
4477 parallelism happens only rarely. */
4480 lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4485 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4486 t
= maybe_lookup_decl (decl
, up
);
4488 gcc_assert (!ctx
->is_nested
|| t
|| is_global_var (decl
));
4490 return t
? t
: decl
;
4494 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4495 in outer contexts. */
4498 maybe_lookup_decl_in_outer_ctx (tree decl
, omp_context
*ctx
)
4503 for (up
= ctx
->outer
, t
= NULL
; up
&& t
== NULL
; up
= up
->outer
)
4504 t
= maybe_lookup_decl (decl
, up
);
4506 return t
? t
: decl
;
4510 /* Construct the initialization value for reduction operation OP. */
4513 omp_reduction_init_op (location_t loc
, enum tree_code op
, tree type
)
4522 case TRUTH_ORIF_EXPR
:
4523 case TRUTH_XOR_EXPR
:
4525 return build_zero_cst (type
);
4528 case TRUTH_AND_EXPR
:
4529 case TRUTH_ANDIF_EXPR
:
4531 return fold_convert_loc (loc
, type
, integer_one_node
);
4534 return fold_convert_loc (loc
, type
, integer_minus_one_node
);
4537 if (SCALAR_FLOAT_TYPE_P (type
))
4539 REAL_VALUE_TYPE min
;
4540 if (HONOR_INFINITIES (type
))
4541 real_arithmetic (&min
, NEGATE_EXPR
, &dconstinf
, NULL
);
4543 real_maxval (&min
, 1, TYPE_MODE (type
));
4544 return build_real (type
, min
);
4546 else if (POINTER_TYPE_P (type
))
4549 = wi::min_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4550 return wide_int_to_tree (type
, min
);
4554 gcc_assert (INTEGRAL_TYPE_P (type
));
4555 return TYPE_MIN_VALUE (type
);
4559 if (SCALAR_FLOAT_TYPE_P (type
))
4561 REAL_VALUE_TYPE max
;
4562 if (HONOR_INFINITIES (type
))
4565 real_maxval (&max
, 0, TYPE_MODE (type
));
4566 return build_real (type
, max
);
4568 else if (POINTER_TYPE_P (type
))
4571 = wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
4572 return wide_int_to_tree (type
, max
);
4576 gcc_assert (INTEGRAL_TYPE_P (type
));
4577 return TYPE_MAX_VALUE (type
);
4585 /* Construct the initialization value for reduction CLAUSE. */
4588 omp_reduction_init (tree clause
, tree type
)
4590 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause
),
4591 OMP_CLAUSE_REDUCTION_CODE (clause
), type
);
4594 /* Return alignment to be assumed for var in CLAUSE, which should be
4595 OMP_CLAUSE_ALIGNED. */
4598 omp_clause_aligned_alignment (tree clause
)
4600 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
4601 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
);
4603 /* Otherwise return implementation defined alignment. */
4604 unsigned int al
= 1;
4605 opt_scalar_mode mode_iter
;
4606 auto_vector_modes modes
;
4607 targetm
.vectorize
.autovectorize_vector_modes (&modes
, true);
4608 static enum mode_class classes
[]
4609 = { MODE_INT
, MODE_VECTOR_INT
, MODE_FLOAT
, MODE_VECTOR_FLOAT
};
4610 for (int i
= 0; i
< 4; i
+= 2)
4611 /* The for loop above dictates that we only walk through scalar classes. */
4612 FOR_EACH_MODE_IN_CLASS (mode_iter
, classes
[i
])
4614 scalar_mode mode
= mode_iter
.require ();
4615 machine_mode vmode
= targetm
.vectorize
.preferred_simd_mode (mode
);
4616 if (GET_MODE_CLASS (vmode
) != classes
[i
+ 1])
4618 machine_mode alt_vmode
;
4619 for (unsigned int j
= 0; j
< modes
.length (); ++j
)
4620 if (related_vector_mode (modes
[j
], mode
).exists (&alt_vmode
)
4621 && known_ge (GET_MODE_SIZE (alt_vmode
), GET_MODE_SIZE (vmode
)))
4624 tree type
= lang_hooks
.types
.type_for_mode (mode
, 1);
4625 if (type
== NULL_TREE
|| TYPE_MODE (type
) != mode
)
4627 type
= build_vector_type_for_mode (type
, vmode
);
4628 if (TYPE_MODE (type
) != vmode
)
4630 if (TYPE_ALIGN_UNIT (type
) > al
)
4631 al
= TYPE_ALIGN_UNIT (type
);
4633 return build_int_cst (integer_type_node
, al
);
4637 /* This structure is part of the interface between lower_rec_simd_input_clauses
4638 and lower_rec_input_clauses. */
4640 class omplow_simd_context
{
4642 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4646 vec
<tree
, va_heap
> simt_eargs
;
4647 gimple_seq simt_dlist
;
4648 poly_uint64_pod max_vf
;
4652 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4656 lower_rec_simd_input_clauses (tree new_var
, omp_context
*ctx
,
4657 omplow_simd_context
*sctx
, tree
&ivar
,
4658 tree
&lvar
, tree
*rvar
= NULL
,
4661 if (known_eq (sctx
->max_vf
, 0U))
4663 sctx
->max_vf
= sctx
->is_simt
? omp_max_simt_vf () : omp_max_vf ();
4664 if (maybe_gt (sctx
->max_vf
, 1U))
4666 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
4667 OMP_CLAUSE_SAFELEN
);
4670 poly_uint64 safe_len
;
4671 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
4672 || maybe_lt (safe_len
, 1U))
4675 sctx
->max_vf
= lower_bound (sctx
->max_vf
, safe_len
);
4678 if (sctx
->is_simt
&& !known_eq (sctx
->max_vf
, 1U))
4680 for (tree c
= gimple_omp_for_clauses (ctx
->stmt
); c
;
4681 c
= OMP_CLAUSE_CHAIN (c
))
4683 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
4686 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
4688 /* UDR reductions are not supported yet for SIMT, disable
4694 if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c
))
4695 && !INTEGRAL_TYPE_P (TREE_TYPE (new_var
)))
4697 /* Doing boolean operations on non-integral types is
4698 for conformance only, it's not worth supporting this
4705 if (maybe_gt (sctx
->max_vf
, 1U))
4707 sctx
->idx
= create_tmp_var (unsigned_type_node
);
4708 sctx
->lane
= create_tmp_var (unsigned_type_node
);
4711 if (known_eq (sctx
->max_vf
, 1U))
4716 if (is_gimple_reg (new_var
))
4718 ivar
= lvar
= new_var
;
4721 tree type
= TREE_TYPE (new_var
), ptype
= build_pointer_type (type
);
4722 ivar
= lvar
= create_tmp_var (type
);
4723 TREE_ADDRESSABLE (ivar
) = 1;
4724 DECL_ATTRIBUTES (ivar
) = tree_cons (get_identifier ("omp simt private"),
4725 NULL
, DECL_ATTRIBUTES (ivar
));
4726 sctx
->simt_eargs
.safe_push (build1 (ADDR_EXPR
, ptype
, ivar
));
4727 tree clobber
= build_clobber (type
);
4728 gimple
*g
= gimple_build_assign (ivar
, clobber
);
4729 gimple_seq_add_stmt (&sctx
->simt_dlist
, g
);
4733 tree atype
= build_array_type_nelts (TREE_TYPE (new_var
), sctx
->max_vf
);
4734 tree avar
= create_tmp_var_raw (atype
);
4735 if (TREE_ADDRESSABLE (new_var
))
4736 TREE_ADDRESSABLE (avar
) = 1;
4737 DECL_ATTRIBUTES (avar
)
4738 = tree_cons (get_identifier ("omp simd array"), NULL
,
4739 DECL_ATTRIBUTES (avar
));
4740 gimple_add_tmp_var (avar
);
4742 if (rvar
&& !ctx
->for_simd_scan_phase
)
4744 /* For inscan reductions, create another array temporary,
4745 which will hold the reduced value. */
4746 iavar
= create_tmp_var_raw (atype
);
4747 if (TREE_ADDRESSABLE (new_var
))
4748 TREE_ADDRESSABLE (iavar
) = 1;
4749 DECL_ATTRIBUTES (iavar
)
4750 = tree_cons (get_identifier ("omp simd array"), NULL
,
4751 tree_cons (get_identifier ("omp simd inscan"), NULL
,
4752 DECL_ATTRIBUTES (iavar
)));
4753 gimple_add_tmp_var (iavar
);
4754 ctx
->cb
.decl_map
->put (avar
, iavar
);
4755 if (sctx
->lastlane
== NULL_TREE
)
4756 sctx
->lastlane
= create_tmp_var (unsigned_type_node
);
4757 *rvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
,
4758 sctx
->lastlane
, NULL_TREE
, NULL_TREE
);
4759 TREE_THIS_NOTRAP (*rvar
) = 1;
4761 if (ctx
->scan_exclusive
)
4763 /* And for exclusive scan yet another one, which will
4764 hold the value during the scan phase. */
4765 tree savar
= create_tmp_var_raw (atype
);
4766 if (TREE_ADDRESSABLE (new_var
))
4767 TREE_ADDRESSABLE (savar
) = 1;
4768 DECL_ATTRIBUTES (savar
)
4769 = tree_cons (get_identifier ("omp simd array"), NULL
,
4770 tree_cons (get_identifier ("omp simd inscan "
4772 DECL_ATTRIBUTES (savar
)));
4773 gimple_add_tmp_var (savar
);
4774 ctx
->cb
.decl_map
->put (iavar
, savar
);
4775 *rvar2
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), savar
,
4776 sctx
->idx
, NULL_TREE
, NULL_TREE
);
4777 TREE_THIS_NOTRAP (*rvar2
) = 1;
4780 ivar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), iavar
, sctx
->idx
,
4781 NULL_TREE
, NULL_TREE
);
4782 lvar
= build4 (ARRAY_REF
, TREE_TYPE (new_var
), avar
, sctx
->lane
,
4783 NULL_TREE
, NULL_TREE
);
4784 TREE_THIS_NOTRAP (ivar
) = 1;
4785 TREE_THIS_NOTRAP (lvar
) = 1;
4787 if (DECL_P (new_var
))
4789 SET_DECL_VALUE_EXPR (new_var
, lvar
);
4790 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4795 /* Helper function of lower_rec_input_clauses. For a reference
4796 in simd reduction, add an underlying variable it will reference. */
4799 handle_simd_reference (location_t loc
, tree new_vard
, gimple_seq
*ilist
)
4801 tree z
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard
)));
4802 if (TREE_CONSTANT (z
))
4804 z
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard
)),
4805 get_name (new_vard
));
4806 gimple_add_tmp_var (z
);
4807 TREE_ADDRESSABLE (z
) = 1;
4808 z
= build_fold_addr_expr_loc (loc
, z
);
4809 gimplify_assign (new_vard
, z
, ilist
);
4813 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4814 code to emit (type) (tskred_temp[idx]). */
4817 task_reduction_read (gimple_seq
*ilist
, tree tskred_temp
, tree type
,
4820 unsigned HOST_WIDE_INT sz
4821 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node
));
4822 tree r
= build2 (MEM_REF
, pointer_sized_int_node
,
4823 tskred_temp
, build_int_cst (TREE_TYPE (tskred_temp
),
4825 tree v
= create_tmp_var (pointer_sized_int_node
);
4826 gimple
*g
= gimple_build_assign (v
, r
);
4827 gimple_seq_add_stmt (ilist
, g
);
4828 if (!useless_type_conversion_p (type
, pointer_sized_int_node
))
4830 v
= create_tmp_var (type
);
4831 g
= gimple_build_assign (v
, NOP_EXPR
, gimple_assign_lhs (g
));
4832 gimple_seq_add_stmt (ilist
, g
);
4837 /* Lower early initialization of privatized variable NEW_VAR
4838 if it needs an allocator (has allocate clause). */
4841 lower_private_allocate (tree var
, tree new_var
, tree
&allocator
,
4842 tree
&allocate_ptr
, gimple_seq
*ilist
,
4843 omp_context
*ctx
, bool is_ref
, tree size
)
4847 gcc_assert (allocate_ptr
== NULL_TREE
);
4848 if (ctx
->allocate_map
4849 && (DECL_P (new_var
) || (TYPE_P (new_var
) && size
)))
4850 if (tree
*allocatorp
= ctx
->allocate_map
->get (var
))
4851 allocator
= *allocatorp
;
4852 if (allocator
== NULL_TREE
)
4854 if (!is_ref
&& omp_privatize_by_reference (var
))
4856 allocator
= NULL_TREE
;
4860 unsigned HOST_WIDE_INT ialign
= 0;
4861 if (TREE_CODE (allocator
) == TREE_LIST
)
4863 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
4864 allocator
= TREE_PURPOSE (allocator
);
4866 if (TREE_CODE (allocator
) != INTEGER_CST
)
4867 allocator
= build_outer_var_ref (allocator
, ctx
, OMP_CLAUSE_ALLOCATE
);
4868 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
4869 if (TREE_CODE (allocator
) != INTEGER_CST
)
4871 tree var
= create_tmp_var (TREE_TYPE (allocator
));
4872 gimplify_assign (var
, allocator
, ilist
);
4876 tree ptr_type
, align
, sz
= size
;
4877 if (TYPE_P (new_var
))
4879 ptr_type
= build_pointer_type (new_var
);
4880 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (new_var
));
4884 ptr_type
= build_pointer_type (TREE_TYPE (TREE_TYPE (new_var
)));
4885 ialign
= MAX (ialign
, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type
)));
4889 ptr_type
= build_pointer_type (TREE_TYPE (new_var
));
4890 ialign
= MAX (ialign
, DECL_ALIGN_UNIT (new_var
));
4891 if (sz
== NULL_TREE
)
4892 sz
= fold_convert (size_type_node
, DECL_SIZE_UNIT (new_var
));
4894 align
= build_int_cst (size_type_node
, ialign
);
4895 if (TREE_CODE (sz
) != INTEGER_CST
)
4897 tree szvar
= create_tmp_var (size_type_node
);
4898 gimplify_assign (szvar
, sz
, ilist
);
4901 allocate_ptr
= create_tmp_var (ptr_type
);
4902 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
4903 gimple
*g
= gimple_build_call (a
, 3, align
, sz
, allocator
);
4904 gimple_call_set_lhs (g
, allocate_ptr
);
4905 gimple_seq_add_stmt (ilist
, g
);
4908 tree x
= build_simple_mem_ref (allocate_ptr
);
4909 TREE_THIS_NOTRAP (x
) = 1;
4910 SET_DECL_VALUE_EXPR (new_var
, x
);
4911 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
4916 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4917 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4918 private variables. Initialization statements go in ILIST, while calls
4919 to destructors go in DLIST. */
4922 lower_rec_input_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*dlist
,
4923 omp_context
*ctx
, struct omp_for_data
*fd
)
4925 tree c
, copyin_seq
, x
, ptr
;
4926 bool copyin_by_ref
= false;
4927 bool lastprivate_firstprivate
= false;
4928 bool reduction_omp_orig_ref
= false;
4930 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
4931 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
4932 omplow_simd_context sctx
= omplow_simd_context ();
4933 tree simt_lane
= NULL_TREE
, simtrec
= NULL_TREE
;
4934 tree ivar
= NULL_TREE
, lvar
= NULL_TREE
, uid
= NULL_TREE
;
4935 gimple_seq llist
[4] = { };
4936 tree nonconst_simd_if
= NULL_TREE
;
4939 sctx
.is_simt
= is_simd
&& omp_find_clause (clauses
, OMP_CLAUSE__SIMT_
);
4941 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4942 with data sharing clauses referencing variable sized vars. That
4943 is unnecessarily hard to support and very unlikely to result in
4944 vectorized code anyway. */
4946 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
4947 switch (OMP_CLAUSE_CODE (c
))
4949 case OMP_CLAUSE_LINEAR
:
4950 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
4953 case OMP_CLAUSE_PRIVATE
:
4954 case OMP_CLAUSE_FIRSTPRIVATE
:
4955 case OMP_CLAUSE_LASTPRIVATE
:
4956 if (is_variable_sized (OMP_CLAUSE_DECL (c
)))
4958 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4960 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4961 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4965 case OMP_CLAUSE_REDUCTION
:
4966 case OMP_CLAUSE_IN_REDUCTION
:
4967 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
4968 || is_variable_sized (OMP_CLAUSE_DECL (c
)))
4970 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c
)))
4972 tree rtype
= TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c
)));
4973 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype
)))
4978 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c
)))
4980 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c
)) != INTEGER_CST
)
4981 nonconst_simd_if
= OMP_CLAUSE_IF_EXPR (c
);
4983 case OMP_CLAUSE_SIMDLEN
:
4984 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c
)))
4987 case OMP_CLAUSE__CONDTEMP_
:
4988 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4996 /* Add a placeholder for simduid. */
4997 if (sctx
.is_simt
&& maybe_ne (sctx
.max_vf
, 1U))
4998 sctx
.simt_eargs
.safe_push (NULL_TREE
);
5000 unsigned task_reduction_cnt
= 0;
5001 unsigned task_reduction_cntorig
= 0;
5002 unsigned task_reduction_cnt_full
= 0;
5003 unsigned task_reduction_cntorig_full
= 0;
5004 unsigned task_reduction_other_cnt
= 0;
5005 tree tskred_atype
= NULL_TREE
, tskred_avar
= NULL_TREE
;
5006 tree tskred_base
= NULL_TREE
, tskred_temp
= NULL_TREE
;
5007 /* Do all the fixed sized types in the first pass, and the variable sized
5008 types in the second pass. This makes sure that the scalar arguments to
5009 the variable sized types are processed before we use them in the
5010 variable sized operations. For task reductions we use 4 passes, in the
5011 first two we ignore them, in the third one gather arguments for
5012 GOMP_task_reduction_remap call and in the last pass actually handle
5013 the task reductions. */
5014 for (pass
= 0; pass
< ((task_reduction_cnt
|| task_reduction_other_cnt
)
5017 if (pass
== 2 && task_reduction_cnt
)
5020 = build_array_type_nelts (ptr_type_node
, task_reduction_cnt
5021 + task_reduction_cntorig
);
5022 tskred_avar
= create_tmp_var_raw (tskred_atype
);
5023 gimple_add_tmp_var (tskred_avar
);
5024 TREE_ADDRESSABLE (tskred_avar
) = 1;
5025 task_reduction_cnt_full
= task_reduction_cnt
;
5026 task_reduction_cntorig_full
= task_reduction_cntorig
;
5028 else if (pass
== 3 && task_reduction_cnt
)
5030 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP
);
5032 = gimple_build_call (x
, 3, size_int (task_reduction_cnt
),
5033 size_int (task_reduction_cntorig
),
5034 build_fold_addr_expr (tskred_avar
));
5035 gimple_seq_add_stmt (ilist
, g
);
5037 if (pass
== 3 && task_reduction_other_cnt
)
5039 /* For reduction clauses, build
5040 tskred_base = (void *) tskred_temp[2]
5041 + omp_get_thread_num () * tskred_temp[1]
5042 or if tskred_temp[1] is known to be constant, that constant
5043 directly. This is the start of the private reduction copy block
5044 for the current thread. */
5045 tree v
= create_tmp_var (integer_type_node
);
5046 x
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
5047 gimple
*g
= gimple_build_call (x
, 0);
5048 gimple_call_set_lhs (g
, v
);
5049 gimple_seq_add_stmt (ilist
, g
);
5050 c
= omp_find_clause (clauses
, OMP_CLAUSE__REDUCTEMP_
);
5051 tskred_temp
= OMP_CLAUSE_DECL (c
);
5052 if (is_taskreg_ctx (ctx
))
5053 tskred_temp
= lookup_decl (tskred_temp
, ctx
);
5054 tree v2
= create_tmp_var (sizetype
);
5055 g
= gimple_build_assign (v2
, NOP_EXPR
, v
);
5056 gimple_seq_add_stmt (ilist
, g
);
5057 if (ctx
->task_reductions
[0])
5058 v
= fold_convert (sizetype
, ctx
->task_reductions
[0]);
5060 v
= task_reduction_read (ilist
, tskred_temp
, sizetype
, 1);
5061 tree v3
= create_tmp_var (sizetype
);
5062 g
= gimple_build_assign (v3
, MULT_EXPR
, v2
, v
);
5063 gimple_seq_add_stmt (ilist
, g
);
5064 v
= task_reduction_read (ilist
, tskred_temp
, ptr_type_node
, 2);
5065 tskred_base
= create_tmp_var (ptr_type_node
);
5066 g
= gimple_build_assign (tskred_base
, POINTER_PLUS_EXPR
, v
, v3
);
5067 gimple_seq_add_stmt (ilist
, g
);
5069 task_reduction_cnt
= 0;
5070 task_reduction_cntorig
= 0;
5071 task_reduction_other_cnt
= 0;
5072 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
5074 enum omp_clause_code c_kind
= OMP_CLAUSE_CODE (c
);
5077 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
5078 bool task_reduction_p
= false;
5079 bool task_reduction_needs_orig_p
= false;
5080 tree cond
= NULL_TREE
;
5081 tree allocator
, allocate_ptr
;
5085 case OMP_CLAUSE_PRIVATE
:
5086 if (OMP_CLAUSE_PRIVATE_DEBUG (c
))
5089 case OMP_CLAUSE_SHARED
:
5090 /* Ignore shared directives in teams construct inside
5091 of target construct. */
5092 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5093 && !is_host_teams_ctx (ctx
))
5095 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c
), ctx
) == NULL
)
5097 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
)
5098 || is_global_var (OMP_CLAUSE_DECL (c
)));
5101 case OMP_CLAUSE_FIRSTPRIVATE
:
5102 case OMP_CLAUSE_COPYIN
:
5104 case OMP_CLAUSE_LINEAR
:
5105 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
5106 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
5107 lastprivate_firstprivate
= true;
5109 case OMP_CLAUSE_REDUCTION
:
5110 case OMP_CLAUSE_IN_REDUCTION
:
5111 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
5112 || is_task_ctx (ctx
)
5113 || OMP_CLAUSE_REDUCTION_TASK (c
))
5115 task_reduction_p
= true;
5116 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
5118 task_reduction_other_cnt
++;
5123 task_reduction_cnt
++;
5124 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5126 var
= OMP_CLAUSE_DECL (c
);
5127 /* If var is a global variable that isn't privatized
5128 in outer contexts, we don't need to look up the
5129 original address, it is always the address of the
5130 global variable itself. */
5132 || omp_privatize_by_reference (var
)
5134 (maybe_lookup_decl_in_outer_ctx (var
, ctx
)))
5136 task_reduction_needs_orig_p
= true;
5137 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5138 task_reduction_cntorig
++;
5142 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5143 reduction_omp_orig_ref
= true;
5145 case OMP_CLAUSE__REDUCTEMP_
:
5146 if (!is_taskreg_ctx (ctx
))
5149 case OMP_CLAUSE__LOOPTEMP_
:
5150 /* Handle _looptemp_/_reductemp_ clauses only on
5155 case OMP_CLAUSE_LASTPRIVATE
:
5156 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5158 lastprivate_firstprivate
= true;
5159 if (pass
!= 0 || is_taskloop_ctx (ctx
))
5162 /* Even without corresponding firstprivate, if
5163 decl is Fortran allocatable, it needs outer var
5166 && lang_hooks
.decls
.omp_private_outer_ref
5167 (OMP_CLAUSE_DECL (c
)))
5168 lastprivate_firstprivate
= true;
5170 case OMP_CLAUSE_ALIGNED
:
5173 var
= OMP_CLAUSE_DECL (c
);
5174 if (TREE_CODE (TREE_TYPE (var
)) == POINTER_TYPE
5175 && !is_global_var (var
))
5177 new_var
= maybe_lookup_decl (var
, ctx
);
5178 if (new_var
== NULL_TREE
)
5179 new_var
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5180 x
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5181 tree alarg
= omp_clause_aligned_alignment (c
);
5182 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5183 x
= build_call_expr_loc (clause_loc
, x
, 2, new_var
, alarg
);
5184 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5185 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
5186 gimplify_and_add (x
, ilist
);
5188 else if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
5189 && is_global_var (var
))
5191 tree ptype
= build_pointer_type (TREE_TYPE (var
)), t
, t2
;
5192 new_var
= lookup_decl (var
, ctx
);
5193 t
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5194 t
= build_fold_addr_expr_loc (clause_loc
, t
);
5195 t2
= builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED
);
5196 tree alarg
= omp_clause_aligned_alignment (c
);
5197 alarg
= fold_convert_loc (clause_loc
, size_type_node
, alarg
);
5198 t
= build_call_expr_loc (clause_loc
, t2
, 2, t
, alarg
);
5199 t
= fold_convert_loc (clause_loc
, ptype
, t
);
5200 x
= create_tmp_var (ptype
);
5201 t
= build2 (MODIFY_EXPR
, ptype
, x
, t
);
5202 gimplify_and_add (t
, ilist
);
5203 t
= build_simple_mem_ref_loc (clause_loc
, x
);
5204 SET_DECL_VALUE_EXPR (new_var
, t
);
5205 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5208 case OMP_CLAUSE__CONDTEMP_
:
5209 if (is_parallel_ctx (ctx
)
5210 || (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
)))
5217 if (task_reduction_p
!= (pass
>= 2))
5220 allocator
= NULL_TREE
;
5221 allocate_ptr
= NULL_TREE
;
5222 new_var
= var
= OMP_CLAUSE_DECL (c
);
5223 if ((c_kind
== OMP_CLAUSE_REDUCTION
5224 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5225 && TREE_CODE (var
) == MEM_REF
)
5227 var
= TREE_OPERAND (var
, 0);
5228 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
5229 var
= TREE_OPERAND (var
, 0);
5230 if (TREE_CODE (var
) == INDIRECT_REF
5231 || TREE_CODE (var
) == ADDR_EXPR
)
5232 var
= TREE_OPERAND (var
, 0);
5233 if (is_variable_sized (var
))
5235 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
5236 var
= DECL_VALUE_EXPR (var
);
5237 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
5238 var
= TREE_OPERAND (var
, 0);
5239 gcc_assert (DECL_P (var
));
5243 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
&& is_omp_target (ctx
->stmt
))
5245 splay_tree_key key
= (splay_tree_key
) &DECL_CONTEXT (var
);
5246 new_var
= (tree
) splay_tree_lookup (ctx
->field_map
, key
)->value
;
5248 else if (c_kind
!= OMP_CLAUSE_COPYIN
)
5249 new_var
= lookup_decl (var
, ctx
);
5251 if (c_kind
== OMP_CLAUSE_SHARED
|| c_kind
== OMP_CLAUSE_COPYIN
)
5256 /* C/C++ array section reductions. */
5257 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5258 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5259 && var
!= OMP_CLAUSE_DECL (c
))
5264 tree bias
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
5265 tree orig_var
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
5267 if (TREE_CODE (orig_var
) == POINTER_PLUS_EXPR
)
5269 tree b
= TREE_OPERAND (orig_var
, 1);
5270 if (is_omp_target (ctx
->stmt
))
5273 b
= maybe_lookup_decl (b
, ctx
);
5276 b
= TREE_OPERAND (orig_var
, 1);
5277 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
5279 if (integer_zerop (bias
))
5283 bias
= fold_convert_loc (clause_loc
,
5284 TREE_TYPE (b
), bias
);
5285 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5286 TREE_TYPE (b
), b
, bias
);
5288 orig_var
= TREE_OPERAND (orig_var
, 0);
5292 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5293 if (is_global_var (out
)
5294 && TREE_CODE (TREE_TYPE (out
)) != POINTER_TYPE
5295 && (TREE_CODE (TREE_TYPE (out
)) != REFERENCE_TYPE
5296 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out
)))
5299 else if (is_omp_target (ctx
->stmt
))
5303 bool by_ref
= use_pointer_for_field (var
, NULL
);
5304 x
= build_receiver_ref (var
, by_ref
, ctx
);
5305 if (TREE_CODE (TREE_TYPE (var
)) == REFERENCE_TYPE
5306 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var
)))
5308 x
= build_fold_addr_expr (x
);
5310 if (TREE_CODE (orig_var
) == INDIRECT_REF
)
5311 x
= build_simple_mem_ref (x
);
5312 else if (TREE_CODE (orig_var
) == ADDR_EXPR
)
5314 if (var
== TREE_OPERAND (orig_var
, 0))
5315 x
= build_fold_addr_expr (x
);
5317 bias
= fold_convert (sizetype
, bias
);
5318 x
= fold_convert (ptr_type_node
, x
);
5319 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
5320 TREE_TYPE (x
), x
, bias
);
5321 unsigned cnt
= task_reduction_cnt
- 1;
5322 if (!task_reduction_needs_orig_p
)
5323 cnt
+= (task_reduction_cntorig_full
5324 - task_reduction_cntorig
);
5326 cnt
= task_reduction_cntorig
- 1;
5327 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5328 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5329 gimplify_assign (r
, x
, ilist
);
5333 if (TREE_CODE (orig_var
) == INDIRECT_REF
5334 || TREE_CODE (orig_var
) == ADDR_EXPR
)
5335 orig_var
= TREE_OPERAND (orig_var
, 0);
5336 tree d
= OMP_CLAUSE_DECL (c
);
5337 tree type
= TREE_TYPE (d
);
5338 gcc_assert (TREE_CODE (type
) == ARRAY_TYPE
);
5339 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
5341 const char *name
= get_name (orig_var
);
5342 if (pass
!= 3 && !TREE_CONSTANT (v
))
5345 if (is_omp_target (ctx
->stmt
))
5348 t
= maybe_lookup_decl (v
, ctx
);
5352 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5353 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
, fb_rvalue
);
5354 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5356 build_int_cst (TREE_TYPE (v
), 1));
5357 sz
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5359 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5363 tree xv
= create_tmp_var (ptr_type_node
);
5364 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5366 unsigned cnt
= task_reduction_cnt
- 1;
5367 if (!task_reduction_needs_orig_p
)
5368 cnt
+= (task_reduction_cntorig_full
5369 - task_reduction_cntorig
);
5371 cnt
= task_reduction_cntorig
- 1;
5372 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5373 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5375 gimple
*g
= gimple_build_assign (xv
, x
);
5376 gimple_seq_add_stmt (ilist
, g
);
5380 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5382 if (ctx
->task_reductions
[1 + idx
])
5383 off
= fold_convert (sizetype
,
5384 ctx
->task_reductions
[1 + idx
]);
5386 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5388 gimple
*g
= gimple_build_assign (xv
, POINTER_PLUS_EXPR
,
5390 gimple_seq_add_stmt (ilist
, g
);
5392 x
= fold_convert (build_pointer_type (boolean_type_node
),
5394 if (TREE_CONSTANT (v
))
5395 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
,
5396 TYPE_SIZE_UNIT (type
));
5400 if (is_omp_target (ctx
->stmt
))
5403 t
= maybe_lookup_decl (v
, ctx
);
5407 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
5408 gimplify_expr (&v
, ilist
, NULL
, is_gimple_val
,
5410 t
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
5412 build_int_cst (TREE_TYPE (v
), 1));
5413 t
= fold_build2_loc (clause_loc
, MULT_EXPR
,
5415 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5416 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (x
), x
, t
);
5418 cond
= create_tmp_var (TREE_TYPE (x
));
5419 gimplify_assign (cond
, x
, ilist
);
5422 else if (lower_private_allocate (var
, type
, allocator
,
5423 allocate_ptr
, ilist
, ctx
,
5426 ? TYPE_SIZE_UNIT (type
)
5429 else if (TREE_CONSTANT (v
))
5431 x
= create_tmp_var_raw (type
, name
);
5432 gimple_add_tmp_var (x
);
5433 TREE_ADDRESSABLE (x
) = 1;
5434 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5439 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5440 tree al
= size_int (TYPE_ALIGN (TREE_TYPE (type
)));
5441 x
= build_call_expr_loc (clause_loc
, atmp
, 2, sz
, al
);
5444 tree ptype
= build_pointer_type (TREE_TYPE (type
));
5445 x
= fold_convert_loc (clause_loc
, ptype
, x
);
5446 tree y
= create_tmp_var (ptype
, name
);
5447 gimplify_assign (y
, x
, ilist
);
5451 if (!integer_zerop (bias
))
5453 bias
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5455 yb
= fold_convert_loc (clause_loc
, pointer_sized_int_node
,
5457 yb
= fold_build2_loc (clause_loc
, MINUS_EXPR
,
5458 pointer_sized_int_node
, yb
, bias
);
5459 x
= fold_convert_loc (clause_loc
, TREE_TYPE (x
), yb
);
5460 yb
= create_tmp_var (ptype
, name
);
5461 gimplify_assign (yb
, x
, ilist
);
5465 d
= TREE_OPERAND (d
, 0);
5466 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
5467 d
= TREE_OPERAND (d
, 0);
5468 if (TREE_CODE (d
) == ADDR_EXPR
)
5470 if (orig_var
!= var
)
5472 gcc_assert (is_variable_sized (orig_var
));
5473 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
),
5475 gimplify_assign (new_var
, x
, ilist
);
5476 tree new_orig_var
= lookup_decl (orig_var
, ctx
);
5477 tree t
= build_fold_indirect_ref (new_var
);
5478 DECL_IGNORED_P (new_var
) = 0;
5479 TREE_THIS_NOTRAP (t
) = 1;
5480 SET_DECL_VALUE_EXPR (new_orig_var
, t
);
5481 DECL_HAS_VALUE_EXPR_P (new_orig_var
) = 1;
5485 x
= build2 (MEM_REF
, TREE_TYPE (new_var
), x
,
5486 build_int_cst (ptype
, 0));
5487 SET_DECL_VALUE_EXPR (new_var
, x
);
5488 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5493 gcc_assert (orig_var
== var
);
5494 if (TREE_CODE (d
) == INDIRECT_REF
)
5496 x
= create_tmp_var (ptype
, name
);
5497 TREE_ADDRESSABLE (x
) = 1;
5498 gimplify_assign (x
, yb
, ilist
);
5499 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5501 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5502 gimplify_assign (new_var
, x
, ilist
);
5504 /* GOMP_taskgroup_reduction_register memsets the whole
5505 array to zero. If the initializer is zero, we don't
5506 need to initialize it again, just mark it as ever
5507 used unconditionally, i.e. cond = true. */
5509 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
5510 && initializer_zerop (omp_reduction_init (c
,
5513 gimple
*g
= gimple_build_assign (build_simple_mem_ref (cond
),
5515 gimple_seq_add_stmt (ilist
, g
);
5518 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
5522 if (!is_parallel_ctx (ctx
))
5524 tree condv
= create_tmp_var (boolean_type_node
);
5525 g
= gimple_build_assign (condv
,
5526 build_simple_mem_ref (cond
));
5527 gimple_seq_add_stmt (ilist
, g
);
5528 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
5529 g
= gimple_build_cond (NE_EXPR
, condv
,
5530 boolean_false_node
, end
, lab1
);
5531 gimple_seq_add_stmt (ilist
, g
);
5532 gimple_seq_add_stmt (ilist
, gimple_build_label (lab1
));
5534 g
= gimple_build_assign (build_simple_mem_ref (cond
),
5536 gimple_seq_add_stmt (ilist
, g
);
5539 tree y1
= create_tmp_var (ptype
);
5540 gimplify_assign (y1
, y
, ilist
);
5541 tree i2
= NULL_TREE
, y2
= NULL_TREE
;
5542 tree body2
= NULL_TREE
, end2
= NULL_TREE
;
5543 tree y3
= NULL_TREE
, y4
= NULL_TREE
;
5544 if (task_reduction_needs_orig_p
)
5546 y3
= create_tmp_var (ptype
);
5548 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5549 ref
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5550 size_int (task_reduction_cnt_full
5551 + task_reduction_cntorig
- 1),
5552 NULL_TREE
, NULL_TREE
);
5555 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5556 ref
= task_reduction_read (ilist
, tskred_temp
, ptype
,
5559 gimplify_assign (y3
, ref
, ilist
);
5561 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) || is_simd
)
5565 y2
= create_tmp_var (ptype
);
5566 gimplify_assign (y2
, y
, ilist
);
5568 if (is_simd
|| OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5570 tree ref
= build_outer_var_ref (var
, ctx
);
5571 /* For ref build_outer_var_ref already performs this. */
5572 if (TREE_CODE (d
) == INDIRECT_REF
)
5573 gcc_assert (omp_privatize_by_reference (var
));
5574 else if (TREE_CODE (d
) == ADDR_EXPR
)
5575 ref
= build_fold_addr_expr (ref
);
5576 else if (omp_privatize_by_reference (var
))
5577 ref
= build_fold_addr_expr (ref
);
5578 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
5579 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
5580 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
5582 y3
= create_tmp_var (ptype
);
5583 gimplify_assign (y3
, unshare_expr (ref
), ilist
);
5587 y4
= create_tmp_var (ptype
);
5588 gimplify_assign (y4
, ref
, dlist
);
5592 tree i
= create_tmp_var (TREE_TYPE (v
));
5593 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), ilist
);
5594 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
5595 gimple_seq_add_stmt (ilist
, gimple_build_label (body
));
5598 i2
= create_tmp_var (TREE_TYPE (v
));
5599 gimplify_assign (i2
, build_int_cst (TREE_TYPE (v
), 0), dlist
);
5600 body2
= create_artificial_label (UNKNOWN_LOCATION
);
5601 end2
= create_artificial_label (UNKNOWN_LOCATION
);
5602 gimple_seq_add_stmt (dlist
, gimple_build_label (body2
));
5604 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5606 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
5607 tree decl_placeholder
5608 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
5609 SET_DECL_VALUE_EXPR (decl_placeholder
,
5610 build_simple_mem_ref (y1
));
5611 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
5612 SET_DECL_VALUE_EXPR (placeholder
,
5613 y3
? build_simple_mem_ref (y3
)
5615 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
5616 x
= lang_hooks
.decls
.omp_clause_default_ctor
5617 (c
, build_simple_mem_ref (y1
),
5618 y3
? build_simple_mem_ref (y3
) : NULL_TREE
);
5620 gimplify_and_add (x
, ilist
);
5621 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
5623 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
5624 lower_omp (&tseq
, ctx
);
5625 gimple_seq_add_seq (ilist
, tseq
);
5627 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
5630 SET_DECL_VALUE_EXPR (decl_placeholder
,
5631 build_simple_mem_ref (y2
));
5632 SET_DECL_VALUE_EXPR (placeholder
,
5633 build_simple_mem_ref (y4
));
5634 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
5635 lower_omp (&tseq
, ctx
);
5636 gimple_seq_add_seq (dlist
, tseq
);
5637 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
5639 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
5640 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 0;
5643 x
= lang_hooks
.decls
.omp_clause_dtor
5644 (c
, build_simple_mem_ref (y2
));
5646 gimplify_and_add (x
, dlist
);
5651 x
= omp_reduction_init (c
, TREE_TYPE (type
));
5652 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
5654 /* reduction(-:var) sums up the partial results, so it
5655 acts identically to reduction(+:var). */
5656 if (code
== MINUS_EXPR
)
5659 gimplify_assign (build_simple_mem_ref (y1
), x
, ilist
);
5662 x
= build2 (code
, TREE_TYPE (type
),
5663 build_simple_mem_ref (y4
),
5664 build_simple_mem_ref (y2
));
5665 gimplify_assign (build_simple_mem_ref (y4
), x
, dlist
);
5669 = gimple_build_assign (y1
, POINTER_PLUS_EXPR
, y1
,
5670 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5671 gimple_seq_add_stmt (ilist
, g
);
5674 g
= gimple_build_assign (y3
, POINTER_PLUS_EXPR
, y3
,
5675 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5676 gimple_seq_add_stmt (ilist
, g
);
5678 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
5679 build_int_cst (TREE_TYPE (i
), 1));
5680 gimple_seq_add_stmt (ilist
, g
);
5681 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
5682 gimple_seq_add_stmt (ilist
, g
);
5683 gimple_seq_add_stmt (ilist
, gimple_build_label (end
));
5686 g
= gimple_build_assign (y2
, POINTER_PLUS_EXPR
, y2
,
5687 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5688 gimple_seq_add_stmt (dlist
, g
);
5691 g
= gimple_build_assign
5692 (y4
, POINTER_PLUS_EXPR
, y4
,
5693 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5694 gimple_seq_add_stmt (dlist
, g
);
5696 g
= gimple_build_assign (i2
, PLUS_EXPR
, i2
,
5697 build_int_cst (TREE_TYPE (i2
), 1));
5698 gimple_seq_add_stmt (dlist
, g
);
5699 g
= gimple_build_cond (LE_EXPR
, i2
, v
, body2
, end2
);
5700 gimple_seq_add_stmt (dlist
, g
);
5701 gimple_seq_add_stmt (dlist
, gimple_build_label (end2
));
5705 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
5706 g
= gimple_build_call (f
, 2, allocate_ptr
, allocator
);
5707 gimple_seq_add_stmt (dlist
, g
);
5713 tree out
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
5714 if (is_global_var (out
))
5716 else if (is_omp_target (ctx
->stmt
))
5720 bool by_ref
= use_pointer_for_field (var
, ctx
);
5721 x
= build_receiver_ref (var
, by_ref
, ctx
);
5723 if (!omp_privatize_by_reference (var
))
5724 x
= build_fold_addr_expr (x
);
5725 x
= fold_convert (ptr_type_node
, x
);
5726 unsigned cnt
= task_reduction_cnt
- 1;
5727 if (!task_reduction_needs_orig_p
)
5728 cnt
+= task_reduction_cntorig_full
- task_reduction_cntorig
;
5730 cnt
= task_reduction_cntorig
- 1;
5731 tree r
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5732 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5733 gimplify_assign (r
, x
, ilist
);
5738 tree type
= TREE_TYPE (new_var
);
5739 if (!omp_privatize_by_reference (var
))
5740 type
= build_pointer_type (type
);
5741 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
5743 unsigned cnt
= task_reduction_cnt
- 1;
5744 if (!task_reduction_needs_orig_p
)
5745 cnt
+= (task_reduction_cntorig_full
5746 - task_reduction_cntorig
);
5748 cnt
= task_reduction_cntorig
- 1;
5749 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
5750 size_int (cnt
), NULL_TREE
, NULL_TREE
);
5754 unsigned int idx
= *ctx
->task_reduction_map
->get (c
);
5756 if (ctx
->task_reductions
[1 + idx
])
5757 off
= fold_convert (sizetype
,
5758 ctx
->task_reductions
[1 + idx
]);
5760 off
= task_reduction_read (ilist
, tskred_temp
, sizetype
,
5762 x
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
5765 x
= fold_convert (type
, x
);
5767 if (omp_privatize_by_reference (var
))
5769 gimplify_assign (new_var
, x
, ilist
);
5771 new_var
= build_simple_mem_ref (new_var
);
5775 t
= create_tmp_var (type
);
5776 gimplify_assign (t
, x
, ilist
);
5777 SET_DECL_VALUE_EXPR (new_var
, build_simple_mem_ref (t
));
5778 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5780 t
= fold_convert (build_pointer_type (boolean_type_node
), t
);
5781 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
5782 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
5783 cond
= create_tmp_var (TREE_TYPE (t
));
5784 gimplify_assign (cond
, t
, ilist
);
5786 else if (is_variable_sized (var
))
5788 /* For variable sized types, we need to allocate the
5789 actual storage here. Call alloca and store the
5790 result in the pointer decl that we created elsewhere. */
5794 if (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
|| !is_task_ctx (ctx
))
5798 ptr
= DECL_VALUE_EXPR (new_var
);
5799 gcc_assert (TREE_CODE (ptr
) == INDIRECT_REF
);
5800 ptr
= TREE_OPERAND (ptr
, 0);
5801 gcc_assert (DECL_P (ptr
));
5802 x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
5804 if (lower_private_allocate (var
, new_var
, allocator
,
5805 allocate_ptr
, ilist
, ctx
,
5810 /* void *tmp = __builtin_alloca */
5812 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5814 = gimple_build_call (atmp
, 2, x
,
5815 size_int (DECL_ALIGN (var
)));
5816 cfun
->calls_alloca
= 1;
5817 tmp
= create_tmp_var_raw (ptr_type_node
);
5818 gimple_add_tmp_var (tmp
);
5819 gimple_call_set_lhs (stmt
, tmp
);
5821 gimple_seq_add_stmt (ilist
, stmt
);
5824 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ptr
), tmp
);
5825 gimplify_assign (ptr
, x
, ilist
);
5828 else if (omp_privatize_by_reference (var
)
5829 && (c_kind
!= OMP_CLAUSE_FIRSTPRIVATE
5830 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)))
5832 /* For references that are being privatized for Fortran,
5833 allocate new backing storage for the new pointer
5834 variable. This allows us to avoid changing all the
5835 code that expects a pointer to something that expects
5836 a direct variable. */
5840 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
5841 if (c_kind
== OMP_CLAUSE_FIRSTPRIVATE
&& is_task_ctx (ctx
))
5843 x
= build_receiver_ref (var
, false, ctx
);
5844 if (ctx
->allocate_map
)
5845 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
5847 allocator
= *allocatep
;
5848 if (TREE_CODE (allocator
) == TREE_LIST
)
5849 allocator
= TREE_PURPOSE (allocator
);
5850 if (TREE_CODE (allocator
) != INTEGER_CST
)
5851 allocator
= build_outer_var_ref (allocator
, ctx
);
5852 allocator
= fold_convert (pointer_sized_int_node
,
5854 allocate_ptr
= unshare_expr (x
);
5856 if (allocator
== NULL_TREE
)
5857 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5859 else if (lower_private_allocate (var
, new_var
, allocator
,
5861 ilist
, ctx
, true, x
))
5863 else if (TREE_CONSTANT (x
))
5865 /* For reduction in SIMD loop, defer adding the
5866 initialization of the reference, because if we decide
5867 to use SIMD array for it, the initilization could cause
5868 expansion ICE. Ditto for other privatization clauses. */
5873 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
5875 gimple_add_tmp_var (x
);
5876 TREE_ADDRESSABLE (x
) = 1;
5877 x
= build_fold_addr_expr_loc (clause_loc
, x
);
5883 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
5884 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
5885 tree al
= size_int (TYPE_ALIGN (rtype
));
5886 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
5891 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
5892 gimplify_assign (new_var
, x
, ilist
);
5895 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
5897 else if ((c_kind
== OMP_CLAUSE_REDUCTION
5898 || c_kind
== OMP_CLAUSE_IN_REDUCTION
)
5899 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
5907 switch (OMP_CLAUSE_CODE (c
))
5909 case OMP_CLAUSE_SHARED
:
5910 /* Ignore shared directives in teams construct inside
5911 target construct. */
5912 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_TEAMS
5913 && !is_host_teams_ctx (ctx
))
5915 /* Shared global vars are just accessed directly. */
5916 if (is_global_var (new_var
))
5918 /* For taskloop firstprivate/lastprivate, represented
5919 as firstprivate and shared clause on the task, new_var
5920 is the firstprivate var. */
5921 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
5923 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5924 needs to be delayed until after fixup_child_record_type so
5925 that we get the correct type during the dereference. */
5926 by_ref
= use_pointer_for_field (var
, ctx
);
5927 x
= build_receiver_ref (var
, by_ref
, ctx
);
5928 SET_DECL_VALUE_EXPR (new_var
, x
);
5929 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5931 /* ??? If VAR is not passed by reference, and the variable
5932 hasn't been initialized yet, then we'll get a warning for
5933 the store into the omp_data_s structure. Ideally, we'd be
5934 able to notice this and not store anything at all, but
5935 we're generating code too early. Suppress the warning. */
5937 suppress_warning (var
, OPT_Wuninitialized
);
5940 case OMP_CLAUSE__CONDTEMP_
:
5941 if (is_parallel_ctx (ctx
))
5943 x
= build_receiver_ref (var
, false, ctx
);
5944 SET_DECL_VALUE_EXPR (new_var
, x
);
5945 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
5947 else if (is_simd
&& !OMP_CLAUSE__CONDTEMP__ITER (c
))
5949 x
= build_zero_cst (TREE_TYPE (var
));
5954 case OMP_CLAUSE_LASTPRIVATE
:
5955 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
5959 case OMP_CLAUSE_PRIVATE
:
5960 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
)
5961 x
= build_outer_var_ref (var
, ctx
);
5962 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
5964 if (is_task_ctx (ctx
))
5965 x
= build_receiver_ref (var
, false, ctx
);
5967 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_PRIVATE
);
5975 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
5976 ilist
, ctx
, false, NULL_TREE
);
5977 nx
= unshare_expr (new_var
);
5979 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5980 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
5983 nx
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, nx
, x
);
5985 nx
= lang_hooks
.decls
.omp_clause_default_ctor (c
, nx
, x
);
5988 tree y
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
5989 if ((TREE_ADDRESSABLE (new_var
) || nx
|| y
5990 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
5991 && (gimple_omp_for_collapse (ctx
->stmt
) != 1
5992 || (gimple_omp_for_index (ctx
->stmt
, 0)
5994 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
5995 || omp_privatize_by_reference (var
))
5996 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
5999 if (omp_privatize_by_reference (var
))
6001 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6002 tree new_vard
= TREE_OPERAND (new_var
, 0);
6003 gcc_assert (DECL_P (new_vard
));
6004 SET_DECL_VALUE_EXPR (new_vard
,
6005 build_fold_addr_expr (lvar
));
6006 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6011 tree iv
= unshare_expr (ivar
);
6013 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
,
6016 x
= lang_hooks
.decls
.omp_clause_default_ctor (c
,
6020 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE__CONDTEMP_
)
6022 x
= build2 (MODIFY_EXPR
, TREE_TYPE (ivar
),
6023 unshare_expr (ivar
), x
);
6027 gimplify_and_add (x
, &llist
[0]);
6028 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6029 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6034 gcc_assert (TREE_CODE (v
) == MEM_REF
);
6035 v
= TREE_OPERAND (v
, 0);
6036 gcc_assert (DECL_P (v
));
6038 v
= *ctx
->lastprivate_conditional_map
->get (v
);
6039 tree t
= create_tmp_var (TREE_TYPE (v
));
6040 tree z
= build_zero_cst (TREE_TYPE (v
));
6042 = build_outer_var_ref (var
, ctx
,
6043 OMP_CLAUSE_LASTPRIVATE
);
6044 gimple_seq_add_stmt (dlist
,
6045 gimple_build_assign (t
, z
));
6046 gcc_assert (DECL_HAS_VALUE_EXPR_P (v
));
6047 tree civar
= DECL_VALUE_EXPR (v
);
6048 gcc_assert (TREE_CODE (civar
) == ARRAY_REF
);
6049 civar
= unshare_expr (civar
);
6050 TREE_OPERAND (civar
, 1) = sctx
.idx
;
6051 x
= build2 (MODIFY_EXPR
, TREE_TYPE (t
), t
,
6052 unshare_expr (civar
));
6053 x
= build2 (COMPOUND_EXPR
, TREE_TYPE (orig_v
), x
,
6054 build2 (MODIFY_EXPR
, TREE_TYPE (orig_v
),
6055 orig_v
, unshare_expr (ivar
)));
6056 tree cond
= build2 (LT_EXPR
, boolean_type_node
, t
,
6058 x
= build3 (COND_EXPR
, void_type_node
, cond
, x
,
6060 gimple_seq tseq
= NULL
;
6061 gimplify_and_add (x
, &tseq
);
6063 lower_omp (&tseq
, ctx
->outer
);
6064 gimple_seq_add_seq (&llist
[1], tseq
);
6066 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6067 && ctx
->for_simd_scan_phase
)
6069 x
= unshare_expr (ivar
);
6071 = build_outer_var_ref (var
, ctx
,
6072 OMP_CLAUSE_LASTPRIVATE
);
6073 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6075 gimplify_and_add (x
, &llist
[0]);
6079 y
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6081 gimplify_and_add (y
, &llist
[1]);
6085 if (omp_privatize_by_reference (var
))
6087 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6088 tree new_vard
= TREE_OPERAND (new_var
, 0);
6089 gcc_assert (DECL_P (new_vard
));
6090 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6091 x
= TYPE_SIZE_UNIT (type
);
6092 if (TREE_CONSTANT (x
))
6094 x
= create_tmp_var_raw (type
, get_name (var
));
6095 gimple_add_tmp_var (x
);
6096 TREE_ADDRESSABLE (x
) = 1;
6097 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6098 x
= fold_convert_loc (clause_loc
,
6099 TREE_TYPE (new_vard
), x
);
6100 gimplify_assign (new_vard
, x
, ilist
);
6105 gimplify_and_add (nx
, ilist
);
6106 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6108 && ctx
->for_simd_scan_phase
)
6110 tree orig_v
= build_outer_var_ref (var
, ctx
,
6111 OMP_CLAUSE_LASTPRIVATE
);
6112 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
,
6114 gimplify_and_add (x
, ilist
);
6119 x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
6121 gimplify_and_add (x
, dlist
);
6124 if (!is_gimple_val (allocator
))
6126 tree avar
= create_tmp_var (TREE_TYPE (allocator
));
6127 gimplify_assign (avar
, allocator
, dlist
);
6130 if (!is_gimple_val (allocate_ptr
))
6132 tree apvar
= create_tmp_var (TREE_TYPE (allocate_ptr
));
6133 gimplify_assign (apvar
, allocate_ptr
, dlist
);
6134 allocate_ptr
= apvar
;
6136 tree f
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
6138 = gimple_build_call (f
, 2, allocate_ptr
, allocator
);
6139 gimple_seq_add_stmt (dlist
, g
);
6143 case OMP_CLAUSE_LINEAR
:
6144 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6145 goto do_firstprivate
;
6146 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6149 x
= build_outer_var_ref (var
, ctx
);
6152 case OMP_CLAUSE_FIRSTPRIVATE
:
6153 if (is_task_ctx (ctx
))
6155 if ((omp_privatize_by_reference (var
)
6156 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
))
6157 || is_variable_sized (var
))
6159 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
6161 || use_pointer_for_field (var
, NULL
))
6163 x
= build_receiver_ref (var
, false, ctx
);
6164 if (ctx
->allocate_map
)
6165 if (tree
*allocatep
= ctx
->allocate_map
->get (var
))
6167 allocator
= *allocatep
;
6168 if (TREE_CODE (allocator
) == TREE_LIST
)
6169 allocator
= TREE_PURPOSE (allocator
);
6170 if (TREE_CODE (allocator
) != INTEGER_CST
)
6171 allocator
= build_outer_var_ref (allocator
, ctx
);
6172 allocator
= fold_convert (pointer_sized_int_node
,
6174 allocate_ptr
= unshare_expr (x
);
6175 x
= build_simple_mem_ref (x
);
6176 TREE_THIS_NOTRAP (x
) = 1;
6178 SET_DECL_VALUE_EXPR (new_var
, x
);
6179 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
6183 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
)
6184 && omp_privatize_by_reference (var
))
6186 x
= build_outer_var_ref (var
, ctx
);
6187 gcc_assert (TREE_CODE (x
) == MEM_REF
6188 && integer_zerop (TREE_OPERAND (x
, 1)));
6189 x
= TREE_OPERAND (x
, 0);
6190 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6191 (c
, unshare_expr (new_var
), x
);
6192 gimplify_and_add (x
, ilist
);
6196 lower_private_allocate (var
, new_var
, allocator
, allocate_ptr
,
6197 ilist
, ctx
, false, NULL_TREE
);
6198 x
= build_outer_var_ref (var
, ctx
);
6201 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6202 && gimple_omp_for_combined_into_p (ctx
->stmt
))
6204 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6206 t
= build_outer_var_ref (t
, ctx
);
6207 tree stept
= TREE_TYPE (t
);
6208 tree ct
= omp_find_clause (clauses
,
6209 OMP_CLAUSE__LOOPTEMP_
);
6211 tree l
= OMP_CLAUSE_DECL (ct
);
6212 tree n1
= fd
->loop
.n1
;
6213 tree step
= fd
->loop
.step
;
6214 tree itype
= TREE_TYPE (l
);
6215 if (POINTER_TYPE_P (itype
))
6216 itype
= signed_type_for (itype
);
6217 l
= fold_build2 (MINUS_EXPR
, itype
, l
, n1
);
6218 if (TYPE_UNSIGNED (itype
)
6219 && fd
->loop
.cond_code
== GT_EXPR
)
6220 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
,
6221 fold_build1 (NEGATE_EXPR
, itype
, l
),
6222 fold_build1 (NEGATE_EXPR
,
6225 l
= fold_build2 (TRUNC_DIV_EXPR
, itype
, l
, step
);
6226 t
= fold_build2 (MULT_EXPR
, stept
,
6227 fold_convert (stept
, l
), t
);
6229 if (OMP_CLAUSE_LINEAR_ARRAY (c
))
6231 if (omp_privatize_by_reference (var
))
6233 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6234 tree new_vard
= TREE_OPERAND (new_var
, 0);
6235 gcc_assert (DECL_P (new_vard
));
6236 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6237 nx
= TYPE_SIZE_UNIT (type
);
6238 if (TREE_CONSTANT (nx
))
6240 nx
= create_tmp_var_raw (type
,
6242 gimple_add_tmp_var (nx
);
6243 TREE_ADDRESSABLE (nx
) = 1;
6244 nx
= build_fold_addr_expr_loc (clause_loc
,
6246 nx
= fold_convert_loc (clause_loc
,
6247 TREE_TYPE (new_vard
),
6249 gimplify_assign (new_vard
, nx
, ilist
);
6253 x
= lang_hooks
.decls
.omp_clause_linear_ctor
6255 gimplify_and_add (x
, ilist
);
6259 if (POINTER_TYPE_P (TREE_TYPE (x
)))
6260 x
= fold_build_pointer_plus (x
, t
);
6262 x
= fold_build2 (PLUS_EXPR
, TREE_TYPE (x
), x
,
6263 fold_convert (TREE_TYPE (x
), t
));
6266 if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LINEAR
6267 || TREE_ADDRESSABLE (new_var
)
6268 || omp_privatize_by_reference (var
))
6269 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6272 if (omp_privatize_by_reference (var
))
6274 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6275 tree new_vard
= TREE_OPERAND (new_var
, 0);
6276 gcc_assert (DECL_P (new_vard
));
6277 SET_DECL_VALUE_EXPR (new_vard
,
6278 build_fold_addr_expr (lvar
));
6279 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6281 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
)
6283 tree iv
= create_tmp_var (TREE_TYPE (new_var
));
6284 x
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, iv
, x
);
6285 gimplify_and_add (x
, ilist
);
6286 gimple_stmt_iterator gsi
6287 = gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6289 = gimple_build_assign (unshare_expr (lvar
), iv
);
6290 gsi_insert_before_without_update (&gsi
, g
,
6292 tree t
= OMP_CLAUSE_LINEAR_STEP (c
);
6293 enum tree_code code
= PLUS_EXPR
;
6294 if (POINTER_TYPE_P (TREE_TYPE (new_var
)))
6295 code
= POINTER_PLUS_EXPR
;
6296 g
= gimple_build_assign (iv
, code
, iv
, t
);
6297 gsi_insert_before_without_update (&gsi
, g
,
6301 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6302 (c
, unshare_expr (ivar
), x
);
6303 gimplify_and_add (x
, &llist
[0]);
6304 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6306 gimplify_and_add (x
, &llist
[1]);
6309 if (omp_privatize_by_reference (var
))
6311 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6312 tree new_vard
= TREE_OPERAND (new_var
, 0);
6313 gcc_assert (DECL_P (new_vard
));
6314 tree type
= TREE_TYPE (TREE_TYPE (new_vard
));
6315 nx
= TYPE_SIZE_UNIT (type
);
6316 if (TREE_CONSTANT (nx
))
6318 nx
= create_tmp_var_raw (type
, get_name (var
));
6319 gimple_add_tmp_var (nx
);
6320 TREE_ADDRESSABLE (nx
) = 1;
6321 nx
= build_fold_addr_expr_loc (clause_loc
, nx
);
6322 nx
= fold_convert_loc (clause_loc
,
6323 TREE_TYPE (new_vard
), nx
);
6324 gimplify_assign (new_vard
, nx
, ilist
);
6328 x
= lang_hooks
.decls
.omp_clause_copy_ctor
6329 (c
, unshare_expr (new_var
), x
);
6330 gimplify_and_add (x
, ilist
);
6333 case OMP_CLAUSE__LOOPTEMP_
:
6334 case OMP_CLAUSE__REDUCTEMP_
:
6335 gcc_assert (is_taskreg_ctx (ctx
));
6336 x
= build_outer_var_ref (var
, ctx
);
6337 x
= build2 (MODIFY_EXPR
, TREE_TYPE (new_var
), new_var
, x
);
6338 gimplify_and_add (x
, ilist
);
6341 case OMP_CLAUSE_COPYIN
:
6342 by_ref
= use_pointer_for_field (var
, NULL
);
6343 x
= build_receiver_ref (var
, by_ref
, ctx
);
6344 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, x
);
6345 append_to_statement_list (x
, ©in_seq
);
6346 copyin_by_ref
|= by_ref
;
6349 case OMP_CLAUSE_REDUCTION
:
6350 case OMP_CLAUSE_IN_REDUCTION
:
6351 /* OpenACC reductions are initialized using the
6352 GOACC_REDUCTION internal function. */
6353 if (is_gimple_omp_oacc (ctx
->stmt
))
6355 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6357 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
6359 tree ptype
= TREE_TYPE (placeholder
);
6362 x
= error_mark_node
;
6363 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)
6364 && !task_reduction_needs_orig_p
)
6366 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
))
6368 tree pptype
= build_pointer_type (ptype
);
6369 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
)
6370 x
= build4 (ARRAY_REF
, ptr_type_node
, tskred_avar
,
6371 size_int (task_reduction_cnt_full
6372 + task_reduction_cntorig
- 1),
6373 NULL_TREE
, NULL_TREE
);
6377 = *ctx
->task_reduction_map
->get (c
);
6378 x
= task_reduction_read (ilist
, tskred_temp
,
6379 pptype
, 7 + 3 * idx
);
6381 x
= fold_convert (pptype
, x
);
6382 x
= build_simple_mem_ref (x
);
6387 lower_private_allocate (var
, new_var
, allocator
,
6388 allocate_ptr
, ilist
, ctx
, false,
6390 x
= build_outer_var_ref (var
, ctx
);
6392 if (omp_privatize_by_reference (var
)
6393 && !useless_type_conversion_p (ptype
, TREE_TYPE (x
)))
6394 x
= build_fold_addr_expr_loc (clause_loc
, x
);
6396 SET_DECL_VALUE_EXPR (placeholder
, x
);
6397 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
6398 tree new_vard
= new_var
;
6399 if (omp_privatize_by_reference (var
))
6401 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6402 new_vard
= TREE_OPERAND (new_var
, 0);
6403 gcc_assert (DECL_P (new_vard
));
6405 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6407 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6408 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6411 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6415 if (new_vard
== new_var
)
6417 gcc_assert (DECL_VALUE_EXPR (new_var
) == lvar
);
6418 SET_DECL_VALUE_EXPR (new_var
, ivar
);
6422 SET_DECL_VALUE_EXPR (new_vard
,
6423 build_fold_addr_expr (ivar
));
6424 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6426 x
= lang_hooks
.decls
.omp_clause_default_ctor
6427 (c
, unshare_expr (ivar
),
6428 build_outer_var_ref (var
, ctx
));
6429 if (rvarp
&& ctx
->for_simd_scan_phase
)
6432 gimplify_and_add (x
, &llist
[0]);
6433 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6435 gimplify_and_add (x
, &llist
[1]);
6442 gimplify_and_add (x
, &llist
[0]);
6444 tree ivar2
= unshare_expr (lvar
);
6445 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6446 x
= lang_hooks
.decls
.omp_clause_default_ctor
6447 (c
, ivar2
, build_outer_var_ref (var
, ctx
));
6448 gimplify_and_add (x
, &llist
[0]);
6452 x
= lang_hooks
.decls
.omp_clause_default_ctor
6453 (c
, unshare_expr (rvar2
),
6454 build_outer_var_ref (var
, ctx
));
6455 gimplify_and_add (x
, &llist
[0]);
6458 /* For types that need construction, add another
6459 private var which will be default constructed
6460 and optionally initialized with
6461 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6462 loop we want to assign this value instead of
6463 constructing and destructing it in each
6465 tree nv
= create_tmp_var_raw (TREE_TYPE (ivar
));
6466 gimple_add_tmp_var (nv
);
6467 ctx
->cb
.decl_map
->put (TREE_OPERAND (rvar2
6471 x
= lang_hooks
.decls
.omp_clause_default_ctor
6472 (c
, nv
, build_outer_var_ref (var
, ctx
));
6473 gimplify_and_add (x
, ilist
);
6475 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6477 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6478 x
= DECL_VALUE_EXPR (new_vard
);
6480 if (new_vard
!= new_var
)
6481 vexpr
= build_fold_addr_expr (nv
);
6482 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6483 lower_omp (&tseq
, ctx
);
6484 SET_DECL_VALUE_EXPR (new_vard
, x
);
6485 gimple_seq_add_seq (ilist
, tseq
);
6486 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6489 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6491 gimplify_and_add (x
, dlist
);
6494 tree ref
= build_outer_var_ref (var
, ctx
);
6495 x
= unshare_expr (ivar
);
6496 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
6498 gimplify_and_add (x
, &llist
[0]);
6500 ref
= build_outer_var_ref (var
, ctx
);
6501 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, ref
,
6503 gimplify_and_add (x
, &llist
[3]);
6505 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6506 if (new_vard
== new_var
)
6507 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6509 SET_DECL_VALUE_EXPR (new_vard
,
6510 build_fold_addr_expr (lvar
));
6512 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6514 gimplify_and_add (x
, &llist
[1]);
6516 tree ivar2
= unshare_expr (lvar
);
6517 TREE_OPERAND (ivar2
, 1) = sctx
.idx
;
6518 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar2
);
6520 gimplify_and_add (x
, &llist
[1]);
6524 x
= lang_hooks
.decls
.omp_clause_dtor (c
, rvar2
);
6526 gimplify_and_add (x
, &llist
[1]);
6531 gimplify_and_add (x
, &llist
[0]);
6532 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6534 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6535 lower_omp (&tseq
, ctx
);
6536 gimple_seq_add_seq (&llist
[0], tseq
);
6538 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6539 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6540 lower_omp (&tseq
, ctx
);
6541 gimple_seq_add_seq (&llist
[1], tseq
);
6542 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6543 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6544 if (new_vard
== new_var
)
6545 SET_DECL_VALUE_EXPR (new_var
, lvar
);
6547 SET_DECL_VALUE_EXPR (new_vard
,
6548 build_fold_addr_expr (lvar
));
6549 x
= lang_hooks
.decls
.omp_clause_dtor (c
, ivar
);
6551 gimplify_and_add (x
, &llist
[1]);
6554 /* If this is a reference to constant size reduction var
6555 with placeholder, we haven't emitted the initializer
6556 for it because it is undesirable if SIMD arrays are used.
6557 But if they aren't used, we need to emit the deferred
6558 initialization now. */
6559 else if (omp_privatize_by_reference (var
) && is_simd
)
6560 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6562 tree lab2
= NULL_TREE
;
6566 if (!is_parallel_ctx (ctx
))
6568 tree condv
= create_tmp_var (boolean_type_node
);
6569 tree m
= build_simple_mem_ref (cond
);
6570 g
= gimple_build_assign (condv
, m
);
6571 gimple_seq_add_stmt (ilist
, g
);
6573 = create_artificial_label (UNKNOWN_LOCATION
);
6574 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6575 g
= gimple_build_cond (NE_EXPR
, condv
,
6578 gimple_seq_add_stmt (ilist
, g
);
6579 gimple_seq_add_stmt (ilist
,
6580 gimple_build_label (lab1
));
6582 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6584 gimple_seq_add_stmt (ilist
, g
);
6586 x
= lang_hooks
.decls
.omp_clause_default_ctor
6587 (c
, unshare_expr (new_var
),
6589 : build_outer_var_ref (var
, ctx
));
6591 gimplify_and_add (x
, ilist
);
6593 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6594 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6596 if (ctx
->for_simd_scan_phase
)
6599 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c
)))
6601 tree nv
= create_tmp_var_raw (TREE_TYPE (new_var
));
6602 gimple_add_tmp_var (nv
);
6603 ctx
->cb
.decl_map
->put (new_vard
, nv
);
6604 x
= lang_hooks
.decls
.omp_clause_default_ctor
6605 (c
, nv
, build_outer_var_ref (var
, ctx
));
6607 gimplify_and_add (x
, ilist
);
6608 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6610 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6612 if (new_vard
!= new_var
)
6613 vexpr
= build_fold_addr_expr (nv
);
6614 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
6615 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6616 lower_omp (&tseq
, ctx
);
6617 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
6618 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
6619 gimple_seq_add_seq (ilist
, tseq
);
6621 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6622 if (is_simd
&& ctx
->scan_exclusive
)
6625 = create_tmp_var_raw (TREE_TYPE (new_var
));
6626 gimple_add_tmp_var (nv2
);
6627 ctx
->cb
.decl_map
->put (nv
, nv2
);
6628 x
= lang_hooks
.decls
.omp_clause_default_ctor
6629 (c
, nv2
, build_outer_var_ref (var
, ctx
));
6630 gimplify_and_add (x
, ilist
);
6631 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6633 gimplify_and_add (x
, dlist
);
6635 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv
);
6637 gimplify_and_add (x
, dlist
);
6640 && ctx
->scan_exclusive
6641 && TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
6643 tree nv2
= create_tmp_var_raw (TREE_TYPE (new_var
));
6644 gimple_add_tmp_var (nv2
);
6645 ctx
->cb
.decl_map
->put (new_vard
, nv2
);
6646 x
= lang_hooks
.decls
.omp_clause_dtor (c
, nv2
);
6648 gimplify_and_add (x
, dlist
);
6650 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6654 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
6656 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
6657 if (c_kind
== OMP_CLAUSE_IN_REDUCTION
6658 && is_omp_target (ctx
->stmt
))
6660 tree d
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
6661 tree oldv
= NULL_TREE
;
6663 if (DECL_HAS_VALUE_EXPR_P (d
))
6664 oldv
= DECL_VALUE_EXPR (d
);
6665 SET_DECL_VALUE_EXPR (d
, new_vard
);
6666 DECL_HAS_VALUE_EXPR_P (d
) = 1;
6667 lower_omp (&tseq
, ctx
);
6669 SET_DECL_VALUE_EXPR (d
, oldv
);
6672 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
6673 DECL_HAS_VALUE_EXPR_P (d
) = 0;
6677 lower_omp (&tseq
, ctx
);
6678 gimple_seq_add_seq (ilist
, tseq
);
6680 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6683 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
6684 lower_omp (&tseq
, ctx
);
6685 gimple_seq_add_seq (dlist
, tseq
);
6686 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6688 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
6692 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6699 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
6700 gcc_assert (TREE_CODE (TREE_TYPE (new_var
)) != ARRAY_TYPE
);
6701 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
6706 tree lab2
= NULL_TREE
;
6707 /* GOMP_taskgroup_reduction_register memsets the whole
6708 array to zero. If the initializer is zero, we don't
6709 need to initialize it again, just mark it as ever
6710 used unconditionally, i.e. cond = true. */
6711 if (initializer_zerop (x
))
6713 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6715 gimple_seq_add_stmt (ilist
, g
);
6720 if (!cond) { cond = true; new_var = x; } */
6721 if (!is_parallel_ctx (ctx
))
6723 tree condv
= create_tmp_var (boolean_type_node
);
6724 tree m
= build_simple_mem_ref (cond
);
6725 g
= gimple_build_assign (condv
, m
);
6726 gimple_seq_add_stmt (ilist
, g
);
6728 = create_artificial_label (UNKNOWN_LOCATION
);
6729 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
6730 g
= gimple_build_cond (NE_EXPR
, condv
,
6733 gimple_seq_add_stmt (ilist
, g
);
6734 gimple_seq_add_stmt (ilist
,
6735 gimple_build_label (lab1
));
6737 g
= gimple_build_assign (build_simple_mem_ref (cond
),
6739 gimple_seq_add_stmt (ilist
, g
);
6740 gimplify_assign (new_var
, x
, ilist
);
6742 gimple_seq_add_stmt (ilist
, gimple_build_label (lab2
));
6746 /* reduction(-:var) sums up the partial results, so it
6747 acts identically to reduction(+:var). */
6748 if (code
== MINUS_EXPR
)
6752 = (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
6753 tree new_vard
= new_var
;
6754 if (is_simd
&& omp_privatize_by_reference (var
))
6756 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
6757 new_vard
= TREE_OPERAND (new_var
, 0);
6758 gcc_assert (DECL_P (new_vard
));
6760 tree rvar
= NULL_TREE
, *rvarp
= NULL
, rvar2
= NULL_TREE
;
6762 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6763 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6766 && lower_rec_simd_input_clauses (new_var
, ctx
, &sctx
,
6770 if (new_vard
!= new_var
)
6772 SET_DECL_VALUE_EXPR (new_vard
,
6773 build_fold_addr_expr (lvar
));
6774 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
6777 tree ref
= build_outer_var_ref (var
, ctx
);
6781 if (ctx
->for_simd_scan_phase
)
6783 gimplify_assign (ivar
, ref
, &llist
[0]);
6784 ref
= build_outer_var_ref (var
, ctx
);
6785 gimplify_assign (ref
, rvar
, &llist
[3]);
6789 gimplify_assign (unshare_expr (ivar
), x
, &llist
[0]);
6794 simt_lane
= create_tmp_var (unsigned_type_node
);
6795 x
= build_call_expr_internal_loc
6796 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_BFLY
,
6797 TREE_TYPE (ivar
), 2, ivar
, simt_lane
);
6798 /* Make sure x is evaluated unconditionally. */
6799 tree bfly_var
= create_tmp_var (TREE_TYPE (ivar
));
6800 gimplify_assign (bfly_var
, x
, &llist
[2]);
6801 x
= build2 (code
, TREE_TYPE (ivar
), ivar
, bfly_var
);
6802 gimplify_assign (ivar
, x
, &llist
[2]);
6808 tree zero
= build_zero_cst (TREE_TYPE (ivar
));
6809 ivar2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6810 boolean_type_node
, ivar
,
6812 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6813 boolean_type_node
, ref
,
6816 x
= build2 (code
, TREE_TYPE (ref
), ref2
, ivar2
);
6818 x
= fold_convert (TREE_TYPE (ref
), x
);
6819 ref
= build_outer_var_ref (var
, ctx
);
6820 gimplify_assign (ref
, x
, &llist
[1]);
6825 lower_private_allocate (var
, new_var
, allocator
,
6826 allocate_ptr
, ilist
, ctx
,
6828 if (omp_privatize_by_reference (var
) && is_simd
)
6829 handle_simd_reference (clause_loc
, new_vard
, ilist
);
6830 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6831 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
6833 gimplify_assign (new_var
, x
, ilist
);
6836 tree ref
= build_outer_var_ref (var
, ctx
);
6837 tree new_var2
= new_var
;
6841 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
6843 = fold_build2_loc (clause_loc
, NE_EXPR
,
6844 boolean_type_node
, new_var
,
6846 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
,
6847 boolean_type_node
, ref
,
6850 x
= build2 (code
, TREE_TYPE (ref2
), ref2
, new_var2
);
6852 x
= fold_convert (TREE_TYPE (new_var
), x
);
6853 ref
= build_outer_var_ref (var
, ctx
);
6854 gimplify_assign (ref
, x
, dlist
);
6869 tree clobber
= build_clobber (TREE_TYPE (tskred_avar
));
6870 gimple_seq_add_stmt (ilist
, gimple_build_assign (tskred_avar
, clobber
));
6873 if (known_eq (sctx
.max_vf
, 1U))
6875 sctx
.is_simt
= false;
6876 if (ctx
->lastprivate_conditional_map
)
6878 if (gimple_omp_for_combined_into_p (ctx
->stmt
))
6880 /* Signal to lower_omp_1 that it should use parent context. */
6881 ctx
->combined_into_simd_safelen1
= true;
6882 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
6883 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6884 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
6886 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
6887 omp_context
*outer
= ctx
->outer
;
6888 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_SCAN
)
6889 outer
= outer
->outer
;
6890 tree
*v
= ctx
->lastprivate_conditional_map
->get (o
);
6891 tree po
= lookup_decl (OMP_CLAUSE_DECL (c
), outer
);
6892 tree
*pv
= outer
->lastprivate_conditional_map
->get (po
);
6898 /* When not vectorized, treat lastprivate(conditional:) like
6899 normal lastprivate, as there will be just one simd lane
6900 writing the privatized variable. */
6901 delete ctx
->lastprivate_conditional_map
;
6902 ctx
->lastprivate_conditional_map
= NULL
;
6907 if (nonconst_simd_if
)
6909 if (sctx
.lane
== NULL_TREE
)
6911 sctx
.idx
= create_tmp_var (unsigned_type_node
);
6912 sctx
.lane
= create_tmp_var (unsigned_type_node
);
6914 /* FIXME: For now. */
6915 sctx
.is_simt
= false;
6918 if (sctx
.lane
|| sctx
.is_simt
)
6920 uid
= create_tmp_var (ptr_type_node
, "simduid");
6921 /* Don't want uninit warnings on simduid, it is always uninitialized,
6922 but we use it not for the value, but for the DECL_UID only. */
6923 suppress_warning (uid
, OPT_Wuninitialized
);
6924 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SIMDUID_
);
6925 OMP_CLAUSE__SIMDUID__DECL (c
) = uid
;
6926 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
6927 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
6929 /* Emit calls denoting privatized variables and initializing a pointer to
6930 structure that holds private variables as fields after ompdevlow pass. */
6933 sctx
.simt_eargs
[0] = uid
;
6935 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, sctx
.simt_eargs
);
6936 gimple_call_set_lhs (g
, uid
);
6937 gimple_seq_add_stmt (ilist
, g
);
6938 sctx
.simt_eargs
.release ();
6940 simtrec
= create_tmp_var (ptr_type_node
, ".omp_simt");
6941 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC
, 1, uid
);
6942 gimple_call_set_lhs (g
, simtrec
);
6943 gimple_seq_add_stmt (ilist
, g
);
6947 gimple
*g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE
,
6948 2 + (nonconst_simd_if
!= NULL
),
6949 uid
, integer_zero_node
,
6951 gimple_call_set_lhs (g
, sctx
.lane
);
6952 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (ctx
->stmt
));
6953 gsi_insert_before_without_update (&gsi
, g
, GSI_SAME_STMT
);
6954 g
= gimple_build_assign (sctx
.lane
, INTEGER_CST
,
6955 build_int_cst (unsigned_type_node
, 0));
6956 gimple_seq_add_stmt (ilist
, g
);
6959 g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
6961 gimple_call_set_lhs (g
, sctx
.lastlane
);
6962 gimple_seq_add_stmt (dlist
, g
);
6963 gimple_seq_add_seq (dlist
, llist
[3]);
6965 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6968 tree simt_vf
= create_tmp_var (unsigned_type_node
);
6969 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VF
, 0);
6970 gimple_call_set_lhs (g
, simt_vf
);
6971 gimple_seq_add_stmt (dlist
, g
);
6973 tree t
= build_int_cst (unsigned_type_node
, 1);
6974 g
= gimple_build_assign (simt_lane
, INTEGER_CST
, t
);
6975 gimple_seq_add_stmt (dlist
, g
);
6977 t
= build_int_cst (unsigned_type_node
, 0);
6978 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
6979 gimple_seq_add_stmt (dlist
, g
);
6981 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
6982 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
6983 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
6984 gimple_seq_add_stmt (dlist
, gimple_build_goto (header
));
6985 gimple_seq_add_stmt (dlist
, gimple_build_label (body
));
6987 gimple_seq_add_seq (dlist
, llist
[2]);
6989 g
= gimple_build_assign (simt_lane
, LSHIFT_EXPR
, simt_lane
, integer_one_node
);
6990 gimple_seq_add_stmt (dlist
, g
);
6992 gimple_seq_add_stmt (dlist
, gimple_build_label (header
));
6993 g
= gimple_build_cond (LT_EXPR
, simt_lane
, simt_vf
, body
, end
);
6994 gimple_seq_add_stmt (dlist
, g
);
6996 gimple_seq_add_stmt (dlist
, gimple_build_label (end
));
6998 for (int i
= 0; i
< 2; i
++)
7001 tree vf
= create_tmp_var (unsigned_type_node
);
7002 g
= gimple_build_call_internal (IFN_GOMP_SIMD_VF
, 1, uid
);
7003 gimple_call_set_lhs (g
, vf
);
7004 gimple_seq
*seq
= i
== 0 ? ilist
: dlist
;
7005 gimple_seq_add_stmt (seq
, g
);
7006 tree t
= build_int_cst (unsigned_type_node
, 0);
7007 g
= gimple_build_assign (sctx
.idx
, INTEGER_CST
, t
);
7008 gimple_seq_add_stmt (seq
, g
);
7009 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7010 tree header
= create_artificial_label (UNKNOWN_LOCATION
);
7011 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7012 gimple_seq_add_stmt (seq
, gimple_build_goto (header
));
7013 gimple_seq_add_stmt (seq
, gimple_build_label (body
));
7014 gimple_seq_add_seq (seq
, llist
[i
]);
7015 t
= build_int_cst (unsigned_type_node
, 1);
7016 g
= gimple_build_assign (sctx
.idx
, PLUS_EXPR
, sctx
.idx
, t
);
7017 gimple_seq_add_stmt (seq
, g
);
7018 gimple_seq_add_stmt (seq
, gimple_build_label (header
));
7019 g
= gimple_build_cond (LT_EXPR
, sctx
.idx
, vf
, body
, end
);
7020 gimple_seq_add_stmt (seq
, g
);
7021 gimple_seq_add_stmt (seq
, gimple_build_label (end
));
7026 gimple_seq_add_seq (dlist
, sctx
.simt_dlist
);
7028 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT
, 1, simtrec
);
7029 gimple_seq_add_stmt (dlist
, g
);
7032 /* The copyin sequence is not to be executed by the main thread, since
7033 that would result in self-copies. Perhaps not visible to scalars,
7034 but it certainly is to C++ operator=. */
7037 x
= build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
),
7039 x
= build2 (NE_EXPR
, boolean_type_node
, x
,
7040 build_int_cst (TREE_TYPE (x
), 0));
7041 x
= build3 (COND_EXPR
, void_type_node
, x
, copyin_seq
, NULL
);
7042 gimplify_and_add (x
, ilist
);
7045 /* If any copyin variable is passed by reference, we must ensure the
7046 master thread doesn't modify it before it is copied over in all
7047 threads. Similarly for variables in both firstprivate and
7048 lastprivate clauses we need to ensure the lastprivate copying
7049 happens after firstprivate copying in all threads. And similarly
7050 for UDRs if initializer expression refers to omp_orig. */
7051 if (copyin_by_ref
|| lastprivate_firstprivate
7052 || (reduction_omp_orig_ref
7053 && !ctx
->scan_inclusive
7054 && !ctx
->scan_exclusive
))
7056 /* Don't add any barrier for #pragma omp simd or
7057 #pragma omp distribute. */
7058 if (!is_task_ctx (ctx
)
7059 && (gimple_code (ctx
->stmt
) != GIMPLE_OMP_FOR
7060 || gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_FOR
))
7061 gimple_seq_add_stmt (ilist
, omp_build_barrier (NULL_TREE
));
7064 /* If max_vf is non-zero, then we can use only a vectorization factor
7065 up to the max_vf we chose. So stick it into the safelen clause. */
7066 if (maybe_ne (sctx
.max_vf
, 0U))
7068 tree c
= omp_find_clause (gimple_omp_for_clauses (ctx
->stmt
),
7069 OMP_CLAUSE_SAFELEN
);
7070 poly_uint64 safe_len
;
7072 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c
), &safe_len
)
7073 && maybe_gt (safe_len
, sctx
.max_vf
)))
7075 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
7076 OMP_CLAUSE_SAFELEN_EXPR (c
) = build_int_cst (integer_type_node
,
7078 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (ctx
->stmt
);
7079 gimple_omp_for_set_clauses (ctx
->stmt
, c
);
7084 /* Create temporary variables for lastprivate(conditional:) implementation
7085 in context CTX with CLAUSES. */
/* NOTE(review): this extraction is missing several original source lines
   (return type line, braces, some statements); verify any edit against
   the complete file before applying.  */
7088 lower_lastprivate_conditional_clauses (tree
*clauses
, omp_context
*ctx
)
/* Lazily-determined iteration-counter type; stays NULL_TREE until the
   first lastprivate(conditional:) clause (or its _condtemp_ helper) is
   seen below.  */
7090 tree iter_type
= NULL_TREE
;
7091 tree cond_ptr
= NULL_TREE
;
7092 tree iter_var
= NULL_TREE
;
7093 bool is_simd
= (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7094 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
7095 tree next
= *clauses
;
/* Walk the clause chain; for each lastprivate(conditional:) clause pair
   it with a _condtemp_ helper clause and record the mapping from the
   privatized decl to its per-lane/per-thread condition variable in
   ctx->lastprivate_conditional_map.  */
7096 for (tree c
= *clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7097 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7098 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
7102 tree cc
= omp_find_clause (next
, OMP_CLAUSE__CONDTEMP_
);
7104 if (iter_type
== NULL_TREE
)
/* First conditional clause on this path: derive the counter type from
   the _condtemp_ decl and create the iterator variable, chaining it
   onto ctx->block_vars so it is emitted in the enclosing bind.  */
7106 iter_type
= TREE_TYPE (OMP_CLAUSE_DECL (cc
));
7107 iter_var
= create_tmp_var_raw (iter_type
);
7108 DECL_CONTEXT (iter_var
) = current_function_decl
;
7109 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7110 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7111 ctx
->block_vars
= iter_var
;
7113 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7114 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7115 OMP_CLAUSE_DECL (c3
) = iter_var
;
7116 OMP_CLAUSE_CHAIN (c3
) = *clauses
;
7118 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
7120 next
= OMP_CLAUSE_CHAIN (cc
);
7121 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7122 tree v
= lookup_decl (OMP_CLAUSE_DECL (cc
), ctx
);
7123 ctx
->lastprivate_conditional_map
->put (o
, v
);
/* No pre-existing _condtemp_ helper clauses were found: synthesize
   both the condition pointer and the iterator clauses here.  The
   counter type comes from the loop's iteration variable (FOR) or is
   plain unsigned (SECTIONS).  */
7126 if (iter_type
== NULL
)
7128 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
)
7130 struct omp_for_data fd
;
7131 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->stmt
), &fd
,
7133 iter_type
= unsigned_type_for (fd
.iter_type
);
7135 else if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_SECTIONS
)
7136 iter_type
= unsigned_type_node
;
7137 tree c2
= omp_find_clause (*clauses
, OMP_CLAUSE__CONDTEMP_
);
7141 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2
), ctx
);
7142 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
/* No _condtemp_ clause at all: create the condition pointer variable
   (pointer to the counter type) and a fresh _condtemp_ clause for it.  */
7146 cond_ptr
= create_tmp_var_raw (build_pointer_type (iter_type
));
7147 DECL_CONTEXT (cond_ptr
) = current_function_decl
;
7148 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr
) = 1;
7149 DECL_CHAIN (cond_ptr
) = ctx
->block_vars
;
7150 ctx
->block_vars
= cond_ptr
;
7151 c2
= build_omp_clause (UNKNOWN_LOCATION
,
7152 OMP_CLAUSE__CONDTEMP_
);
7153 OMP_CLAUSE_DECL (c2
) = cond_ptr
;
7154 OMP_CLAUSE_CHAIN (c2
) = *clauses
;
7157 iter_var
= create_tmp_var_raw (iter_type
);
7158 DECL_CONTEXT (iter_var
) = current_function_decl
;
7159 DECL_SEEN_IN_BIND_EXPR_P (iter_var
) = 1;
7160 DECL_CHAIN (iter_var
) = ctx
->block_vars
;
7161 ctx
->block_vars
= iter_var
;
7163 = build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
7164 OMP_CLAUSE__CONDTEMP__ITER (c3
) = 1;
7165 OMP_CLAUSE_DECL (c3
) = iter_var
;
7166 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c2
);
7167 OMP_CLAUSE_CHAIN (c2
) = c3
;
7168 ctx
->lastprivate_conditional_map
= new hash_map
<tree
, tree
>;
/* Per-clause condition variable V of the counter type; map the
   privatized decl O to it so the lowering of stores can bump/test it.  */
7170 tree v
= create_tmp_var_raw (iter_type
);
7171 DECL_CONTEXT (v
) = current_function_decl
;
7172 DECL_SEEN_IN_BIND_EXPR_P (v
) = 1;
7173 DECL_CHAIN (v
) = ctx
->block_vars
;
7174 ctx
->block_vars
= v
;
7175 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7176 ctx
->lastprivate_conditional_map
->put (o
, v
);
7181 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7182 both parallel and workshare constructs. PREDICATE may be NULL if it's
7183 always true. BODY_P is the sequence to insert early initialization
7184 if needed, STMT_LIST is where the non-conditional lastprivate handling
7185 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7186 section.  */
/* NOTE(review): this extraction is missing numerous original source lines
   (braces, else-arms, some call arguments); do not edit the logic without
   the complete file at hand.  */
7189 lower_lastprivate_clauses (tree clauses
, tree predicate
, gimple_seq
*body_p
,
7190 gimple_seq
*stmt_list
, gimple_seq
*cstmt_list
,
/* LABEL: fall-through label skipping the copy-out when PREDICATE is
   false.  CONDITIONAL_OFF: running offset/index into the _condtemp_
   buffer for conditional lastprivates.  POST_STMT_LIST: statements to
   append after LABEL (used for the safelen==1 conditional case).  */
7193 tree x
, c
, label
= NULL
, orig_clauses
= clauses
;
7194 bool par_clauses
= false;
7195 tree simduid
= NULL
, lastlane
= NULL
, simtcond
= NULL
, simtlast
= NULL
;
7196 unsigned HOST_WIDE_INT conditional_off
= 0;
7197 gimple_seq post_stmt_list
= NULL
;
7199 /* Early exit if there are no lastprivate or linear clauses. */
7200 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
7201 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LASTPRIVATE
7202 || (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_LINEAR
7203 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses
)))
7205 if (clauses
== NULL
)
7207 /* If this was a workshare clause, see if it had been combined
7208 with its parallel. In that case, look for the clauses on the
7209 parallel statement itself. */
7210 if (is_parallel_ctx (ctx
))
7214 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7217 clauses
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7218 OMP_CLAUSE_LASTPRIVATE
);
7219 if (clauses
== NULL
)
/* For simd loops, pick up the _simt_ and _simduid_ helper clauses so
   the copy-out can select the correct last lane.  */
7224 bool maybe_simt
= false;
7225 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7226 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7228 maybe_simt
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMT_
);
7229 simduid
= omp_find_clause (orig_clauses
, OMP_CLAUSE__SIMDUID_
);
7231 simduid
= OMP_CLAUSE__SIMDUID__DECL (simduid
);
/* Guard the whole copy-out block with PREDICATE: emit a conditional
   branch to LABEL_TRUE (do the copy-out) or LABEL (skip it).  Under
   SIMT the predicate is additionally reduced across lanes with
   IFN_GOMP_SIMT_VOTE_ANY.  */
7237 tree label_true
, arm1
, arm2
;
7238 enum tree_code pred_code
= TREE_CODE (predicate
);
7240 label
= create_artificial_label (UNKNOWN_LOCATION
);
7241 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
7242 if (TREE_CODE_CLASS (pred_code
) == tcc_comparison
)
7244 arm1
= TREE_OPERAND (predicate
, 0);
7245 arm2
= TREE_OPERAND (predicate
, 1);
7246 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7247 gimplify_expr (&arm2
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7252 gimplify_expr (&arm1
, stmt_list
, NULL
, is_gimple_val
, fb_rvalue
);
7253 arm2
= boolean_false_node
;
7254 pred_code
= NE_EXPR
;
7258 c
= build2 (pred_code
, boolean_type_node
, arm1
, arm2
);
7259 c
= fold_convert (integer_type_node
, c
);
7260 simtcond
= create_tmp_var (integer_type_node
);
7261 gimplify_assign (simtcond
, c
, stmt_list
);
7262 gcall
*g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
,
7264 c
= create_tmp_var (integer_type_node
);
7265 gimple_call_set_lhs (g
, c
);
7266 gimple_seq_add_stmt (stmt_list
, g
);
7267 stmt
= gimple_build_cond (NE_EXPR
, c
, integer_zero_node
,
7271 stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
, label
);
7272 gimple_seq_add_stmt (stmt_list
, stmt
);
7273 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label_true
));
/* Main per-clause loop; C is advanced manually at the bottom so that the
   chain can be switched to the combined parallel's clauses.  */
7276 tree cond_ptr
= NULL_TREE
;
7277 for (c
= clauses
; c
;)
7280 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7281 gimple_seq
*this_stmt_list
= stmt_list
;
7282 tree lab2
= NULL_TREE
;
/* lastprivate(conditional:): zero the per-clause condition counter V
   early (into BODY_P), then in the critical section CSTMT_LIST compare
   V against the shared counter in the _condtemp_ buffer and store the
   value only when V is larger (i.e. this iteration is later).  */
7284 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7285 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7286 && ctx
->lastprivate_conditional_map
7287 && !ctx
->combined_into_simd_safelen1
)
7289 gcc_assert (body_p
);
7292 if (cond_ptr
== NULL_TREE
)
7294 cond_ptr
= omp_find_clause (orig_clauses
, OMP_CLAUSE__CONDTEMP_
);
7295 cond_ptr
= OMP_CLAUSE_DECL (cond_ptr
);
7297 tree type
= TREE_TYPE (TREE_TYPE (cond_ptr
));
7298 tree o
= lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
7299 tree v
= *ctx
->lastprivate_conditional_map
->get (o
);
7300 gimplify_assign (v
, build_zero_cst (type
), body_p
);
7301 this_stmt_list
= cstmt_list
;
/* Pointer-typed _condtemp_: address counters via MEM_REF at a running
   byte offset; otherwise it is an array, indexed by element.  */
7303 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr
)))
7305 mem
= build2 (MEM_REF
, type
, cond_ptr
,
7306 build_int_cst (TREE_TYPE (cond_ptr
),
7308 conditional_off
+= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
7311 mem
= build4 (ARRAY_REF
, type
, cond_ptr
,
7312 size_int (conditional_off
++), NULL_TREE
, NULL_TREE
);
7313 tree mem2
= copy_node (mem
);
7314 gimple_seq seq
= NULL
;
7315 mem
= force_gimple_operand (mem
, &seq
, true, NULL_TREE
);
7316 gimple_seq_add_seq (this_stmt_list
, seq
);
7317 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
7318 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
7319 gimple
*g
= gimple_build_cond (GT_EXPR
, v
, mem
, lab1
, lab2
);
7320 gimple_seq_add_stmt (this_stmt_list
, g
);
7321 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab1
));
7322 gimplify_assign (mem2
, v
, this_stmt_list
);
/* combined_into_simd_safelen1: conditional lastprivate degenerates to
   ordinary lastprivate; divert its statements to POST_STMT_LIST so
   they run after LABEL.  */
7325 && ctx
->combined_into_simd_safelen1
7326 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7327 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
7328 && ctx
->lastprivate_conditional_map
)
7329 this_stmt_list
= &post_stmt_list
;
7331 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7332 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7333 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7335 var
= OMP_CLAUSE_DECL (c
);
/* For firstprivate+lastprivate on taskloop the privatized copy lives
   in the enclosing task context.  */
7336 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7337 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
7338 && is_taskloop_ctx (ctx
))
7340 gcc_checking_assert (ctx
->outer
&& is_task_ctx (ctx
->outer
));
7341 new_var
= lookup_decl (var
, ctx
->outer
);
7345 new_var
= lookup_decl (var
, ctx
);
7346 /* Avoid uninitialized warnings for lastprivate and
7347 for linear iterators. */
7349 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7350 || OMP_CLAUSE_LINEAR_NO_COPYIN (c
)))
7351 suppress_warning (new_var
, OPT_Wuninitialized
);
/* Non-SIMT simd: if the privatized var was expanded to an "omp simd
   array" element, re-index it with IFN_GOMP_SIMD_LAST_LANE so the
   copy-out reads the last lane that executed.  */
7354 if (!maybe_simt
&& simduid
&& DECL_HAS_VALUE_EXPR_P (new_var
))
7356 tree val
= DECL_VALUE_EXPR (new_var
)
;
7357 if (TREE_CODE (val
) == ARRAY_REF
7358 && VAR_P (TREE_OPERAND (val
, 0))
7359 && lookup_attribute ("omp simd array",
7360 DECL_ATTRIBUTES (TREE_OPERAND (val
,
7363 if (lastlane
== NULL
)
7365 lastlane
= create_tmp_var (unsigned_type_node
);
7367 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE
,
7369 TREE_OPERAND (val
, 1));
7370 gimple_call_set_lhs (g
, lastlane
);
7371 gimple_seq_add_stmt (this_stmt_list
, g
);
7373 new_var
= build4 (ARRAY_REF
, TREE_TYPE (val
),
7374 TREE_OPERAND (val
, 0), lastlane
,
7375 NULL_TREE
, NULL_TREE
);
7376 TREE_THIS_NOTRAP (new_var
) = 1;
/* SIMT: fetch the value from the last active lane via
   IFN_GOMP_SIMT_LAST_LANE + IFN_GOMP_SIMT_XCHG_IDX.  */
7379 else if (maybe_simt
)
7381 tree val
= (DECL_HAS_VALUE_EXPR_P (new_var
)
7382 ? DECL_VALUE_EXPR (new_var
)
7384 if (simtlast
== NULL
)
7386 simtlast
= create_tmp_var (unsigned_type_node
);
7387 gcall
*g
= gimple_build_call_internal
7388 (IFN_GOMP_SIMT_LAST_LANE
, 1, simtcond
);
7389 gimple_call_set_lhs (g
, simtlast
);
7390 gimple_seq_add_stmt (this_stmt_list
, g
);
7392 x
= build_call_expr_internal_loc
7393 (UNKNOWN_LOCATION
, IFN_GOMP_SIMT_XCHG_IDX
,
7394 TREE_TYPE (val
), 2, val
, simtlast
);
7395 new_var
= unshare_expr (new_var
);
7396 gimplify_assign (new_var
, x
, this_stmt_list
);
7397 new_var
= unshare_expr (new_var
);
/* Emit any language-specific copy-out sequence stashed on the clause,
   lowering it first; then drop it so it is not emitted twice.  */
7400 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7401 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
))
7403 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
), ctx
);
7404 gimple_seq_add_seq (this_stmt_list
,
7405 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
7406 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) = NULL
;
7408 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7409 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
))
7411 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
), ctx
);
7412 gimple_seq_add_seq (this_stmt_list
,
7413 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
7414 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) = NULL
;
7418 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7419 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)
7420 && is_taskloop_ctx (ctx
))
7422 tree ovar
= maybe_lookup_decl_in_outer_ctx (var
,
7424 if (is_global_var (ovar
))
/* Final copy-out: assign the privatized value back to the outer
   variable via the language hook's clause assignment op.  */
7428 x
= build_outer_var_ref (var
, ctx
, OMP_CLAUSE_LASTPRIVATE
);
7429 if (omp_privatize_by_reference (var
))
7430 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7431 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, new_var
);
7432 gimplify_and_add (x
, this_stmt_list
);
7435 gimple_seq_add_stmt (this_stmt_list
, gimple_build_label (lab2
));
/* Advance; when the workshare chain runs out, continue with the
   combined parallel's lastprivate clauses (see comment below).  */
7439 c
= OMP_CLAUSE_CHAIN (c
);
7440 if (c
== NULL
&& !par_clauses
)
7442 /* If this was a workshare clause, see if it had been combined
7443 with its parallel. In that case, continue looking for the
7444 clauses also on the parallel statement itself. */
7445 if (is_parallel_ctx (ctx
))
7449 if (ctx
== NULL
|| !is_parallel_ctx (ctx
))
7452 c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
7453 OMP_CLAUSE_LASTPRIVATE
);
7459 gimple_seq_add_stmt (stmt_list
, gimple_build_label (label
));
7460 gimple_seq_add_seq (stmt_list
, post_stmt_list
);
7463 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7464 (which might be a placeholder). INNER is true if this is an inner
7465 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7466 join markers. Generate the before-loop forking sequence in
7467 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7468 general form of these sequences is
7470 GOACC_REDUCTION_SETUP
7472 GOACC_REDUCTION_INIT
7474 GOACC_REDUCTION_FINI
7476 GOACC_REDUCTION_TEARDOWN. */
/* NOTE(review): this extraction is missing many original source lines
   (switch/case scaffolding, some arguments, braces); consult the full
   file before changing any logic here.  */
7479 lower_oacc_reductions (location_t loc
, tree clauses
, tree level
, bool inner
,
7480 gcall
*fork
, gcall
*private_marker
, gcall
*join
,
7481 gimple_seq
*fork_seq
, gimple_seq
*join_seq
,
/* Four sub-sequences assembled per clause and stitched around the
   FORK/JOIN markers at the end.  OFFSET tracks each variable's byte
   position in the shared reduction buffer.  */
7484 gimple_seq before_fork
= NULL
;
7485 gimple_seq after_fork
= NULL
;
7486 gimple_seq before_join
= NULL
;
7487 gimple_seq after_join
= NULL
;
7488 tree init_code
= NULL_TREE
, fini_code
= NULL_TREE
,
7489 setup_code
= NULL_TREE
, teardown_code
= NULL_TREE
;
7490 unsigned offset
= 0;
7492 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7493 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
)
7495 /* No 'reduction' clauses on OpenACC 'kernels'. */
7496 gcc_checking_assert (!is_oacc_kernels (ctx
));
7497 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7498 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
7500 tree orig
= OMP_CLAUSE_DECL (c
);
7501 tree var
= maybe_lookup_decl (orig
, ctx
);
7502 tree ref_to_res
= NULL_TREE
;
7503 tree incoming
, outgoing
, v1
, v2
, v3
;
7504 bool is_private
= false;
/* Canonicalize the reduction code: MINUS sums partial results like
   PLUS; truth &&/|| map to their bitwise forms.  The code is passed to
   the runtime as an integer constant OP.  */
7506 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
7507 if (rcode
== MINUS_EXPR
)
7509 else if (rcode
== TRUTH_ANDIF_EXPR
)
7510 rcode
= BIT_AND_EXPR
;
7511 else if (rcode
== TRUTH_ORIF_EXPR
)
7512 rcode
= BIT_IOR_EXPR
;
7513 tree op
= build_int_cst (unsigned_type_node
, rcode
);
7518 incoming
= outgoing
= var
;
7522 /* See if an outer construct also reduces this variable. */
7523 omp_context
*outer
= ctx
;
7525 while (omp_context
*probe
= outer
->outer
)
7527 enum gimple_code type
= gimple_code (probe
->stmt
);
7532 case GIMPLE_OMP_FOR
:
7533 cls
= gimple_omp_for_clauses (probe
->stmt
);
7536 case GIMPLE_OMP_TARGET
:
7537 /* No 'reduction' clauses inside OpenACC 'kernels'
7539 gcc_checking_assert (!is_oacc_kernels (probe
));
7541 if (!is_gimple_omp_offloaded (probe
->stmt
))
7544 cls
= gimple_omp_target_clauses (probe
->stmt
);
/* Scan the outer construct's clauses: a matching reduction means this
   axis feeds the outer one; first/private means the variable is
   private at the outer level.  */
7552 for (; cls
; cls
= OMP_CLAUSE_CHAIN (cls
))
7553 if (OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_REDUCTION
7554 && orig
== OMP_CLAUSE_DECL (cls
))
7556 incoming
= outgoing
= lookup_decl (orig
, probe
);
7557 goto has_outer_reduction
;
7559 else if ((OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_FIRSTPRIVATE
7560 || OMP_CLAUSE_CODE (cls
) == OMP_CLAUSE_PRIVATE
)
7561 && orig
== OMP_CLAUSE_DECL (cls
))
7569 /* This is the outermost construct with this reduction,
7570 see if there's a mapping for it. */
7571 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_TARGET
7572 && maybe_lookup_field (orig
, outer
) && !is_private
)
7574 ref_to_res
= build_receiver_ref (orig
, false, outer
);
7575 if (omp_privatize_by_reference (orig
))
7576 ref_to_res
= build_simple_mem_ref (ref_to_res
);
7578 tree type
= TREE_TYPE (var
);
7579 if (POINTER_TYPE_P (type
))
7580 type
= TREE_TYPE (type
);
/* Incoming value is the reduction identity (0 for +, 1 for *, etc.).  */
7583 incoming
= omp_reduction_init_op (loc
, rcode
, type
);
7587 /* Try to look at enclosing contexts for reduction var,
7588 use original if no mapping found. */
7590 omp_context
*c
= ctx
->outer
;
7593 t
= maybe_lookup_decl (orig
, c
);
7596 incoming
= outgoing
= (t
? t
: orig
);
7599 has_outer_reduction
:;
7603 ref_to_res
= integer_zero_node
;
/* By-reference reductions: materialize local temporaries V1..V3 holding
   the pointer, then operate through dereferences of them.  */
7605 if (omp_privatize_by_reference (orig
))
7607 tree type
= TREE_TYPE (var
);
7608 const char *id
= IDENTIFIER_POINTER (DECL_NAME (var
));
7612 tree x
= create_tmp_var (TREE_TYPE (type
), id
);
7613 gimplify_assign (var
, build_fold_addr_expr (x
), fork_seq
);
7616 v1
= create_tmp_var (type
, id
);
7617 v2
= create_tmp_var (type
, id
);
7618 v3
= create_tmp_var (type
, id
);
7620 gimplify_assign (v1
, var
, fork_seq
);
7621 gimplify_assign (v2
, var
, fork_seq
);
7622 gimplify_assign (v3
, var
, fork_seq
);
7624 var
= build_simple_mem_ref (var
);
7625 v1
= build_simple_mem_ref (v1
);
7626 v2
= build_simple_mem_ref (v2
);
7627 v3
= build_simple_mem_ref (v3
);
7628 outgoing
= build_simple_mem_ref (outgoing
);
7630 if (!TREE_CONSTANT (incoming
))
7631 incoming
= build_simple_mem_ref (incoming
);
7636 /* Determine position in reduction buffer, which may be used
7637 by target. The parser has ensured that this is not a
7638 variable-sized type. */
7639 fixed_size_mode mode
7640 = as_a
<fixed_size_mode
> (TYPE_MODE (TREE_TYPE (var
)));
7641 unsigned align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7642 offset
= (offset
+ align
- 1) & ~(align
- 1);
7643 tree off
= build_int_cst (sizetype
, offset
);
7644 offset
+= GET_MODE_SIZE (mode
);
/* The four IFN_GOACC_REDUCTION sub-operations are distinguished by an
   integer-constant first argument; build those constants once.  */
7648 init_code
= build_int_cst (integer_type_node
,
7649 IFN_GOACC_REDUCTION_INIT
);
7650 fini_code
= build_int_cst (integer_type_node
,
7651 IFN_GOACC_REDUCTION_FINI
);
7652 setup_code
= build_int_cst (integer_type_node
,
7653 IFN_GOACC_REDUCTION_SETUP
);
7654 teardown_code
= build_int_cst (integer_type_node
,
7655 IFN_GOACC_REDUCTION_TEARDOWN
);
/* SETUP before fork, INIT after fork, FINI before join, TEARDOWN after
   join — matching the shape documented in the function comment.  */
7659 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7660 TREE_TYPE (var
), 6, setup_code
,
7661 unshare_expr (ref_to_res
),
7662 incoming
, level
, op
, off
);
7664 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7665 TREE_TYPE (var
), 6, init_code
,
7666 unshare_expr (ref_to_res
),
7667 v1
, level
, op
, off
);
7669 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7670 TREE_TYPE (var
), 6, fini_code
,
7671 unshare_expr (ref_to_res
),
7672 v2
, level
, op
, off
);
7674 = build_call_expr_internal_loc (loc
, IFN_GOACC_REDUCTION
,
7675 TREE_TYPE (var
), 6, teardown_code
,
7676 ref_to_res
, v3
, level
, op
, off
);
7678 gimplify_assign (v1
, setup_call
, &before_fork
);
7679 gimplify_assign (v2
, init_call
, &after_fork
);
7680 gimplify_assign (v3
, fini_call
, &before_join
);
7681 gimplify_assign (outgoing
, teardown_call
, &after_join
);
7684 /* Now stitch things together. */
7685 gimple_seq_add_seq (fork_seq
, before_fork
);
7687 gimple_seq_add_stmt (fork_seq
, private_marker
);
7689 gimple_seq_add_stmt (fork_seq
, fork
);
7690 gimple_seq_add_seq (fork_seq
, after_fork
);
7692 gimple_seq_add_seq (join_seq
, before_join
);
7694 gimple_seq_add_stmt (join_seq
, join
);
7695 gimple_seq_add_seq (join_seq
, after_join
);
7698 /* Generate code to implement the REDUCTION clauses, append it
7699 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7700 that should be emitted also inside of the critical section,
7701 in that case clear *CLIST afterwards, otherwise leave it as is
7702 and let the caller emit it itself. */
7705 lower_reduction_clauses (tree clauses
, gimple_seq
*stmt_seqp
,
7706 gimple_seq
*clist
, omp_context
*ctx
)
7708 gimple_seq sub_seq
= NULL
;
7713 /* OpenACC loop reductions are handled elsewhere. */
7714 if (is_gimple_omp_oacc (ctx
->stmt
))
7717 /* SIMD reductions are handled in lower_rec_input_clauses. */
7718 if (gimple_code (ctx
->stmt
) == GIMPLE_OMP_FOR
7719 && gimple_omp_for_kind (ctx
->stmt
) == GF_OMP_FOR_KIND_SIMD
)
7722 /* inscan reductions are handled elsewhere. */
7723 if (ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
7726 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7727 update in that case, otherwise use a lock. */
7728 for (c
= clauses
; c
&& count
< 2; c
= OMP_CLAUSE_CHAIN (c
))
7729 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
7730 && !OMP_CLAUSE_REDUCTION_TASK (c
))
7732 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
7733 || TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7735 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7745 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7747 tree var
, ref
, new_var
, orig_var
;
7748 enum tree_code code
;
7749 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
7751 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
7752 || OMP_CLAUSE_REDUCTION_TASK (c
))
7755 enum omp_clause_code ccode
= OMP_CLAUSE_REDUCTION
;
7756 orig_var
= var
= OMP_CLAUSE_DECL (c
);
7757 if (TREE_CODE (var
) == MEM_REF
)
7759 var
= TREE_OPERAND (var
, 0);
7760 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
7761 var
= TREE_OPERAND (var
, 0);
7762 if (TREE_CODE (var
) == ADDR_EXPR
)
7763 var
= TREE_OPERAND (var
, 0);
7766 /* If this is a pointer or referenced based array
7767 section, the var could be private in the outer
7768 context e.g. on orphaned loop construct. Pretend this
7769 is private variable's outer reference. */
7770 ccode
= OMP_CLAUSE_PRIVATE
;
7771 if (TREE_CODE (var
) == INDIRECT_REF
)
7772 var
= TREE_OPERAND (var
, 0);
7775 if (is_variable_sized (var
))
7777 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
7778 var
= DECL_VALUE_EXPR (var
);
7779 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
7780 var
= TREE_OPERAND (var
, 0);
7781 gcc_assert (DECL_P (var
));
7784 new_var
= lookup_decl (var
, ctx
);
7785 if (var
== OMP_CLAUSE_DECL (c
)
7786 && omp_privatize_by_reference (var
))
7787 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7788 ref
= build_outer_var_ref (var
, ctx
, ccode
);
7789 code
= OMP_CLAUSE_REDUCTION_CODE (c
);
7791 /* reduction(-:var) sums up the partial results, so it acts
7792 identically to reduction(+:var). */
7793 if (code
== MINUS_EXPR
)
7796 bool is_truth_op
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
);
7799 tree addr
= build_fold_addr_expr_loc (clause_loc
, ref
);
7801 addr
= save_expr (addr
);
7802 ref
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (addr
)), addr
);
7803 tree new_var2
= new_var
;
7807 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7808 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7809 boolean_type_node
, new_var
, zero
);
7810 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7813 x
= fold_build2_loc (clause_loc
, code
, TREE_TYPE (new_var2
), ref2
,
7816 x
= fold_convert (TREE_TYPE (new_var
), x
);
7817 x
= build2 (OMP_ATOMIC
, void_type_node
, addr
, x
);
7818 OMP_ATOMIC_MEMORY_ORDER (x
) = OMP_MEMORY_ORDER_RELAXED
;
7819 gimplify_and_add (x
, stmt_seqp
);
7822 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
7824 tree d
= OMP_CLAUSE_DECL (c
);
7825 tree type
= TREE_TYPE (d
);
7826 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7827 tree i
= create_tmp_var (TREE_TYPE (v
));
7828 tree ptype
= build_pointer_type (TREE_TYPE (type
));
7829 tree bias
= TREE_OPERAND (d
, 1);
7830 d
= TREE_OPERAND (d
, 0);
7831 if (TREE_CODE (d
) == POINTER_PLUS_EXPR
)
7833 tree b
= TREE_OPERAND (d
, 1);
7834 b
= maybe_lookup_decl (b
, ctx
);
7837 b
= TREE_OPERAND (d
, 1);
7838 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
7840 if (integer_zerop (bias
))
7844 bias
= fold_convert_loc (clause_loc
, TREE_TYPE (b
), bias
);
7845 bias
= fold_build2_loc (clause_loc
, PLUS_EXPR
,
7846 TREE_TYPE (b
), b
, bias
);
7848 d
= TREE_OPERAND (d
, 0);
7850 /* For ref build_outer_var_ref already performs this, so
7851 only new_var needs a dereference. */
7852 if (TREE_CODE (d
) == INDIRECT_REF
)
7854 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7855 gcc_assert (omp_privatize_by_reference (var
)
7856 && var
== orig_var
);
7858 else if (TREE_CODE (d
) == ADDR_EXPR
)
7860 if (orig_var
== var
)
7862 new_var
= build_fold_addr_expr (new_var
);
7863 ref
= build_fold_addr_expr (ref
);
7868 gcc_assert (orig_var
== var
);
7869 if (omp_privatize_by_reference (var
))
7870 ref
= build_fold_addr_expr (ref
);
7874 tree t
= maybe_lookup_decl (v
, ctx
);
7878 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
7879 gimplify_expr (&v
, stmt_seqp
, NULL
, is_gimple_val
, fb_rvalue
);
7881 if (!integer_zerop (bias
))
7883 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
7884 new_var
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7885 TREE_TYPE (new_var
), new_var
,
7886 unshare_expr (bias
));
7887 ref
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
7888 TREE_TYPE (ref
), ref
, bias
);
7890 new_var
= fold_convert_loc (clause_loc
, ptype
, new_var
);
7891 ref
= fold_convert_loc (clause_loc
, ptype
, ref
);
7892 tree m
= create_tmp_var (ptype
);
7893 gimplify_assign (m
, new_var
, stmt_seqp
);
7895 m
= create_tmp_var (ptype
);
7896 gimplify_assign (m
, ref
, stmt_seqp
);
7898 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), stmt_seqp
);
7899 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
7900 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
7901 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (body
));
7902 tree priv
= build_simple_mem_ref_loc (clause_loc
, new_var
);
7903 tree out
= build_simple_mem_ref_loc (clause_loc
, ref
);
7904 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7906 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7907 tree decl_placeholder
7908 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
7909 SET_DECL_VALUE_EXPR (placeholder
, out
);
7910 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7911 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
7912 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
7913 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7914 gimple_seq_add_seq (&sub_seq
,
7915 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7916 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7917 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7918 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
7926 tree zero
= build_zero_cst (TREE_TYPE (out
));
7927 out2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7928 boolean_type_node
, out
, zero
);
7929 priv2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7930 boolean_type_node
, priv
, zero
);
7932 x
= build2 (code
, TREE_TYPE (out2
), out2
, priv2
);
7934 x
= fold_convert (TREE_TYPE (out
), x
);
7935 out
= unshare_expr (out
);
7936 gimplify_assign (out
, x
, &sub_seq
);
7938 gimple
*g
= gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
7939 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7940 gimple_seq_add_stmt (&sub_seq
, g
);
7941 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
7942 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
7943 gimple_seq_add_stmt (&sub_seq
, g
);
7944 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
7945 build_int_cst (TREE_TYPE (i
), 1));
7946 gimple_seq_add_stmt (&sub_seq
, g
);
7947 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, end
);
7948 gimple_seq_add_stmt (&sub_seq
, g
);
7949 gimple_seq_add_stmt (&sub_seq
, gimple_build_label (end
));
7951 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
7953 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
7955 if (omp_privatize_by_reference (var
)
7956 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
7958 ref
= build_fold_addr_expr_loc (clause_loc
, ref
);
7959 SET_DECL_VALUE_EXPR (placeholder
, ref
);
7960 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
7961 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
7962 gimple_seq_add_seq (&sub_seq
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
7963 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
7964 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
7968 tree new_var2
= new_var
;
7972 tree zero
= build_zero_cst (TREE_TYPE (new_var
));
7973 new_var2
= fold_build2_loc (clause_loc
, NE_EXPR
,
7974 boolean_type_node
, new_var
, zero
);
7975 ref2
= fold_build2_loc (clause_loc
, NE_EXPR
, boolean_type_node
,
7978 x
= build2 (code
, TREE_TYPE (ref
), ref2
, new_var2
);
7980 x
= fold_convert (TREE_TYPE (new_var
), x
);
7981 ref
= build_outer_var_ref (var
, ctx
);
7982 gimplify_assign (ref
, x
, &sub_seq
);
7986 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
),
7988 gimple_seq_add_stmt (stmt_seqp
, stmt
);
7990 gimple_seq_add_seq (stmt_seqp
, sub_seq
);
7994 gimple_seq_add_seq (stmt_seqp
, *clist
);
7998 stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
),
8000 gimple_seq_add_stmt (stmt_seqp
, stmt
);
8004 /* Generate code to implement the COPYPRIVATE clauses. */
8007 lower_copyprivate_clauses (tree clauses
, gimple_seq
*slist
, gimple_seq
*rlist
,
8012 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8014 tree var
, new_var
, ref
, x
;
8016 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8018 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYPRIVATE
)
8021 var
= OMP_CLAUSE_DECL (c
);
8022 by_ref
= use_pointer_for_field (var
, NULL
);
8024 ref
= build_sender_ref (var
, ctx
);
8025 x
= new_var
= lookup_decl_in_outer_ctx (var
, ctx
);
8028 x
= build_fold_addr_expr_loc (clause_loc
, new_var
);
8029 x
= fold_convert_loc (clause_loc
, TREE_TYPE (ref
), x
);
8031 gimplify_assign (ref
, x
, slist
);
8033 ref
= build_receiver_ref (var
, false, ctx
);
8036 ref
= fold_convert_loc (clause_loc
,
8037 build_pointer_type (TREE_TYPE (new_var
)),
8039 ref
= build_fold_indirect_ref_loc (clause_loc
, ref
);
8041 if (omp_privatize_by_reference (var
))
8043 ref
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), ref
);
8044 ref
= build_simple_mem_ref_loc (clause_loc
, ref
);
8045 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
8047 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, new_var
, ref
);
8048 gimplify_and_add (x
, rlist
);
8053 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
8054 and REDUCTION from the sender (aka parent) side. */
8057 lower_send_clauses (tree clauses
, gimple_seq
*ilist
, gimple_seq
*olist
,
8061 int ignored_looptemp
= 0;
8062 bool is_taskloop
= false;
8064 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
8065 by GOMP_taskloop. */
8066 if (is_task_ctx (ctx
) && gimple_omp_task_taskloop_p (ctx
->stmt
))
8068 ignored_looptemp
= 2;
8072 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8074 tree val
, ref
, x
, var
;
8075 bool by_ref
, do_in
= false, do_out
= false;
8076 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
8078 switch (OMP_CLAUSE_CODE (c
))
8080 case OMP_CLAUSE_PRIVATE
:
8081 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
8084 case OMP_CLAUSE_FIRSTPRIVATE
:
8085 case OMP_CLAUSE_COPYIN
:
8086 case OMP_CLAUSE_LASTPRIVATE
:
8087 case OMP_CLAUSE_IN_REDUCTION
:
8088 case OMP_CLAUSE__REDUCTEMP_
:
8090 case OMP_CLAUSE_REDUCTION
:
8091 if (is_task_ctx (ctx
) || OMP_CLAUSE_REDUCTION_TASK (c
))
8094 case OMP_CLAUSE_SHARED
:
8095 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8098 case OMP_CLAUSE__LOOPTEMP_
:
8099 if (ignored_looptemp
)
8109 val
= OMP_CLAUSE_DECL (c
);
8110 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8111 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
)
8112 && TREE_CODE (val
) == MEM_REF
)
8114 val
= TREE_OPERAND (val
, 0);
8115 if (TREE_CODE (val
) == POINTER_PLUS_EXPR
)
8116 val
= TREE_OPERAND (val
, 0);
8117 if (TREE_CODE (val
) == INDIRECT_REF
8118 || TREE_CODE (val
) == ADDR_EXPR
)
8119 val
= TREE_OPERAND (val
, 0);
8120 if (is_variable_sized (val
))
8124 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
8125 outer taskloop region. */
8126 omp_context
*ctx_for_o
= ctx
;
8128 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8129 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
8130 ctx_for_o
= ctx
->outer
;
8132 var
= lookup_decl_in_outer_ctx (val
, ctx_for_o
);
8134 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_COPYIN
8135 && is_global_var (var
)
8136 && (val
== OMP_CLAUSE_DECL (c
)
8137 || !is_task_ctx (ctx
)
8138 || (TREE_CODE (TREE_TYPE (val
)) != POINTER_TYPE
8139 && (TREE_CODE (TREE_TYPE (val
)) != REFERENCE_TYPE
8140 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val
)))
8141 != POINTER_TYPE
)))))
8144 t
= omp_member_access_dummy_var (var
);
8147 var
= DECL_VALUE_EXPR (var
);
8148 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx_for_o
);
8150 var
= unshare_and_remap (var
, t
, o
);
8152 var
= unshare_expr (var
);
8155 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
8157 /* Handle taskloop firstprivate/lastprivate, where the
8158 lastprivate on GIMPLE_OMP_TASK is represented as
8159 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
8160 tree f
= lookup_sfield ((splay_tree_key
) &DECL_UID (val
), ctx
);
8161 x
= omp_build_component_ref (ctx
->sender_decl
, f
);
8162 if (use_pointer_for_field (val
, ctx
))
8163 var
= build_fold_addr_expr (var
);
8164 gimplify_assign (x
, var
, ilist
);
8165 DECL_ABSTRACT_ORIGIN (f
) = NULL
;
8169 if (((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8170 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IN_REDUCTION
)
8171 || val
== OMP_CLAUSE_DECL (c
))
8172 && is_variable_sized (val
))
8174 by_ref
= use_pointer_for_field (val
, NULL
);
8176 switch (OMP_CLAUSE_CODE (c
))
8178 case OMP_CLAUSE_FIRSTPRIVATE
:
8179 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
)
8181 && is_task_ctx (ctx
))
8182 suppress_warning (var
);
8186 case OMP_CLAUSE_PRIVATE
:
8187 case OMP_CLAUSE_COPYIN
:
8188 case OMP_CLAUSE__LOOPTEMP_
:
8189 case OMP_CLAUSE__REDUCTEMP_
:
8193 case OMP_CLAUSE_LASTPRIVATE
:
8194 if (by_ref
|| omp_privatize_by_reference (val
))
8196 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8203 if (lang_hooks
.decls
.omp_private_outer_ref (val
))
8208 case OMP_CLAUSE_REDUCTION
:
8209 case OMP_CLAUSE_IN_REDUCTION
:
8211 if (val
== OMP_CLAUSE_DECL (c
))
8213 if (is_task_ctx (ctx
))
8214 by_ref
= use_pointer_for_field (val
, ctx
);
8216 do_out
= !(by_ref
|| omp_privatize_by_reference (val
));
8219 by_ref
= TREE_CODE (TREE_TYPE (val
)) == ARRAY_TYPE
;
8228 ref
= build_sender_ref (val
, ctx
);
8229 x
= by_ref
? build_fold_addr_expr_loc (clause_loc
, var
) : var
;
8230 gimplify_assign (ref
, x
, ilist
);
8231 if (is_task_ctx (ctx
))
8232 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref
, 1)) = NULL
;
8237 ref
= build_sender_ref (val
, ctx
);
8238 gimplify_assign (var
, ref
, olist
);
8243 /* Generate code to implement SHARED from the sender (aka parent)
8244 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8245 list things that got automatically shared. */
8248 lower_send_shared_vars (gimple_seq
*ilist
, gimple_seq
*olist
, omp_context
*ctx
)
8250 tree var
, ovar
, nvar
, t
, f
, x
, record_type
;
8252 if (ctx
->record_type
== NULL
)
8255 record_type
= ctx
->srecord_type
? ctx
->srecord_type
: ctx
->record_type
;
8256 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
8258 ovar
= DECL_ABSTRACT_ORIGIN (f
);
8259 if (!ovar
|| TREE_CODE (ovar
) == FIELD_DECL
)
8262 nvar
= maybe_lookup_decl (ovar
, ctx
);
8264 || !DECL_HAS_VALUE_EXPR_P (nvar
)
8265 || (ctx
->allocate_map
8266 && ctx
->allocate_map
->get (ovar
)))
8269 /* If CTX is a nested parallel directive. Find the immediately
8270 enclosing parallel or workshare construct that contains a
8271 mapping for OVAR. */
8272 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
8274 t
= omp_member_access_dummy_var (var
);
8277 var
= DECL_VALUE_EXPR (var
);
8278 tree o
= maybe_lookup_decl_in_outer_ctx (t
, ctx
);
8280 var
= unshare_and_remap (var
, t
, o
);
8282 var
= unshare_expr (var
);
8285 if (use_pointer_for_field (ovar
, ctx
))
8287 x
= build_sender_ref (ovar
, ctx
);
8288 if (TREE_CODE (TREE_TYPE (f
)) == ARRAY_TYPE
8289 && TREE_TYPE (f
) == TREE_TYPE (ovar
))
8291 gcc_assert (is_parallel_ctx (ctx
)
8292 && DECL_ARTIFICIAL (ovar
));
8293 /* _condtemp_ clause. */
8294 var
= build_constructor (TREE_TYPE (x
), NULL
);
8297 var
= build_fold_addr_expr (var
);
8298 gimplify_assign (x
, var
, ilist
);
8302 x
= build_sender_ref (ovar
, ctx
);
8303 gimplify_assign (x
, var
, ilist
);
8305 if (!TREE_READONLY (var
)
8306 /* We don't need to receive a new reference to a result
8307 or parm decl. In fact we may not store to it as we will
8308 invalidate any pending RSO and generate wrong gimple
8310 && !((TREE_CODE (var
) == RESULT_DECL
8311 || TREE_CODE (var
) == PARM_DECL
)
8312 && DECL_BY_REFERENCE (var
)))
8314 x
= build_sender_ref (ovar
, ctx
);
8315 gimplify_assign (var
, x
, olist
);
8321 /* Emit an OpenACC head marker call, encapulating the partitioning and
8322 other information that must be processed by the target compiler.
8323 Return the maximum number of dimensions the associated loop might
8324 be partitioned over. */
8327 lower_oacc_head_mark (location_t loc
, tree ddvar
, tree clauses
,
8328 gimple_seq
*seq
, omp_context
*ctx
)
8330 unsigned levels
= 0;
8332 tree gang_static
= NULL_TREE
;
8333 auto_vec
<tree
, 5> args
;
8335 args
.quick_push (build_int_cst
8336 (integer_type_node
, IFN_UNIQUE_OACC_HEAD_MARK
));
8337 args
.quick_push (ddvar
);
8338 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8340 switch (OMP_CLAUSE_CODE (c
))
8342 case OMP_CLAUSE_GANG
:
8343 tag
|= OLF_DIM_GANG
;
8344 gang_static
= OMP_CLAUSE_GANG_STATIC_EXPR (c
);
8345 /* static:* is represented by -1, and we can ignore it, as
8346 scheduling is always static. */
8347 if (gang_static
&& integer_minus_onep (gang_static
))
8348 gang_static
= NULL_TREE
;
8352 case OMP_CLAUSE_WORKER
:
8353 tag
|= OLF_DIM_WORKER
;
8357 case OMP_CLAUSE_VECTOR
:
8358 tag
|= OLF_DIM_VECTOR
;
8362 case OMP_CLAUSE_SEQ
:
8366 case OMP_CLAUSE_AUTO
:
8370 case OMP_CLAUSE_INDEPENDENT
:
8371 tag
|= OLF_INDEPENDENT
;
8374 case OMP_CLAUSE_TILE
:
8378 case OMP_CLAUSE_REDUCTION
:
8379 tag
|= OLF_REDUCTION
;
8389 if (DECL_P (gang_static
))
8390 gang_static
= build_outer_var_ref (gang_static
, ctx
);
8391 tag
|= OLF_GANG_STATIC
;
8394 omp_context
*tgt
= enclosing_target_ctx (ctx
);
8395 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8397 else if (is_oacc_kernels (tgt
))
8398 /* Not using this loops handling inside OpenACC 'kernels' regions. */
8400 else if (is_oacc_kernels_decomposed_part (tgt
))
8405 /* In a parallel region, loops are implicitly INDEPENDENT. */
8406 if (!tgt
|| is_oacc_parallel_or_serial (tgt
))
8407 tag
|= OLF_INDEPENDENT
;
8409 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
8410 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
8411 if (tgt
&& is_oacc_kernels_decomposed_part (tgt
))
8413 gcc_assert (tag
& (OLF_SEQ
| OLF_INDEPENDENT
));
8414 gcc_assert (!(tag
& OLF_AUTO
));
8418 /* Tiling could use all 3 levels. */
8422 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
8423 Ensure at least one level, or 2 for possible auto
8425 bool maybe_auto
= !(tag
& (((GOMP_DIM_MASK (GOMP_DIM_MAX
) - 1)
8426 << OLF_DIM_BASE
) | OLF_SEQ
));
8428 if (levels
< 1u + maybe_auto
)
8429 levels
= 1u + maybe_auto
;
8432 args
.quick_push (build_int_cst (integer_type_node
, levels
));
8433 args
.quick_push (build_int_cst (integer_type_node
, tag
));
8435 args
.quick_push (gang_static
);
8437 gcall
*call
= gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
8438 gimple_set_location (call
, loc
);
8439 gimple_set_lhs (call
, ddvar
);
8440 gimple_seq_add_stmt (seq
, call
);
8445 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8446 partitioning level of the enclosed region. */
8449 lower_oacc_loop_marker (location_t loc
, tree ddvar
, bool head
,
8450 tree tofollow
, gimple_seq
*seq
)
8452 int marker_kind
= (head
? IFN_UNIQUE_OACC_HEAD_MARK
8453 : IFN_UNIQUE_OACC_TAIL_MARK
);
8454 tree marker
= build_int_cst (integer_type_node
, marker_kind
);
8455 int nargs
= 2 + (tofollow
!= NULL_TREE
);
8456 gcall
*call
= gimple_build_call_internal (IFN_UNIQUE
, nargs
,
8457 marker
, ddvar
, tofollow
);
8458 gimple_set_location (call
, loc
);
8459 gimple_set_lhs (call
, ddvar
);
8460 gimple_seq_add_stmt (seq
, call
);
8463 /* Generate the before and after OpenACC loop sequences. CLAUSES are
8464 the loop clauses, from which we extract reductions. Initialize
8468 lower_oacc_head_tail (location_t loc
, tree clauses
, gcall
*private_marker
,
8469 gimple_seq
*head
, gimple_seq
*tail
, omp_context
*ctx
)
8472 tree ddvar
= create_tmp_var (integer_type_node
, ".data_dep");
8473 gimple_seq_add_stmt (head
, gimple_build_assign (ddvar
, integer_zero_node
));
8475 unsigned count
= lower_oacc_head_mark (loc
, ddvar
, clauses
, head
, ctx
);
8479 gimple_set_location (private_marker
, loc
);
8480 gimple_call_set_lhs (private_marker
, ddvar
);
8481 gimple_call_set_arg (private_marker
, 1, ddvar
);
8484 tree fork_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_FORK
);
8485 tree join_kind
= build_int_cst (unsigned_type_node
, IFN_UNIQUE_OACC_JOIN
);
8488 for (unsigned done
= 1; count
; count
--, done
++)
8490 gimple_seq fork_seq
= NULL
;
8491 gimple_seq join_seq
= NULL
;
8493 tree place
= build_int_cst (integer_type_node
, -1);
8494 gcall
*fork
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8495 fork_kind
, ddvar
, place
);
8496 gimple_set_location (fork
, loc
);
8497 gimple_set_lhs (fork
, ddvar
);
8499 gcall
*join
= gimple_build_call_internal (IFN_UNIQUE
, 3,
8500 join_kind
, ddvar
, place
);
8501 gimple_set_location (join
, loc
);
8502 gimple_set_lhs (join
, ddvar
);
8504 /* Mark the beginning of this level sequence. */
8506 lower_oacc_loop_marker (loc
, ddvar
, true,
8507 build_int_cst (integer_type_node
, count
),
8509 lower_oacc_loop_marker (loc
, ddvar
, false,
8510 build_int_cst (integer_type_node
, done
),
8513 lower_oacc_reductions (loc
, clauses
, place
, inner
,
8514 fork
, (count
== 1) ? private_marker
: NULL
,
8515 join
, &fork_seq
, &join_seq
, ctx
);
8517 /* Append this level to head. */
8518 gimple_seq_add_seq (head
, fork_seq
);
8519 /* Prepend it to tail. */
8520 gimple_seq_add_seq (&join_seq
, *tail
);
8526 /* Mark the end of the sequence. */
8527 lower_oacc_loop_marker (loc
, ddvar
, true, NULL_TREE
, head
);
8528 lower_oacc_loop_marker (loc
, ddvar
, false, NULL_TREE
, tail
);
8531 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8532 catch handler and return it. This prevents programs from violating the
8533 structured block semantics with throws. */
8536 maybe_catch_exception (gimple_seq body
)
8541 if (!flag_exceptions
)
8544 if (lang_hooks
.eh_protect_cleanup_actions
!= NULL
)
8545 decl
= lang_hooks
.eh_protect_cleanup_actions ();
8547 decl
= builtin_decl_explicit (BUILT_IN_TRAP
);
8549 g
= gimple_build_eh_must_not_throw (decl
);
8550 g
= gimple_build_try (body
, gimple_seq_alloc_with_stmt (g
),
8553 return gimple_seq_alloc_with_stmt (g
);
8557 /* Routines to lower OMP directives into OMP-GIMPLE. */
8559 /* If ctx is a worksharing context inside of a cancellable parallel
8560 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8561 and conditional branch to parallel's cancel_label to handle
8562 cancellation in the implicit barrier. */
8565 maybe_add_implicit_barrier_cancel (omp_context
*ctx
, gimple
*omp_return
,
8568 gcc_assert (gimple_code (omp_return
) == GIMPLE_OMP_RETURN
);
8569 if (gimple_omp_return_nowait_p (omp_return
))
8571 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
8572 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
8573 && outer
->cancellable
)
8575 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_CANCEL
);
8576 tree c_bool_type
= TREE_TYPE (TREE_TYPE (fndecl
));
8577 tree lhs
= create_tmp_var (c_bool_type
);
8578 gimple_omp_return_set_lhs (omp_return
, lhs
);
8579 tree fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
8580 gimple
*g
= gimple_build_cond (NE_EXPR
, lhs
,
8581 fold_convert (c_bool_type
,
8582 boolean_false_node
),
8583 outer
->cancel_label
, fallthru_label
);
8584 gimple_seq_add_stmt (body
, g
);
8585 gimple_seq_add_stmt (body
, gimple_build_label (fallthru_label
));
8587 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
8588 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
8592 /* Find the first task_reduction or reduction clause or return NULL
8593 if there are none. */
8596 omp_task_reductions_find_first (tree clauses
, enum tree_code code
,
8597 enum omp_clause_code ccode
)
8601 clauses
= omp_find_clause (clauses
, ccode
);
8602 if (clauses
== NULL_TREE
)
8604 if (ccode
!= OMP_CLAUSE_REDUCTION
8605 || code
== OMP_TASKLOOP
8606 || OMP_CLAUSE_REDUCTION_TASK (clauses
))
8608 clauses
= OMP_CLAUSE_CHAIN (clauses
);
8612 static void lower_omp_task_reductions (omp_context
*, enum tree_code
, tree
,
8613 gimple_seq
*, gimple_seq
*);
8615 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8616 CTX is the enclosing OMP context for the current statement. */
8619 lower_omp_sections (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8621 tree block
, control
;
8622 gimple_stmt_iterator tgsi
;
8623 gomp_sections
*stmt
;
8625 gbind
*new_stmt
, *bind
;
8626 gimple_seq ilist
, dlist
, olist
, tred_dlist
= NULL
, clist
= NULL
, new_body
;
8628 stmt
= as_a
<gomp_sections
*> (gsi_stmt (*gsi_p
));
8630 push_gimplify_context ();
8636 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt
),
8637 OMP_SECTIONS
, OMP_CLAUSE_REDUCTION
);
8638 tree rtmp
= NULL_TREE
;
8641 tree type
= build_pointer_type (pointer_sized_int_node
);
8642 tree temp
= create_tmp_var (type
);
8643 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8644 OMP_CLAUSE_DECL (c
) = temp
;
8645 OMP_CLAUSE_CHAIN (c
) = gimple_omp_sections_clauses (stmt
);
8646 gimple_omp_sections_set_clauses (stmt
, c
);
8647 lower_omp_task_reductions (ctx
, OMP_SECTIONS
,
8648 gimple_omp_sections_clauses (stmt
),
8649 &ilist
, &tred_dlist
);
8651 rtmp
= make_ssa_name (type
);
8652 gimple_seq_add_stmt (&ilist
, gimple_build_assign (rtmp
, temp
));
8655 tree
*clauses_ptr
= gimple_omp_sections_clauses_ptr (stmt
);
8656 lower_lastprivate_conditional_clauses (clauses_ptr
, ctx
);
8658 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt
),
8659 &ilist
, &dlist
, ctx
, NULL
);
8661 control
= create_tmp_var (unsigned_type_node
, ".section");
8662 gimple_omp_sections_set_control (stmt
, control
);
8664 new_body
= gimple_omp_body (stmt
);
8665 gimple_omp_set_body (stmt
, NULL
);
8666 tgsi
= gsi_start (new_body
);
8667 for (; !gsi_end_p (tgsi
); gsi_next (&tgsi
))
8672 sec_start
= gsi_stmt (tgsi
);
8673 sctx
= maybe_lookup_ctx (sec_start
);
8676 lower_omp (gimple_omp_body_ptr (sec_start
), sctx
);
8677 gsi_insert_seq_after (&tgsi
, gimple_omp_body (sec_start
),
8678 GSI_CONTINUE_LINKING
);
8679 gimple_omp_set_body (sec_start
, NULL
);
8681 if (gsi_one_before_end_p (tgsi
))
8683 gimple_seq l
= NULL
;
8684 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt
), NULL
,
8685 &ilist
, &l
, &clist
, ctx
);
8686 gsi_insert_seq_after (&tgsi
, l
, GSI_CONTINUE_LINKING
);
8687 gimple_omp_section_set_last (sec_start
);
8690 gsi_insert_after (&tgsi
, gimple_build_omp_return (false),
8691 GSI_CONTINUE_LINKING
);
8694 block
= make_node (BLOCK
);
8695 bind
= gimple_build_bind (NULL
, new_body
, block
);
8698 lower_reduction_clauses (gimple_omp_sections_clauses (stmt
), &olist
,
8702 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8703 gcall
*g
= gimple_build_call (fndecl
, 0);
8704 gimple_seq_add_stmt (&olist
, g
);
8705 gimple_seq_add_seq (&olist
, clist
);
8706 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8707 g
= gimple_build_call (fndecl
, 0);
8708 gimple_seq_add_stmt (&olist
, g
);
8711 block
= make_node (BLOCK
);
8712 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
8713 gsi_replace (gsi_p
, new_stmt
, true);
8715 pop_gimplify_context (new_stmt
);
8716 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
8717 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
8718 if (BLOCK_VARS (block
))
8719 TREE_USED (block
) = 1;
8722 gimple_seq_add_seq (&new_body
, ilist
);
8723 gimple_seq_add_stmt (&new_body
, stmt
);
8724 gimple_seq_add_stmt (&new_body
, gimple_build_omp_sections_switch ());
8725 gimple_seq_add_stmt (&new_body
, bind
);
8727 t
= gimple_build_omp_continue (control
, control
);
8728 gimple_seq_add_stmt (&new_body
, t
);
8730 gimple_seq_add_seq (&new_body
, olist
);
8731 if (ctx
->cancellable
)
8732 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
8733 gimple_seq_add_seq (&new_body
, dlist
);
8735 new_body
= maybe_catch_exception (new_body
);
8737 bool nowait
= omp_find_clause (gimple_omp_sections_clauses (stmt
),
8738 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8739 t
= gimple_build_omp_return (nowait
);
8740 gimple_seq_add_stmt (&new_body
, t
);
8741 gimple_seq_add_seq (&new_body
, tred_dlist
);
8742 maybe_add_implicit_barrier_cancel (ctx
, t
, &new_body
);
8745 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
8747 gimple_bind_set_body (new_stmt
, new_body
);
8751 /* A subroutine of lower_omp_single. Expand the simple form of
8752 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8754 if (GOMP_single_start ())
8756 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8758 FIXME. It may be better to delay expanding the logic of this until
8759 pass_expand_omp. The expanded logic may make the job more difficult
8760 to a synchronization analysis pass. */
8763 lower_omp_single_simple (gomp_single
*single_stmt
, gimple_seq
*pre_p
)
8765 location_t loc
= gimple_location (single_stmt
);
8766 tree tlabel
= create_artificial_label (loc
);
8767 tree flabel
= create_artificial_label (loc
);
8768 gimple
*call
, *cond
;
8771 decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START
);
8772 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (decl
)));
8773 call
= gimple_build_call (decl
, 0);
8774 gimple_call_set_lhs (call
, lhs
);
8775 gimple_seq_add_stmt (pre_p
, call
);
8777 cond
= gimple_build_cond (EQ_EXPR
, lhs
,
8778 fold_convert_loc (loc
, TREE_TYPE (lhs
),
8781 gimple_seq_add_stmt (pre_p
, cond
);
8782 gimple_seq_add_stmt (pre_p
, gimple_build_label (tlabel
));
8783 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8784 gimple_seq_add_stmt (pre_p
, gimple_build_label (flabel
));
8788 /* A subroutine of lower_omp_single. Expand the simple form of
8789 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8791 #pragma omp single copyprivate (a, b, c)
8793 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8796 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8802 GOMP_single_copy_end (©out);
8813 FIXME. It may be better to delay expanding the logic of this until
8814 pass_expand_omp. The expanded logic may make the job more difficult
8815 to a synchronization analysis pass. */
8818 lower_omp_single_copy (gomp_single
*single_stmt
, gimple_seq
*pre_p
,
8821 tree ptr_type
, t
, l0
, l1
, l2
, bfn_decl
;
8822 gimple_seq copyin_seq
;
8823 location_t loc
= gimple_location (single_stmt
);
8825 ctx
->sender_decl
= create_tmp_var (ctx
->record_type
, ".omp_copy_o");
8827 ptr_type
= build_pointer_type (ctx
->record_type
);
8828 ctx
->receiver_decl
= create_tmp_var (ptr_type
, ".omp_copy_i");
8830 l0
= create_artificial_label (loc
);
8831 l1
= create_artificial_label (loc
);
8832 l2
= create_artificial_label (loc
);
8834 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START
);
8835 t
= build_call_expr_loc (loc
, bfn_decl
, 0);
8836 t
= fold_convert_loc (loc
, ptr_type
, t
);
8837 gimplify_assign (ctx
->receiver_decl
, t
, pre_p
);
8839 t
= build2 (EQ_EXPR
, boolean_type_node
, ctx
->receiver_decl
,
8840 build_int_cst (ptr_type
, 0));
8841 t
= build3 (COND_EXPR
, void_type_node
, t
,
8842 build_and_jump (&l0
), build_and_jump (&l1
));
8843 gimplify_and_add (t
, pre_p
);
8845 gimple_seq_add_stmt (pre_p
, gimple_build_label (l0
));
8847 gimple_seq_add_seq (pre_p
, gimple_omp_body (single_stmt
));
8850 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt
), pre_p
,
8853 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
8854 bfn_decl
= builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END
);
8855 t
= build_call_expr_loc (loc
, bfn_decl
, 1, t
);
8856 gimplify_and_add (t
, pre_p
);
8858 t
= build_and_jump (&l2
);
8859 gimplify_and_add (t
, pre_p
);
8861 gimple_seq_add_stmt (pre_p
, gimple_build_label (l1
));
8863 gimple_seq_add_seq (pre_p
, copyin_seq
);
8865 gimple_seq_add_stmt (pre_p
, gimple_build_label (l2
));
8869 /* Expand code for an OpenMP single directive. */
8872 lower_omp_single (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8875 gomp_single
*single_stmt
= as_a
<gomp_single
*> (gsi_stmt (*gsi_p
));
8877 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8879 push_gimplify_context ();
8881 block
= make_node (BLOCK
);
8882 bind
= gimple_build_bind (NULL
, NULL
, block
);
8883 gsi_replace (gsi_p
, bind
, true);
8886 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt
),
8887 &bind_body
, &dlist
, ctx
, NULL
);
8888 lower_omp (gimple_omp_body_ptr (single_stmt
), ctx
);
8890 gimple_seq_add_stmt (&bind_body
, single_stmt
);
8892 if (ctx
->record_type
)
8893 lower_omp_single_copy (single_stmt
, &bind_body
, ctx
);
8895 lower_omp_single_simple (single_stmt
, &bind_body
);
8897 gimple_omp_set_body (single_stmt
, NULL
);
8899 gimple_seq_add_seq (&bind_body
, dlist
);
8901 bind_body
= maybe_catch_exception (bind_body
);
8903 bool nowait
= omp_find_clause (gimple_omp_single_clauses (single_stmt
),
8904 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8905 gimple
*g
= gimple_build_omp_return (nowait
);
8906 gimple_seq_add_stmt (&bind_body_tail
, g
);
8907 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
8908 if (ctx
->record_type
)
8910 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
8911 tree clobber
= build_clobber (ctx
->record_type
);
8912 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
8913 clobber
), GSI_SAME_STMT
);
8915 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
8916 gimple_bind_set_body (bind
, bind_body
);
8918 pop_gimplify_context (bind
);
8920 gimple_bind_append_vars (bind
, ctx
->block_vars
);
8921 BLOCK_VARS (block
) = ctx
->block_vars
;
8922 if (BLOCK_VARS (block
))
8923 TREE_USED (block
) = 1;
8927 /* Lower code for an OMP scope directive. */
8930 lower_omp_scope (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
8933 gimple
*scope_stmt
= gsi_stmt (*gsi_p
);
8935 gimple_seq bind_body
, bind_body_tail
= NULL
, dlist
;
8936 gimple_seq tred_dlist
= NULL
;
8938 push_gimplify_context ();
8940 block
= make_node (BLOCK
);
8941 bind
= gimple_build_bind (NULL
, NULL
, block
);
8942 gsi_replace (gsi_p
, bind
, true);
8947 = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt
),
8948 OMP_SCOPE
, OMP_CLAUSE_REDUCTION
);
8951 tree type
= build_pointer_type (pointer_sized_int_node
);
8952 tree temp
= create_tmp_var (type
);
8953 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
8954 OMP_CLAUSE_DECL (c
) = temp
;
8955 OMP_CLAUSE_CHAIN (c
) = gimple_omp_scope_clauses (scope_stmt
);
8956 gimple_omp_scope_set_clauses (scope_stmt
, c
);
8957 lower_omp_task_reductions (ctx
, OMP_SCOPE
,
8958 gimple_omp_scope_clauses (scope_stmt
),
8959 &bind_body
, &tred_dlist
);
8961 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START
);
8962 gimple
*stmt
= gimple_build_call (fndecl
, 1, temp
);
8963 gimple_seq_add_stmt (&bind_body
, stmt
);
8966 lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt
),
8967 &bind_body
, &dlist
, ctx
, NULL
);
8968 lower_omp (gimple_omp_body_ptr (scope_stmt
), ctx
);
8970 gimple_seq_add_stmt (&bind_body
, scope_stmt
);
8972 gimple_seq_add_seq (&bind_body
, gimple_omp_body (scope_stmt
));
8974 gimple_omp_set_body (scope_stmt
, NULL
);
8976 gimple_seq clist
= NULL
;
8977 lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt
),
8978 &bind_body
, &clist
, ctx
);
8981 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
8982 gcall
*g
= gimple_build_call (fndecl
, 0);
8983 gimple_seq_add_stmt (&bind_body
, g
);
8984 gimple_seq_add_seq (&bind_body
, clist
);
8985 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
8986 g
= gimple_build_call (fndecl
, 0);
8987 gimple_seq_add_stmt (&bind_body
, g
);
8990 gimple_seq_add_seq (&bind_body
, dlist
);
8992 bind_body
= maybe_catch_exception (bind_body
);
8994 bool nowait
= omp_find_clause (gimple_omp_scope_clauses (scope_stmt
),
8995 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8996 gimple
*g
= gimple_build_omp_return (nowait
);
8997 gimple_seq_add_stmt (&bind_body_tail
, g
);
8998 gimple_seq_add_seq (&bind_body_tail
, tred_dlist
);
8999 maybe_add_implicit_barrier_cancel (ctx
, g
, &bind_body_tail
);
9000 if (ctx
->record_type
)
9002 gimple_stmt_iterator gsi
= gsi_start (bind_body_tail
);
9003 tree clobber
= build_clobber (ctx
->record_type
);
9004 gsi_insert_after (&gsi
, gimple_build_assign (ctx
->sender_decl
,
9005 clobber
), GSI_SAME_STMT
);
9007 gimple_seq_add_seq (&bind_body
, bind_body_tail
);
9009 gimple_bind_set_body (bind
, bind_body
);
9011 pop_gimplify_context (bind
);
9013 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9014 BLOCK_VARS (block
) = ctx
->block_vars
;
9015 if (BLOCK_VARS (block
))
9016 TREE_USED (block
) = 1;
9018 /* Expand code for an OpenMP master or masked directive. */
9021 lower_omp_master (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9023 tree block
, lab
= NULL
, x
, bfn_decl
;
9024 gimple
*stmt
= gsi_stmt (*gsi_p
);
9026 location_t loc
= gimple_location (stmt
);
9028 tree filter
= integer_zero_node
;
9030 push_gimplify_context ();
9032 if (gimple_code (stmt
) == GIMPLE_OMP_MASKED
)
9034 filter
= omp_find_clause (gimple_omp_masked_clauses (stmt
),
9037 filter
= fold_convert (integer_type_node
,
9038 OMP_CLAUSE_FILTER_EXPR (filter
));
9040 filter
= integer_zero_node
;
9042 block
= make_node (BLOCK
);
9043 bind
= gimple_build_bind (NULL
, NULL
, block
);
9044 gsi_replace (gsi_p
, bind
, true);
9045 gimple_bind_add_stmt (bind
, stmt
);
9047 bfn_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9048 x
= build_call_expr_loc (loc
, bfn_decl
, 0);
9049 x
= build2 (EQ_EXPR
, boolean_type_node
, x
, filter
);
9050 x
= build3 (COND_EXPR
, void_type_node
, x
, NULL
, build_and_jump (&lab
));
9052 gimplify_and_add (x
, &tseq
);
9053 gimple_bind_add_seq (bind
, tseq
);
9055 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9056 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
9057 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9058 gimple_omp_set_body (stmt
, NULL
);
9060 gimple_bind_add_stmt (bind
, gimple_build_label (lab
));
9062 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
9064 pop_gimplify_context (bind
);
9066 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9067 BLOCK_VARS (block
) = ctx
->block_vars
;
9070 /* Helper function for lower_omp_task_reductions. For a specific PASS
9071 find out the current clause it should be processed, or return false
9072 if all have been processed already. */
9075 omp_task_reduction_iterate (int pass
, enum tree_code code
,
9076 enum omp_clause_code ccode
, tree
*c
, tree
*decl
,
9077 tree
*type
, tree
*next
)
9079 for (; *c
; *c
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
))
9081 if (ccode
== OMP_CLAUSE_REDUCTION
9082 && code
!= OMP_TASKLOOP
9083 && !OMP_CLAUSE_REDUCTION_TASK (*c
))
9085 *decl
= OMP_CLAUSE_DECL (*c
);
9086 *type
= TREE_TYPE (*decl
);
9087 if (TREE_CODE (*decl
) == MEM_REF
)
9094 if (omp_privatize_by_reference (*decl
))
9095 *type
= TREE_TYPE (*type
);
9096 if (pass
!= (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type
))))
9099 *next
= omp_find_clause (OMP_CLAUSE_CHAIN (*c
), ccode
);
9108 /* Lower task_reduction and reduction clauses (the latter unless CODE is
9109 OMP_TASKGROUP only with task modifier). Register mapping of those in
9110 START sequence and reducing them and unregister them in the END sequence. */
9113 lower_omp_task_reductions (omp_context
*ctx
, enum tree_code code
, tree clauses
,
9114 gimple_seq
*start
, gimple_seq
*end
)
9116 enum omp_clause_code ccode
9117 = (code
== OMP_TASKGROUP
9118 ? OMP_CLAUSE_TASK_REDUCTION
: OMP_CLAUSE_REDUCTION
);
9119 tree cancellable
= NULL_TREE
;
9120 clauses
= omp_task_reductions_find_first (clauses
, code
, ccode
);
9121 if (clauses
== NULL_TREE
)
9123 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9125 for (omp_context
*outer
= ctx
->outer
; outer
; outer
= outer
->outer
)
9126 if (gimple_code (outer
->stmt
) == GIMPLE_OMP_PARALLEL
9127 && outer
->cancellable
)
9129 cancellable
= error_mark_node
;
9132 else if (gimple_code (outer
->stmt
) != GIMPLE_OMP_TASKGROUP
9133 && gimple_code (outer
->stmt
) != GIMPLE_OMP_SCOPE
)
9136 tree record_type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
9137 tree
*last
= &TYPE_FIELDS (record_type
);
9141 tree field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9143 tree ifield
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
, NULL_TREE
,
9146 DECL_CHAIN (field
) = ifield
;
9147 last
= &DECL_CHAIN (ifield
);
9148 DECL_CONTEXT (field
) = record_type
;
9149 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9150 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9151 DECL_CONTEXT (ifield
) = record_type
;
9152 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (ifield
))
9153 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (ifield
));
9155 for (int pass
= 0; pass
< 2; pass
++)
9157 tree decl
, type
, next
;
9158 for (tree c
= clauses
;
9159 omp_task_reduction_iterate (pass
, code
, ccode
,
9160 &c
, &decl
, &type
, &next
); c
= next
)
9163 tree new_type
= type
;
9165 new_type
= remap_type (type
, &ctx
->outer
->cb
);
9167 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
,
9168 DECL_P (decl
) ? DECL_NAME (decl
) : NULL_TREE
,
9170 if (DECL_P (decl
) && type
== TREE_TYPE (decl
))
9172 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
9173 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
9174 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
9177 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
9178 DECL_CONTEXT (field
) = record_type
;
9179 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (field
))
9180 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (field
));
9182 last
= &DECL_CHAIN (field
);
9184 = build_decl (OMP_CLAUSE_LOCATION (c
), FIELD_DECL
, NULL_TREE
,
9186 DECL_CONTEXT (bfield
) = record_type
;
9187 if (TYPE_ALIGN (record_type
) < DECL_ALIGN (bfield
))
9188 SET_TYPE_ALIGN (record_type
, DECL_ALIGN (bfield
));
9190 last
= &DECL_CHAIN (bfield
);
9194 layout_type (record_type
);
9196 /* Build up an array which registers with the runtime all the reductions
9197 and deregisters them at the end. Format documented in libgomp/task.c. */
9198 tree atype
= build_array_type_nelts (pointer_sized_int_node
, 7 + cnt
* 3);
9199 tree avar
= create_tmp_var_raw (atype
);
9200 gimple_add_tmp_var (avar
);
9201 TREE_ADDRESSABLE (avar
) = 1;
9202 tree r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_zero_node
,
9203 NULL_TREE
, NULL_TREE
);
9204 tree t
= build_int_cst (pointer_sized_int_node
, cnt
);
9205 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9206 gimple_seq seq
= NULL
;
9207 tree sz
= fold_convert (pointer_sized_int_node
,
9208 TYPE_SIZE_UNIT (record_type
));
9210 sz
= fold_build2 (PLUS_EXPR
, pointer_sized_int_node
, sz
,
9211 build_int_cst (pointer_sized_int_node
, cachesz
- 1));
9212 sz
= fold_build2 (BIT_AND_EXPR
, pointer_sized_int_node
, sz
,
9213 build_int_cst (pointer_sized_int_node
, ~(cachesz
- 1)));
9214 ctx
->task_reductions
.create (1 + cnt
);
9215 ctx
->task_reduction_map
= new hash_map
<tree
, unsigned>;
9216 ctx
->task_reductions
.quick_push (TREE_CODE (sz
) == INTEGER_CST
9218 sz
= force_gimple_operand (sz
, &seq
, true, NULL_TREE
);
9219 gimple_seq_add_seq (start
, seq
);
9220 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_one_node
,
9221 NULL_TREE
, NULL_TREE
);
9222 gimple_seq_add_stmt (start
, gimple_build_assign (r
, sz
));
9223 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9224 NULL_TREE
, NULL_TREE
);
9225 t
= build_int_cst (pointer_sized_int_node
,
9226 MAX (TYPE_ALIGN_UNIT (record_type
), (unsigned) cachesz
));
9227 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9228 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (3),
9229 NULL_TREE
, NULL_TREE
);
9230 t
= build_int_cst (pointer_sized_int_node
, -1);
9231 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9232 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (4),
9233 NULL_TREE
, NULL_TREE
);
9234 t
= build_int_cst (pointer_sized_int_node
, 0);
9235 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9237 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9238 and for each task reduction checks a bool right after the private variable
9239 within that thread's chunk; if the bool is clear, it hasn't been
9240 initialized and thus isn't going to be reduced nor destructed, otherwise
9241 reduce and destruct it. */
9242 tree idx
= create_tmp_var (size_type_node
);
9243 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, size_zero_node
));
9244 tree num_thr_sz
= create_tmp_var (size_type_node
);
9245 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
9246 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
9247 tree lab3
= NULL_TREE
, lab7
= NULL_TREE
;
9249 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9251 /* For worksharing constructs or scope, only perform it in the master
9252 thread, with the exception of cancelled implicit barriers - then only
9253 handle the current thread. */
9254 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9255 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
9256 tree thr_num
= create_tmp_var (integer_type_node
);
9257 g
= gimple_build_call (t
, 0);
9258 gimple_call_set_lhs (g
, thr_num
);
9259 gimple_seq_add_stmt (end
, g
);
9263 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9264 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9265 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9266 if (code
== OMP_FOR
)
9267 c
= gimple_omp_for_clauses (ctx
->stmt
);
9268 else if (code
== OMP_SECTIONS
)
9269 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9270 else /* if (code == OMP_SCOPE) */
9271 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9272 c
= OMP_CLAUSE_DECL (omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
));
9274 g
= gimple_build_cond (NE_EXPR
, c
, build_zero_cst (TREE_TYPE (c
)),
9276 gimple_seq_add_stmt (end
, g
);
9277 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9278 g
= gimple_build_assign (idx
, NOP_EXPR
, thr_num
);
9279 gimple_seq_add_stmt (end
, g
);
9280 g
= gimple_build_assign (num_thr_sz
, PLUS_EXPR
, idx
,
9281 build_one_cst (TREE_TYPE (idx
)));
9282 gimple_seq_add_stmt (end
, g
);
9283 gimple_seq_add_stmt (end
, gimple_build_goto (lab3
));
9284 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9286 g
= gimple_build_cond (NE_EXPR
, thr_num
, integer_zero_node
, lab2
, lab4
);
9287 gimple_seq_add_stmt (end
, g
);
9288 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9290 if (code
!= OMP_PARALLEL
)
9292 t
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
9293 tree num_thr
= create_tmp_var (integer_type_node
);
9294 g
= gimple_build_call (t
, 0);
9295 gimple_call_set_lhs (g
, num_thr
);
9296 gimple_seq_add_stmt (end
, g
);
9297 g
= gimple_build_assign (num_thr_sz
, NOP_EXPR
, num_thr
);
9298 gimple_seq_add_stmt (end
, g
);
9300 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9304 tree c
= omp_find_clause (gimple_omp_parallel_clauses (ctx
->stmt
),
9305 OMP_CLAUSE__REDUCTEMP_
);
9306 t
= fold_convert (pointer_sized_int_node
, OMP_CLAUSE_DECL (c
));
9307 t
= fold_convert (size_type_node
, t
);
9308 gimplify_assign (num_thr_sz
, t
, end
);
9310 t
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
, size_int (2),
9311 NULL_TREE
, NULL_TREE
);
9312 tree data
= create_tmp_var (pointer_sized_int_node
);
9313 gimple_seq_add_stmt (end
, gimple_build_assign (data
, t
));
9314 if (code
== OMP_TASKLOOP
)
9316 lab7
= create_artificial_label (UNKNOWN_LOCATION
);
9317 g
= gimple_build_cond (NE_EXPR
, data
,
9318 build_zero_cst (pointer_sized_int_node
),
9320 gimple_seq_add_stmt (end
, g
);
9322 gimple_seq_add_stmt (end
, gimple_build_label (lab1
));
9324 if (TREE_CODE (TYPE_SIZE_UNIT (record_type
)) == INTEGER_CST
)
9325 ptr
= create_tmp_var (build_pointer_type (record_type
));
9327 ptr
= create_tmp_var (ptr_type_node
);
9328 gimple_seq_add_stmt (end
, gimple_build_assign (ptr
, NOP_EXPR
, data
));
9330 tree field
= TYPE_FIELDS (record_type
);
9333 field
= DECL_CHAIN (DECL_CHAIN (field
));
9334 for (int pass
= 0; pass
< 2; pass
++)
9336 tree decl
, type
, next
;
9337 for (tree c
= clauses
;
9338 omp_task_reduction_iterate (pass
, code
, ccode
,
9339 &c
, &decl
, &type
, &next
); c
= next
)
9341 tree var
= decl
, ref
;
9342 if (TREE_CODE (decl
) == MEM_REF
)
9344 var
= TREE_OPERAND (var
, 0);
9345 if (TREE_CODE (var
) == POINTER_PLUS_EXPR
)
9346 var
= TREE_OPERAND (var
, 0);
9348 if (TREE_CODE (var
) == ADDR_EXPR
)
9349 var
= TREE_OPERAND (var
, 0);
9350 else if (TREE_CODE (var
) == INDIRECT_REF
)
9351 var
= TREE_OPERAND (var
, 0);
9352 tree orig_var
= var
;
9353 if (is_variable_sized (var
))
9355 gcc_assert (DECL_HAS_VALUE_EXPR_P (var
));
9356 var
= DECL_VALUE_EXPR (var
);
9357 gcc_assert (TREE_CODE (var
) == INDIRECT_REF
);
9358 var
= TREE_OPERAND (var
, 0);
9359 gcc_assert (DECL_P (var
));
9361 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9362 if (orig_var
!= var
)
9363 gcc_assert (TREE_CODE (v
) == ADDR_EXPR
);
9364 else if (TREE_CODE (v
) == ADDR_EXPR
)
9365 t
= build_fold_addr_expr (t
);
9366 else if (TREE_CODE (v
) == INDIRECT_REF
)
9367 t
= build_fold_indirect_ref (t
);
9368 if (TREE_CODE (TREE_OPERAND (decl
, 0)) == POINTER_PLUS_EXPR
)
9370 tree b
= TREE_OPERAND (TREE_OPERAND (decl
, 0), 1);
9371 b
= maybe_lookup_decl_in_outer_ctx (b
, ctx
);
9372 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
, b
);
9374 if (!integer_zerop (TREE_OPERAND (decl
, 1)))
9375 t
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (t
), t
,
9376 fold_convert (size_type_node
,
9377 TREE_OPERAND (decl
, 1)));
9381 t
= ref
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
9382 if (!omp_privatize_by_reference (decl
))
9383 t
= build_fold_addr_expr (t
);
9385 t
= fold_convert (pointer_sized_int_node
, t
);
9387 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9388 gimple_seq_add_seq (start
, seq
);
9389 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9390 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9391 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9392 t
= unshare_expr (byte_position (field
));
9393 t
= fold_convert (pointer_sized_int_node
, t
);
9394 ctx
->task_reduction_map
->put (c
, cnt
);
9395 ctx
->task_reductions
.quick_push (TREE_CODE (t
) == INTEGER_CST
9398 t
= force_gimple_operand (t
, &seq
, true, NULL_TREE
);
9399 gimple_seq_add_seq (start
, seq
);
9400 r
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9401 size_int (7 + cnt
* 3 + 1), NULL_TREE
, NULL_TREE
);
9402 gimple_seq_add_stmt (start
, gimple_build_assign (r
, t
));
9404 tree bfield
= DECL_CHAIN (field
);
9406 if (code
== OMP_PARALLEL
9408 || code
== OMP_SECTIONS
9409 || code
== OMP_SCOPE
)
9410 /* In parallel, worksharing or scope all threads unconditionally
9411 initialize all their task reduction private variables. */
9412 cond
= boolean_true_node
;
9413 else if (TREE_TYPE (ptr
) == ptr_type_node
)
9415 cond
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9416 unshare_expr (byte_position (bfield
)));
9418 cond
= force_gimple_operand (cond
, &seq
, true, NULL_TREE
);
9419 gimple_seq_add_seq (end
, seq
);
9420 tree pbool
= build_pointer_type (TREE_TYPE (bfield
));
9421 cond
= build2 (MEM_REF
, TREE_TYPE (bfield
), cond
,
9422 build_int_cst (pbool
, 0));
9425 cond
= build3 (COMPONENT_REF
, TREE_TYPE (bfield
),
9426 build_simple_mem_ref (ptr
), bfield
, NULL_TREE
);
9427 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
9428 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
9429 tree condv
= create_tmp_var (boolean_type_node
);
9430 gimple_seq_add_stmt (end
, gimple_build_assign (condv
, cond
));
9431 g
= gimple_build_cond (NE_EXPR
, condv
, boolean_false_node
,
9433 gimple_seq_add_stmt (end
, g
);
9434 gimple_seq_add_stmt (end
, gimple_build_label (lab3
));
9435 if (cancellable
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) == NULL_TREE
)
9437 /* If this reduction doesn't need destruction and parallel
9438 has been cancelled, there is nothing to do for this
9439 reduction, so jump around the merge operation. */
9440 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9441 g
= gimple_build_cond (NE_EXPR
, cancellable
,
9442 build_zero_cst (TREE_TYPE (cancellable
)),
9444 gimple_seq_add_stmt (end
, g
);
9445 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9449 if (TREE_TYPE (ptr
) == ptr_type_node
)
9451 new_var
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
, ptr
,
9452 unshare_expr (byte_position (field
)));
9454 new_var
= force_gimple_operand (new_var
, &seq
, true, NULL_TREE
);
9455 gimple_seq_add_seq (end
, seq
);
9456 tree pbool
= build_pointer_type (TREE_TYPE (field
));
9457 new_var
= build2 (MEM_REF
, TREE_TYPE (field
), new_var
,
9458 build_int_cst (pbool
, 0));
9461 new_var
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
9462 build_simple_mem_ref (ptr
), field
, NULL_TREE
);
9464 enum tree_code rcode
= OMP_CLAUSE_REDUCTION_CODE (c
);
9465 if (TREE_CODE (decl
) != MEM_REF
9466 && omp_privatize_by_reference (decl
))
9467 ref
= build_simple_mem_ref (ref
);
9468 /* reduction(-:var) sums up the partial results, so it acts
9469 identically to reduction(+:var). */
9470 if (rcode
== MINUS_EXPR
)
9472 if (TREE_CODE (decl
) == MEM_REF
)
9474 tree type
= TREE_TYPE (new_var
);
9475 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9476 tree i
= create_tmp_var (TREE_TYPE (v
));
9477 tree ptype
= build_pointer_type (TREE_TYPE (type
));
9480 v
= maybe_lookup_decl_in_outer_ctx (v
, ctx
);
9481 tree vv
= create_tmp_var (TREE_TYPE (v
));
9482 gimplify_assign (vv
, v
, start
);
9485 ref
= build4 (ARRAY_REF
, pointer_sized_int_node
, avar
,
9486 size_int (7 + cnt
* 3), NULL_TREE
, NULL_TREE
);
9487 new_var
= build_fold_addr_expr (new_var
);
9488 new_var
= fold_convert (ptype
, new_var
);
9489 ref
= fold_convert (ptype
, ref
);
9490 tree m
= create_tmp_var (ptype
);
9491 gimplify_assign (m
, new_var
, end
);
9493 m
= create_tmp_var (ptype
);
9494 gimplify_assign (m
, ref
, end
);
9496 gimplify_assign (i
, build_int_cst (TREE_TYPE (v
), 0), end
);
9497 tree body
= create_artificial_label (UNKNOWN_LOCATION
);
9498 tree endl
= create_artificial_label (UNKNOWN_LOCATION
);
9499 gimple_seq_add_stmt (end
, gimple_build_label (body
));
9500 tree priv
= build_simple_mem_ref (new_var
);
9501 tree out
= build_simple_mem_ref (ref
);
9502 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9504 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9505 tree decl_placeholder
9506 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
);
9507 tree lab6
= NULL_TREE
;
9510 /* If this reduction needs destruction and parallel
9511 has been cancelled, jump around the merge operation
9512 to the destruction. */
9513 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9514 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9515 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9516 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9518 gimple_seq_add_stmt (end
, g
);
9519 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9521 SET_DECL_VALUE_EXPR (placeholder
, out
);
9522 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9523 SET_DECL_VALUE_EXPR (decl_placeholder
, priv
);
9524 DECL_HAS_VALUE_EXPR_P (decl_placeholder
) = 1;
9525 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9526 gimple_seq_add_seq (end
,
9527 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9528 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9529 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9531 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9532 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
) = NULL
;
9535 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9536 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, priv
);
9539 gimple_seq tseq
= NULL
;
9540 gimplify_stmt (&x
, &tseq
);
9541 gimple_seq_add_seq (end
, tseq
);
9546 tree x
= build2 (rcode
, TREE_TYPE (out
), out
, priv
);
9547 out
= unshare_expr (out
);
9548 gimplify_assign (out
, x
, end
);
9551 = gimple_build_assign (new_var
, POINTER_PLUS_EXPR
, new_var
,
9552 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9553 gimple_seq_add_stmt (end
, g
);
9554 g
= gimple_build_assign (ref
, POINTER_PLUS_EXPR
, ref
,
9555 TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9556 gimple_seq_add_stmt (end
, g
);
9557 g
= gimple_build_assign (i
, PLUS_EXPR
, i
,
9558 build_int_cst (TREE_TYPE (i
), 1));
9559 gimple_seq_add_stmt (end
, g
);
9560 g
= gimple_build_cond (LE_EXPR
, i
, v
, body
, endl
);
9561 gimple_seq_add_stmt (end
, g
);
9562 gimple_seq_add_stmt (end
, gimple_build_label (endl
));
9564 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9566 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
9567 tree oldv
= NULL_TREE
;
9568 tree lab6
= NULL_TREE
;
9571 /* If this reduction needs destruction and parallel
9572 has been cancelled, jump around the merge operation
9573 to the destruction. */
9574 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
9575 lab6
= create_artificial_label (UNKNOWN_LOCATION
);
9576 tree zero
= build_zero_cst (TREE_TYPE (cancellable
));
9577 g
= gimple_build_cond (NE_EXPR
, cancellable
, zero
,
9579 gimple_seq_add_stmt (end
, g
);
9580 gimple_seq_add_stmt (end
, gimple_build_label (lab5
));
9582 if (omp_privatize_by_reference (decl
)
9583 && !useless_type_conversion_p (TREE_TYPE (placeholder
),
9585 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9586 ref
= build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c
), ref
);
9587 tree refv
= create_tmp_var (TREE_TYPE (ref
));
9588 gimplify_assign (refv
, ref
, end
);
9589 ref
= build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c
), refv
);
9590 SET_DECL_VALUE_EXPR (placeholder
, ref
);
9591 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
9592 tree d
= maybe_lookup_decl (decl
, ctx
);
9594 if (DECL_HAS_VALUE_EXPR_P (d
))
9595 oldv
= DECL_VALUE_EXPR (d
);
9596 if (omp_privatize_by_reference (var
))
9598 tree v
= fold_convert (TREE_TYPE (d
),
9599 build_fold_addr_expr (new_var
));
9600 SET_DECL_VALUE_EXPR (d
, v
);
9603 SET_DECL_VALUE_EXPR (d
, new_var
);
9604 DECL_HAS_VALUE_EXPR_P (d
) = 1;
9605 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
), ctx
);
9607 SET_DECL_VALUE_EXPR (d
, oldv
);
9610 SET_DECL_VALUE_EXPR (d
, NULL_TREE
);
9611 DECL_HAS_VALUE_EXPR_P (d
) = 0;
9613 gimple_seq_add_seq (end
, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9614 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9615 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9616 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
) = NULL
;
9618 gimple_seq_add_stmt (end
, gimple_build_label (lab6
));
9619 tree x
= lang_hooks
.decls
.omp_clause_dtor (c
, new_var
);
9622 gimple_seq tseq
= NULL
;
9623 gimplify_stmt (&x
, &tseq
);
9624 gimple_seq_add_seq (end
, tseq
);
9629 tree x
= build2 (rcode
, TREE_TYPE (ref
), ref
, new_var
);
9630 ref
= unshare_expr (ref
);
9631 gimplify_assign (ref
, x
, end
);
9633 gimple_seq_add_stmt (end
, gimple_build_label (lab4
));
9635 field
= DECL_CHAIN (bfield
);
9639 if (code
== OMP_TASKGROUP
)
9641 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER
);
9642 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9643 gimple_seq_add_stmt (start
, g
);
9648 if (code
== OMP_FOR
)
9649 c
= gimple_omp_for_clauses (ctx
->stmt
);
9650 else if (code
== OMP_SECTIONS
)
9651 c
= gimple_omp_sections_clauses (ctx
->stmt
);
9652 else if (code
== OMP_SCOPE
)
9653 c
= gimple_omp_scope_clauses (ctx
->stmt
);
9655 c
= gimple_omp_taskreg_clauses (ctx
->stmt
);
9656 c
= omp_find_clause (c
, OMP_CLAUSE__REDUCTEMP_
);
9657 t
= fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c
)),
9658 build_fold_addr_expr (avar
));
9659 gimplify_assign (OMP_CLAUSE_DECL (c
), t
, start
);
9662 gimple_seq_add_stmt (end
, gimple_build_assign (data
, PLUS_EXPR
, data
, sz
));
9663 gimple_seq_add_stmt (end
, gimple_build_assign (idx
, PLUS_EXPR
, idx
,
9665 g
= gimple_build_cond (NE_EXPR
, idx
, num_thr_sz
, lab1
, lab2
);
9666 gimple_seq_add_stmt (end
, g
);
9667 gimple_seq_add_stmt (end
, gimple_build_label (lab2
));
9668 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
|| code
== OMP_SCOPE
)
9670 enum built_in_function bfn
9671 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER
;
9672 t
= builtin_decl_explicit (bfn
);
9673 tree c_bool_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t
)));
9677 arg
= create_tmp_var (c_bool_type
);
9678 gimple_seq_add_stmt (end
, gimple_build_assign (arg
, NOP_EXPR
,
9682 arg
= build_int_cst (c_bool_type
, 0);
9683 g
= gimple_build_call (t
, 1, arg
);
9687 t
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER
);
9688 g
= gimple_build_call (t
, 1, build_fold_addr_expr (avar
));
9690 gimple_seq_add_stmt (end
, g
);
9692 gimple_seq_add_stmt (end
, gimple_build_label (lab7
));
9693 t
= build_constructor (atype
, NULL
);
9694 TREE_THIS_VOLATILE (t
) = 1;
9695 gimple_seq_add_stmt (end
, gimple_build_assign (avar
, t
));
9698 /* Expand code for an OpenMP taskgroup directive. */
9701 lower_omp_taskgroup (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9703 gimple
*stmt
= gsi_stmt (*gsi_p
);
9706 gimple_seq dseq
= NULL
;
9707 tree block
= make_node (BLOCK
);
9709 bind
= gimple_build_bind (NULL
, NULL
, block
);
9710 gsi_replace (gsi_p
, bind
, true);
9711 gimple_bind_add_stmt (bind
, stmt
);
9713 push_gimplify_context ();
9715 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START
),
9717 gimple_bind_add_stmt (bind
, x
);
9719 lower_omp_task_reductions (ctx
, OMP_TASKGROUP
,
9720 gimple_omp_taskgroup_clauses (stmt
),
9721 gimple_bind_body_ptr (bind
), &dseq
);
9723 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
9724 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
9725 gimple_omp_set_body (stmt
, NULL
);
9727 gimple_bind_add_seq (bind
, dseq
);
9729 pop_gimplify_context (bind
);
9731 gimple_bind_append_vars (bind
, ctx
->block_vars
);
9732 BLOCK_VARS (block
) = ctx
->block_vars
;
9736 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9739 lower_omp_ordered_clauses (gimple_stmt_iterator
*gsi_p
, gomp_ordered
*ord_stmt
,
9742 struct omp_for_data fd
;
9743 if (!ctx
->outer
|| gimple_code (ctx
->outer
->stmt
) != GIMPLE_OMP_FOR
)
9746 unsigned int len
= gimple_omp_for_collapse (ctx
->outer
->stmt
);
9747 struct omp_for_data_loop
*loops
= XALLOCAVEC (struct omp_for_data_loop
, len
);
9748 omp_extract_for_data (as_a
<gomp_for
*> (ctx
->outer
->stmt
), &fd
, loops
);
9752 tree
*list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9753 tree c
= gimple_omp_ordered_clauses (ord_stmt
);
9754 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
9755 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
9757 /* Merge depend clauses from multiple adjacent
9758 #pragma omp ordered depend(sink:...) constructs
9759 into one #pragma omp ordered depend(sink:...), so that
9760 we can optimize them together. */
9761 gimple_stmt_iterator gsi
= *gsi_p
;
9763 while (!gsi_end_p (gsi
))
9765 gimple
*stmt
= gsi_stmt (gsi
);
9766 if (is_gimple_debug (stmt
)
9767 || gimple_code (stmt
) == GIMPLE_NOP
)
9772 if (gimple_code (stmt
) != GIMPLE_OMP_ORDERED
)
9774 gomp_ordered
*ord_stmt2
= as_a
<gomp_ordered
*> (stmt
);
9775 c
= gimple_omp_ordered_clauses (ord_stmt2
);
9777 || OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DOACROSS
9778 || OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9781 list_p
= &OMP_CLAUSE_CHAIN (*list_p
);
9783 gsi_remove (&gsi
, true);
9787 /* Canonicalize sink dependence clauses into one folded clause if
9790 The basic algorithm is to create a sink vector whose first
9791 element is the GCD of all the first elements, and whose remaining
9792 elements are the minimum of the subsequent columns.
9794 We ignore dependence vectors whose first element is zero because
9795 such dependencies are known to be executed by the same thread.
9797 We take into account the direction of the loop, so a minimum
9798 becomes a maximum if the loop is iterating forwards. We also
9799 ignore sink clauses where the loop direction is unknown, or where
9800 the offsets are clearly invalid because they are not a multiple
9801 of the loop increment.
9805 #pragma omp for ordered(2)
9806 for (i=0; i < N; ++i)
9807 for (j=0; j < M; ++j)
9809 #pragma omp ordered \
9810 depend(sink:i-8,j-2) \
9811 depend(sink:i,j-1) \ // Completely ignored because i+0.
9812 depend(sink:i-4,j-3) \
9813 depend(sink:i-6,j-4)
9814 #pragma omp ordered depend(source)
9819 depend(sink:-gcd(8,4,6),-min(2,3,4))
9824 /* FIXME: Computing GCD's where the first element is zero is
9825 non-trivial in the presence of collapsed loops. Do this later. */
9826 if (fd
.collapse
> 1)
9829 wide_int
*folded_deps
= XALLOCAVEC (wide_int
, 2 * len
- 1);
9831 /* wide_int is not a POD so it must be default-constructed. */
9832 for (unsigned i
= 0; i
!= 2 * len
- 1; ++i
)
9833 new (static_cast<void*>(folded_deps
+ i
)) wide_int ();
9835 tree folded_dep
= NULL_TREE
;
9836 /* TRUE if the first dimension's offset is negative. */
9837 bool neg_offset_p
= false;
9839 list_p
= gimple_omp_ordered_clauses_ptr (ord_stmt
);
9841 while ((c
= *list_p
) != NULL
)
9843 bool remove
= false;
9845 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
);
9846 if (OMP_CLAUSE_DOACROSS_KIND (c
) != OMP_CLAUSE_DOACROSS_SINK
)
9847 goto next_ordered_clause
;
9850 for (vec
= OMP_CLAUSE_DECL (c
), i
= 0;
9851 vec
&& TREE_CODE (vec
) == TREE_LIST
;
9852 vec
= TREE_CHAIN (vec
), ++i
)
9854 gcc_assert (i
< len
);
9856 /* omp_extract_for_data has canonicalized the condition. */
9857 gcc_assert (fd
.loops
[i
].cond_code
== LT_EXPR
9858 || fd
.loops
[i
].cond_code
== GT_EXPR
);
9859 bool forward
= fd
.loops
[i
].cond_code
== LT_EXPR
;
9860 bool maybe_lexically_later
= true;
9862 /* While the committee makes up its mind, bail if we have any
9863 non-constant steps. */
9864 if (TREE_CODE (fd
.loops
[i
].step
) != INTEGER_CST
)
9865 goto lower_omp_ordered_ret
;
9867 tree itype
= TREE_TYPE (TREE_VALUE (vec
));
9868 if (POINTER_TYPE_P (itype
))
9870 wide_int offset
= wide_int::from (wi::to_wide (TREE_PURPOSE (vec
)),
9871 TYPE_PRECISION (itype
),
9874 /* Ignore invalid offsets that are not multiples of the step. */
9875 if (!wi::multiple_of_p (wi::abs (offset
),
9876 wi::abs (wi::to_wide (fd
.loops
[i
].step
)),
9879 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9880 "ignoring sink clause with offset that is not "
9881 "a multiple of the loop step");
9883 goto next_ordered_clause
;
9886 /* Calculate the first dimension. The first dimension of
9887 the folded dependency vector is the GCD of the first
9888 elements, while ignoring any first elements whose offset
9892 /* Ignore dependence vectors whose first dimension is 0. */
9896 goto next_ordered_clause
;
9900 if (!TYPE_UNSIGNED (itype
) && (forward
^ wi::neg_p (offset
)))
9902 error_at (OMP_CLAUSE_LOCATION (c
),
9903 "first offset must be in opposite direction "
9904 "of loop iterations");
9905 goto lower_omp_ordered_ret
;
9909 neg_offset_p
= forward
;
9910 /* Initialize the first time around. */
9911 if (folded_dep
== NULL_TREE
)
9914 folded_deps
[0] = offset
;
9917 folded_deps
[0] = wi::gcd (folded_deps
[0],
9921 /* Calculate minimum for the remaining dimensions. */
9924 folded_deps
[len
+ i
- 1] = offset
;
9925 if (folded_dep
== c
)
9926 folded_deps
[i
] = offset
;
9927 else if (maybe_lexically_later
9928 && !wi::eq_p (folded_deps
[i
], offset
))
9930 if (forward
^ wi::gts_p (folded_deps
[i
], offset
))
9934 for (j
= 1; j
<= i
; j
++)
9935 folded_deps
[j
] = folded_deps
[len
+ j
- 1];
9938 maybe_lexically_later
= false;
9942 gcc_assert (i
== len
);
9946 next_ordered_clause
:
9948 *list_p
= OMP_CLAUSE_CHAIN (c
);
9950 list_p
= &OMP_CLAUSE_CHAIN (c
);
9956 folded_deps
[0] = -folded_deps
[0];
9958 tree itype
= TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep
)));
9959 if (POINTER_TYPE_P (itype
))
9962 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep
))
9963 = wide_int_to_tree (itype
, folded_deps
[0]);
9964 OMP_CLAUSE_CHAIN (folded_dep
) = gimple_omp_ordered_clauses (ord_stmt
);
9965 *gimple_omp_ordered_clauses_ptr (ord_stmt
) = folded_dep
;
9968 lower_omp_ordered_ret
:
9970 /* Ordered without clauses is #pragma omp threads, while we want
9971 a nop instead if we remove all clauses. */
9972 if (gimple_omp_ordered_clauses (ord_stmt
) == NULL_TREE
)
9973 gsi_replace (gsi_p
, gimple_build_nop (), true);
/* NOTE(review): this region is a corrupted extraction of GCC's omp-low.cc.
   Statements are split across physical lines, the original file's line
   numbers (e.g. "9977") are fused into the text, and several lines (braces,
   declarations such as the BIND/BLOCK locals, some conditions) are elided.
   Restore from the upstream file before compiling; comments below describe
   only what the visible fragments show.  */
9977 /* Expand code for an OpenMP ordered directive. */
/* Lowers a GIMPLE_OMP_ORDERED: standalone (doacross) ordered is delegated to
   lower_omp_ordered_clauses; otherwise the body is wrapped in a GIMPLE_BIND
   bracketed by GOMP_ORDERED_START/END runtime calls (or the
   IFN_GOMP_SIMD_ORDERED_* internal fns on the SIMD path), with an extra
   per-SIMT-lane loop when offloaded SIMT is possible.  */
9980 lower_omp_ordered (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
9983 gimple
*stmt
= gsi_stmt (*gsi_p
), *g
;
9984 gomp_ordered
*ord_stmt
= as_a
<gomp_ordered
*> (stmt
);
/* SIMD/THREADS clause detection on the ordered construct itself.  */
9987 bool simd
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9989 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9992 = simd
&& omp_maybe_offloaded_ctx (ctx
) && omp_max_simt_vf () > 1;
9993 bool threads
= omp_find_clause (gimple_omp_ordered_clauses (ord_stmt
),
9994 OMP_CLAUSE_THREADS
);
9996 if (gimple_omp_ordered_standalone_p (ord_stmt
))
9998 /* FIXME: This is needs to be moved to the expansion to verify various
9999 conditions only testable on cfg with dominators computed, and also
10000 all the depend clauses to be merged still might need to be available
10001 for the runtime checks. */
10003 lower_omp_ordered_clauses (gsi_p
, ord_stmt
, ctx
);
10007 push_gimplify_context ();
/* Build a BIND to hold the lowered construct and splice it in place of the
   original statement.  (The declarations of `block' and `bind' are among
   the elided lines -- TODO confirm against upstream.)  */
10009 block
= make_node (BLOCK
);
10010 bind
= gimple_build_bind (NULL
, NULL
, block
);
10011 gsi_replace (gsi_p
, bind
, true);
10012 gimple_bind_add_stmt (bind
, stmt
);
/* Entry call: SIMD path uses the internal fn, otherwise the libgomp
   builtin GOMP_ordered_start.  */
10016 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START
, 1,
10017 build_int_cst (NULL_TREE
, threads
));
10018 cfun
->has_simduid_loops
= true;
10021 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START
),
10023 gimple_bind_add_stmt (bind
, x
);
10025 tree counter
= NULL_TREE
, test
= NULL_TREE
, body
= NULL_TREE
;
/* Offloaded-SIMT path: iterate the body once per lane, gated by
   IFN_GOMP_SIMT_ORDERED_PRED and joined with IFN_GOMP_SIMT_VOTE_ANY.  */
10028 counter
= create_tmp_var (integer_type_node
);
10029 g
= gimple_build_call_internal (IFN_GOMP_SIMT_LANE
, 0);
10030 gimple_call_set_lhs (g
, counter
);
10031 gimple_bind_add_stmt (bind
, g
);
10033 body
= create_artificial_label (UNKNOWN_LOCATION
);
10034 test
= create_artificial_label (UNKNOWN_LOCATION
);
10035 gimple_bind_add_stmt (bind
, gimple_build_label (body
));
10037 tree simt_pred
= create_tmp_var (integer_type_node
);
10038 g
= gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED
, 1, counter
);
10039 gimple_call_set_lhs (g
, simt_pred
);
10040 gimple_bind_add_stmt (bind
, g
);
10042 tree t
= create_artificial_label (UNKNOWN_LOCATION
);
10043 g
= gimple_build_cond (EQ_EXPR
, simt_pred
, integer_zero_node
, t
, test
);
10044 gimple_bind_add_stmt (bind
, g
);
10046 gimple_bind_add_stmt (bind
, gimple_build_label (t
));
/* Lower the ordered body, wrap it for EH, and move it into the bind.  */
10048 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10049 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10050 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10051 gimple_omp_set_body (stmt
, NULL
);
/* Loop back-edge: decrement the lane counter and vote across lanes
   whether any lane still has work (counter >= 0).  */
10055 gimple_bind_add_stmt (bind
, gimple_build_label (test
));
10056 g
= gimple_build_assign (counter
, MINUS_EXPR
, counter
, integer_one_node
);
10057 gimple_bind_add_stmt (bind
, g
);
10059 tree c
= build2 (GE_EXPR
, boolean_type_node
, counter
, integer_zero_node
);
10060 tree nonneg
= create_tmp_var (integer_type_node
);
10061 gimple_seq tseq
= NULL
;
10062 gimplify_assign (nonneg
, fold_convert (integer_type_node
, c
), &tseq
);
10063 gimple_bind_add_seq (bind
, tseq
);
10065 g
= gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY
, 1, nonneg
);
10066 gimple_call_set_lhs (g
, nonneg
);
10067 gimple_bind_add_stmt (bind
, g
);
10069 tree end
= create_artificial_label (UNKNOWN_LOCATION
);
10070 g
= gimple_build_cond (NE_EXPR
, nonneg
, integer_zero_node
, body
, end
);
10071 gimple_bind_add_stmt (bind
, g
);
10073 gimple_bind_add_stmt (bind
, gimple_build_label (end
));
/* Exit call: mirror of the entry call above.  */
10076 x
= gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END
, 1,
10077 build_int_cst (NULL_TREE
, threads
));
10079 x
= gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END
),
10081 gimple_bind_add_stmt (bind
, x
);
10083 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10085 pop_gimplify_context (bind
);
10087 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10088 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10092 /* Expand code for an OpenMP scan directive and the structured block
10093 before the scan directive. */
/* NOTE(review): corrupted extraction -- statements split across lines,
   original line numbers embedded, and some lines elided (e.g. the `has_clauses'
   declaration head at 10099/10100, several closing braces).  Restore from
   upstream omp-low.cc before compiling.
   What the visible code shows: the function distinguishes the input phase
   from the scan phase (input_phase = has_clauses ^ octx->scan_inclusive),
   walks inscan reduction clauses of the enclosing GIMPLE_OMP_FOR, and emits
   per-clause code into `before': identity initialization (via
   OMP_CLAUSE_REDUCTION_GIMPLE_INIT or omp_reduction_init), merge code (via
   OMP_CLAUSE_REDUCTION_GIMPLE_MERGE or a build2 of the reduction code), using
   the "omp simd array" per-lane arrays indexed by the IFN_GOMP_SIMD_LANE
   result for the SIMD case.  */
10096 lower_omp_scan (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10098 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* has_clauses: whether this GIMPLE_OMP_SCAN carries clauses (declaration
   head elided by the extraction -- TODO confirm).  */
10100 = gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)) != NULL
;
10101 tree lane
= NULL_TREE
;
10102 gimple_seq before
= NULL
;
10103 omp_context
*octx
= ctx
->outer
;
10105 if (octx
->scan_exclusive
&& !has_clauses
)
10107 gimple_stmt_iterator gsi2
= *gsi_p
;
10109 gimple
*stmt2
= gsi_stmt (gsi2
);
10110 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10111 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10112 the one with exclusive clause(s), comes first. */
10114 && gimple_code (stmt2
) == GIMPLE_OMP_SCAN
10115 && gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt2
)) != NULL
)
10117 gsi_remove (gsi_p
, false);
10118 gsi_insert_after (gsi_p
, stmt
, GSI_SAME_STMT
);
10119 ctx
= maybe_lookup_ctx (stmt2
);
/* Recurse on the swapped-in scan statement.  */
10121 lower_omp_scan (gsi_p
, ctx
);
10126 bool input_phase
= has_clauses
^ octx
->scan_inclusive
;
10127 bool is_simd
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10128 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_SIMD
);
10129 bool is_for
= (gimple_code (octx
->stmt
) == GIMPLE_OMP_FOR
10130 && gimple_omp_for_kind (octx
->stmt
) == GF_OMP_FOR_KIND_FOR
10131 && !gimple_omp_for_combined_p (octx
->stmt
));
10132 bool is_for_simd
= is_simd
&& gimple_omp_for_combined_into_p (octx
->stmt
);
10133 if (is_for_simd
&& octx
->for_simd_scan_phase
)
/* SIMD: fetch the simduid and ask IFN_GOMP_SIMD_LANE for the current lane,
   with the phase encoded in the second argument (2/3 per the ?: below).  */
10136 if (tree c
= omp_find_clause (gimple_omp_for_clauses (octx
->stmt
),
10137 OMP_CLAUSE__SIMDUID_
))
10139 tree uid
= OMP_CLAUSE__SIMDUID__DECL (c
);
10140 lane
= create_tmp_var (unsigned_type_node
);
10141 tree t
= build_int_cst (integer_type_node
,
10143 : octx
->scan_inclusive
? 2 : 3);
10145 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE
, 2, uid
, t
);
10146 gimple_call_set_lhs (g
, lane
);
10147 gimple_seq_add_stmt (&before
, g
);
10150 if (is_simd
|| is_for
)
/* Per inscan-reduction-clause lowering.  */
10152 for (tree c
= gimple_omp_for_clauses (octx
->stmt
);
10153 c
; c
= OMP_CLAUSE_CHAIN (c
))
10154 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10155 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
10157 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
10158 tree var
= OMP_CLAUSE_DECL (c
);
10159 tree new_var
= lookup_decl (var
, octx
);
10160 tree val
= new_var
;
10161 tree var2
= NULL_TREE
;
10162 tree var3
= NULL_TREE
;
10163 tree var4
= NULL_TREE
;
10164 tree lane0
= NULL_TREE
;
10165 tree new_vard
= new_var
;
10166 if (omp_privatize_by_reference (var
))
10168 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
10171 if (DECL_HAS_VALUE_EXPR_P (new_vard
))
10173 val
= DECL_VALUE_EXPR (new_vard
);
10174 if (new_vard
!= new_var
)
10176 gcc_assert (TREE_CODE (val
) == ADDR_EXPR
);
10177 val
= TREE_OPERAND (val
, 0);
/* If val is an element of an "omp simd array", rebuild the ARRAY_REFs to
   index by the current lane (saving the original index in lane0).  */
10179 if (TREE_CODE (val
) == ARRAY_REF
10180 && VAR_P (TREE_OPERAND (val
, 0)))
10182 tree v
= TREE_OPERAND (val
, 0);
10183 if (lookup_attribute ("omp simd array",
10184 DECL_ATTRIBUTES (v
)))
10186 val
= unshare_expr (val
);
10187 lane0
= TREE_OPERAND (val
, 1);
10188 TREE_OPERAND (val
, 1) = lane
;
10189 var2
= lookup_decl (v
, octx
);
10190 if (octx
->scan_exclusive
)
10191 var4
= lookup_decl (var2
, octx
);
10193 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10194 var3
= maybe_lookup_decl (var4
? var4
: var2
, octx
);
10197 var2
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10198 var2
, lane
, NULL_TREE
, NULL_TREE
);
10199 TREE_THIS_NOTRAP (var2
) = 1;
10200 if (octx
->scan_exclusive
)
10202 var4
= build4 (ARRAY_REF
, TREE_TYPE (val
),
10203 var4
, lane
, NULL_TREE
,
10205 TREE_THIS_NOTRAP (var4
) = 1;
/* Non-SIMD-array case (several lines 10206-10215 elided here).  */
10216 var2
= build_outer_var_ref (var
, octx
);
10217 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10219 var3
= maybe_lookup_decl (new_vard
, octx
);
10220 if (var3
== new_vard
|| var3
== NULL_TREE
)
10222 else if (is_simd
&& octx
->scan_exclusive
&& !input_phase
)
10224 var4
= maybe_lookup_decl (var3
, octx
);
10225 if (var4
== var3
|| var4
== NULL_TREE
)
10227 if (TREE_ADDRESSABLE (TREE_TYPE (new_var
)))
10238 && octx
->scan_exclusive
10240 && var4
== NULL_TREE
)
10241 var4
= create_tmp_var (TREE_TYPE (val
));
10243 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
/* User-defined reduction: run the UDR init/merge sequences with the
   placeholder and decl temporarily redirected via DECL_VALUE_EXPR.  */
10245 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
10250 /* If we've added a separate identity element
10251 variable, copy it over into val. */
10252 tree x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10254 gimplify_and_add (x
, &before
);
10256 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
10258 /* Otherwise, assign to it the identity element. */
10259 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
10261 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
10262 tree ref
= build_outer_var_ref (var
, octx
);
10263 tree x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10264 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10267 if (new_vard
!= new_var
)
10268 val
= build_fold_addr_expr_loc (clause_loc
, val
);
10269 SET_DECL_VALUE_EXPR (new_vard
, val
);
10271 SET_DECL_VALUE_EXPR (placeholder
, ref
);
10272 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10273 lower_omp (&tseq
, octx
);
/* Restore the value-exprs after lowering the init sequence.  */
10275 SET_DECL_VALUE_EXPR (new_vard
, x
);
10276 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10277 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10278 gimple_seq_add_seq (&before
, tseq
);
10280 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
/* Scan phase of a UDR: for exclusive scan first save var2 into var4.  */
10286 if (octx
->scan_exclusive
)
10288 tree v4
= unshare_expr (var4
);
10289 tree v2
= unshare_expr (var2
);
10290 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, v4
, v2
);
10291 gimplify_and_add (x
, &before
);
10293 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
10294 x
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
10295 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
10297 if (x
&& new_vard
!= new_var
)
10298 vexpr
= build_fold_addr_expr_loc (clause_loc
, val
);
10300 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10301 SET_DECL_VALUE_EXPR (placeholder
, var2
);
10302 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
10303 lower_omp (&tseq
, octx
);
10304 gimple_seq_add_seq (&before
, tseq
);
10305 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10307 SET_DECL_VALUE_EXPR (new_vard
, x
);
10308 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
10309 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
10310 if (octx
->scan_inclusive
)
10312 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10314 gimplify_and_add (x
, &before
);
10316 else if (lane0
== NULL_TREE
)
10318 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, val
,
10320 gimplify_and_add (x
, &before
);
/* Plain (non-UDR) reduction.  */
10328 /* input phase. Set val to initializer before
10330 tree x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
10331 gimplify_assign (val
, x
, &before
);
10336 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
/* MINUS_EXPR reductions are accumulated as PLUS (handling elided here --
   TODO confirm against upstream).  */
10337 if (code
== MINUS_EXPR
)
10340 tree x
= build2 (code
, TREE_TYPE (var2
),
10341 unshare_expr (var2
), unshare_expr (val
));
10342 if (octx
->scan_inclusive
)
10344 gimplify_assign (unshare_expr (var2
), x
, &before
);
10345 gimplify_assign (val
, var2
, &before
);
/* Exclusive: publish the pre-update value through var4.  */
10349 gimplify_assign (unshare_expr (var4
),
10350 unshare_expr (var2
), &before
);
10351 gimplify_assign (var2
, x
, &before
);
10352 if (lane0
== NULL_TREE
)
10353 gimplify_assign (val
, var4
, &before
);
10357 if (octx
->scan_exclusive
&& !input_phase
&& lane0
)
10359 tree vexpr
= unshare_expr (var4
);
10360 TREE_OPERAND (vexpr
, 1) = lane0
;
10361 if (new_vard
!= new_var
)
10362 vexpr
= build_fold_addr_expr_loc (clause_loc
, vexpr
);
10363 SET_DECL_VALUE_EXPR (new_vard
, vexpr
);
10367 if (is_simd
&& !is_for_simd
)
/* Pure SIMD: splice body and the generated `before' sequence after the
   scan statement and replace the scan with a nop.  */
10369 gsi_insert_seq_after (gsi_p
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
10370 gsi_insert_seq_after (gsi_p
, before
, GSI_SAME_STMT
);
10371 gsi_replace (gsi_p
, gimple_build_nop (), true);
10374 lower_omp (gimple_omp_body_ptr (stmt
), octx
);
/* Otherwise prepend `before' to the (lowered) scan body.  */
10377 gimple_stmt_iterator gsi
= gsi_start (*gimple_omp_body_ptr (stmt
));
10378 gsi_insert_seq_before (&gsi
, before
, GSI_SAME_STMT
);
10383 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10384 substitution of a couple of function calls. But in the NAMED case,
10385 requires that languages coordinate a symbol name. It is therefore
10386 best put here in common code. */
/* NOTE(review): corrupted extraction (split statements, embedded original
   line numbers, elided lines such as the `decl'/`new_str'/`octx'/`block'/
   `bind'/`tbody' declarations and several braces).  Restore from upstream.  */
/* GC-rooted map from critical-section name (IDENTIFIER_NODE) to the
   `.gomp_critical_user_<name>' mutex variable, shared across all named
   critical regions in the translation unit.  */
10388 static GTY(()) hash_map
<tree
, tree
> *critical_name_mutexes
;
/* Lowers GIMPLE_OMP_CRITICAL into GOMP_critical_{name_,}start/end calls
   around the (lowered, EH-wrapped) body inside a fresh GIMPLE_BIND.  */
10391 lower_omp_critical (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
10394 tree name
, lock
, unlock
;
10395 gomp_critical
*stmt
= as_a
<gomp_critical
*> (gsi_stmt (*gsi_p
));
10397 location_t loc
= gimple_location (stmt
);
10400 name
= gimple_omp_critical_name (stmt
);
/* Named case: look up (or lazily create) the per-name mutex symbol.  */
10405 if (!critical_name_mutexes
)
10406 critical_name_mutexes
= hash_map
<tree
, tree
>::create_ggc (10);
10408 tree
*n
= critical_name_mutexes
->get (name
);
/* Not yet in the map: build a public common symbol named
   ".gomp_critical_user_<name>" so all TUs share one mutex.  */
10413 decl
= create_tmp_var_raw (ptr_type_node
);
10415 new_str
= ACONCAT ((".gomp_critical_user_",
10416 IDENTIFIER_POINTER (name
), NULL
));
10417 DECL_NAME (decl
) = get_identifier (new_str
);
10418 TREE_PUBLIC (decl
) = 1;
10419 TREE_STATIC (decl
) = 1;
10420 DECL_COMMON (decl
) = 1;
10421 DECL_ARTIFICIAL (decl
) = 1;
10422 DECL_IGNORED_P (decl
) = 1;
10424 varpool_node::finalize_decl (decl
);
10426 critical_name_mutexes
->put (name
, decl
);
10431 /* If '#pragma omp critical' is inside offloaded region or
10432 inside function marked as offloadable, the symbol must be
10433 marked as offloadable too. */
10435 if (cgraph_node::get (current_function_decl
)->offloadable
)
10436 varpool_node::get_create (decl
)->offloadable
= 1;
10438 for (octx
= ctx
->outer
; octx
; octx
= octx
->outer
)
10439 if (is_gimple_omp_offloaded (octx
->stmt
))
10441 varpool_node::get_create (decl
)->offloadable
= 1;
/* Named lock/unlock calls take the mutex address.  */
10445 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START
);
10446 lock
= build_call_expr_loc (loc
, lock
, 1,
10447 build_fold_addr_expr_loc (loc
, decl
));
10449 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END
);
10450 unlock
= build_call_expr_loc (loc
, unlock
, 1,
10451 build_fold_addr_expr_loc (loc
, decl
));
/* Unnamed case: the global critical lock, no arguments.  */
10455 lock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START
);
10456 lock
= build_call_expr_loc (loc
, lock
, 0);
10458 unlock
= builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END
);
10459 unlock
= build_call_expr_loc (loc
, unlock
, 0);
10462 push_gimplify_context ();
10464 block
= make_node (BLOCK
);
10465 bind
= gimple_build_bind (NULL
, NULL
, block
);
10466 gsi_replace (gsi_p
, bind
, true);
10467 gimple_bind_add_stmt (bind
, stmt
);
/* lock; body; unlock -- gimplified into the bind in that order.  */
10469 tbody
= gimple_bind_body (bind
);
10470 gimplify_and_add (lock
, &tbody
);
10471 gimple_bind_set_body (bind
, tbody
);
10473 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
10474 gimple_omp_set_body (stmt
, maybe_catch_exception (gimple_omp_body (stmt
)));
10475 gimple_bind_add_seq (bind
, gimple_omp_body (stmt
));
10476 gimple_omp_set_body (stmt
, NULL
);
10478 tbody
= gimple_bind_body (bind
);
10479 gimplify_and_add (unlock
, &tbody
);
10480 gimple_bind_set_body (bind
, tbody
);
10482 gimple_bind_add_stmt (bind
, gimple_build_omp_return (true));
10484 pop_gimplify_context (bind
);
10485 gimple_bind_append_vars (bind
, ctx
->block_vars
);
10486 BLOCK_VARS (block
) = gimple_bind_vars (bind
);
10489 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10490 for a lastprivate clause. Given a loop control predicate of (V
10491 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10492 is appended to *DLIST, iterator initialization is appended to
10493 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10494 to be emitted in a critical section. */
/* NOTE(review): corrupted extraction -- split statements, embedded original
   line numbers, elided lines (e.g. the `stmts' gimple_seq declaration, the
   non-rectangular-loop condition head at 10556-10557, several braces).
   Restore from upstream omp-low.cc before compiling.  */
10497 lower_omp_for_lastprivate (struct omp_for_data
*fd
, gimple_seq
*body_p
,
10498 gimple_seq
*dlist
, gimple_seq
*clist
,
10499 struct omp_context
*ctx
)
10501 tree clauses
, cond
, vinit
;
10502 enum tree_code cond_code
;
/* Invert the canonicalized loop condition: LT -> GE, otherwise -> LE.  */
10505 cond_code
= fd
->loop
.cond_code
;
10506 cond_code
= cond_code
== LT_EXPR
? GE_EXPR
: LE_EXPR
;
10508 /* When possible, use a strict equality expression. This can let VRP
10509 type optimizations deduce the value and remove a copy. */
10510 if (tree_fits_shwi_p (fd
->loop
.step
))
10512 HOST_WIDE_INT step
= tree_to_shwi (fd
->loop
.step
);
10513 if (step
== 1 || step
== -1)
10514 cond_code
= EQ_EXPR
;
/* For collapsed loops combined into an outer construct, the end value N2
   must be taken from the outer construct's _looptemp_ clauses, since the
   local N2 is not a constant.  */
10517 tree n2
= fd
->loop
.n2
;
10518 if (fd
->collapse
> 1
10519 && TREE_CODE (n2
) != INTEGER_CST
10520 && gimple_omp_for_combined_into_p (fd
->for_stmt
))
10522 struct omp_context
*taskreg_ctx
= NULL
;
10523 if (gimple_code (ctx
->outer
->stmt
) == GIMPLE_OMP_FOR
)
10525 gomp_for
*gfor
= as_a
<gomp_for
*> (ctx
->outer
->stmt
);
10526 if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_FOR
10527 || gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_DISTRIBUTE
)
10529 if (gimple_omp_for_combined_into_p (gfor
))
10531 gcc_assert (ctx
->outer
->outer
10532 && is_parallel_ctx (ctx
->outer
->outer
));
10533 taskreg_ctx
= ctx
->outer
->outer
;
/* Outer worksharing loop not itself combined: extract its own
   canonicalized N2.  */
10537 struct omp_for_data outer_fd
;
10538 omp_extract_for_data (gfor
, &outer_fd
, NULL
);
10539 n2
= fold_convert (TREE_TYPE (n2
), outer_fd
.loop
.n2
);
10542 else if (gimple_omp_for_kind (gfor
) == GF_OMP_FOR_KIND_TASKLOOP
)
10543 taskreg_ctx
= ctx
->outer
->outer
;
10545 else if (is_taskreg_ctx (ctx
->outer
))
10546 taskreg_ctx
= ctx
->outer
;
/* Walk the _looptemp_ clauses on the enclosing parallel/task to find
   the temp that carries the combined loop's end value.  */
10550 tree taskreg_clauses
10551 = gimple_omp_taskreg_clauses (taskreg_ctx
->stmt
);
10552 tree innerc
= omp_find_clause (taskreg_clauses
,
10553 OMP_CLAUSE__LOOPTEMP_
);
10554 gcc_assert (innerc
);
10555 int count
= fd
->collapse
;
10557 && fd
->last_nonrect
== fd
->first_nonrect
+ 1)
10558 if (tree v
= gimple_omp_for_index (fd
->for_stmt
, fd
->last_nonrect
))
10559 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
10561 for (i
= 0; i
< count
; i
++)
10563 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10564 OMP_CLAUSE__LOOPTEMP_
);
10565 gcc_assert (innerc
);
10567 innerc
= omp_find_clause (OMP_CLAUSE_CHAIN (innerc
),
10568 OMP_CLAUSE__LOOPTEMP_
);
10570 n2
= fold_convert (TREE_TYPE (n2
),
10571 lookup_decl (OMP_CLAUSE_DECL (innerc
),
/* Gate: the lastprivate copies run only when !(V cond N2).  */
10575 cond
= build2 (cond_code
, boolean_type_node
, fd
->loop
.v
, n2
);
10577 clauses
= gimple_omp_for_clauses (fd
->for_stmt
);
10579 lower_lastprivate_clauses (clauses
, cond
, body_p
, &stmts
, clist
, ctx
);
10580 if (!gimple_seq_empty_p (stmts
))
10582 gimple_seq_add_seq (&stmts
, *dlist
);
10585 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10586 vinit
= fd
->loop
.n1
;
10587 if (cond_code
== EQ_EXPR
10588 && tree_fits_shwi_p (fd
->loop
.n2
)
10589 && ! integer_zerop (fd
->loop
.n2
))
10590 vinit
= build_int_cst (TREE_TYPE (fd
->loop
.v
), 0);
10592 vinit
= unshare_expr (vinit
);
10594 /* Initialize the iterator variable, so that threads that don't execute
10595 any iterations don't execute the lastprivate clauses by accident. */
10596 gimplify_assign (fd
->loop
.v
, vinit
, body_p
);
10600 /* OpenACC privatization.
10602 Or, in other words, *sharing* at the respective OpenACC level of
10605 From a correctness perspective, a non-addressable variable can't be accessed
10606 outside the current thread, so it can go in a (faster than shared memory)
10607 register -- though that register may need to be broadcast in some
10608 circumstances. A variable can only meaningfully be "shared" across workers
10609 or vector lanes if its address is taken, e.g. by a call to an atomic
10612 From an optimisation perspective, the answer might be fuzzier: maybe
10613 sometimes, using shared memory directly would be faster than
/* NOTE(review): corrupted extraction -- several lines of the prose comment
   above and of this helper (its `decl' parameter, closing braces) are
   elided; restore from upstream omp-low.cc.
   This helper starts a -fopt-info style diagnostic line for DECL at LOC:
   it prints "variable %T ", then either the clause name (when C is a
   clause) or "declared in block " (when diagnosing a block-local decl).  */
10617 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags
,
10618 const location_t loc
, const tree c
,
10621 const dump_user_location_t d_u_loc
10622 = dump_user_location_t::from_location_t (loc
);
10623 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10625 # pragma GCC diagnostic push
10626 # pragma GCC diagnostic ignored "-Wformat"
10628 dump_printf_loc (l_dump_flags
, d_u_loc
,
10629 "variable %<%T%> ", decl
);
10631 # pragma GCC diagnostic pop
10634 dump_printf (l_dump_flags
,
10636 omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10638 dump_printf (l_dump_flags
,
10639 "declared in block ");
/* NOTE(review): corrupted extraction -- the `tree decl'/`bool block'
   parameters, the `res' declaration/initialization, the `res = false'
   statements in the rejecting branches, and various braces are elided;
   restore from upstream omp-low.cc.
   Decides whether DECL is a candidate for adjusting its OpenACC
   privatization level.  The visible tests reject: non-VAR_P decls,
   TREE_STATIC block-local decls, DECL_EXTERNAL block-local decls, and
   non-TREE_ADDRESSABLE decls; each decision is reported via the opt-info
   dump machinery, with extra detail under -fdump-...-details.  */
10643 oacc_privatization_candidate_p (const location_t loc
, const tree c
,
10646 dump_flags_t l_dump_flags
= get_openacc_privatization_dump_flags ();
10648 /* There is some differentiation depending on block vs. clause. */
10653 if (res
&& !VAR_P (decl
))
10655 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10656 privatized into a new VAR_DECL. */
10657 gcc_checking_assert (TREE_CODE (decl
) != PARM_DECL
);
10661 if (dump_enabled_p ())
10663 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10664 dump_printf (l_dump_flags
,
10665 "potentially has improper OpenACC privatization level: %qs\n",
10666 get_tree_code_name (TREE_CODE (decl
)));
/* Block-scope static variables are shared; not a candidate.  */
10670 if (res
&& block
&& TREE_STATIC (decl
))
10674 if (dump_enabled_p ())
10676 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10677 dump_printf (l_dump_flags
,
10678 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
/* External declarations likewise.  */
10683 if (res
&& block
&& DECL_EXTERNAL (decl
))
10687 if (dump_enabled_p ())
10689 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10690 dump_printf (l_dump_flags
,
10691 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
/* Non-addressable variables can live in registers already; nothing to
   adjust.  */
10696 if (res
&& !TREE_ADDRESSABLE (decl
))
10700 if (dump_enabled_p ())
10702 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10703 dump_printf (l_dump_flags
,
10704 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10705 "not addressable");
/* Accepted: report, and under TDF_DETAILS also dump the decl itself.  */
10711 if (dump_enabled_p ())
10713 oacc_privatization_begin_diagnose_var (l_dump_flags
, loc
, c
, decl
);
10714 dump_printf (l_dump_flags
,
10715 "is candidate for adjusting OpenACC privatization level\n");
10719 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10721 print_generic_decl (dump_file
, decl
, dump_flags
);
10722 fprintf (dump_file
, "\n");
10728 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
/* NOTE(review): corrupted extraction -- the rest of this comment, the
   return type line, braces, and the `continue' after the candidate check
   are elided; restore from upstream omp-low.cc.
   For each OMP_CLAUSE_PRIVATE clause, look up the privatized decl in CTX
   and, if oacc_privatization_candidate_p accepts it, record it (exactly
   once, per the checking assert) in ctx->oacc_privatization_candidates.  */
10732 oacc_privatization_scan_clause_chain (omp_context
*ctx
, tree clauses
)
10734 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10735 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
)
10737 tree decl
= OMP_CLAUSE_DECL (c
);
10739 tree new_decl
= lookup_decl (decl
, ctx
);
10741 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c
), c
,
10745 gcc_checking_assert
10746 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10747 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10751 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
/* NOTE(review): corrupted extraction -- the rest of this comment, the
   return type line, braces, and the `continue' after the candidate check
   are elided; restore from upstream omp-low.cc.
   Walks a DECL_CHAIN of block-local decls (passing NULL for the clause
   argument of oacc_privatization_candidate_p, i.e. the "declared in block"
   flavor) and records accepted decls in
   ctx->oacc_privatization_candidates.  The assert checks that the decl
   maps to itself in this context.  */
10755 oacc_privatization_scan_decl_chain (omp_context
*ctx
, tree decls
)
10757 for (tree decl
= decls
; decl
; decl
= DECL_CHAIN (decl
))
10759 tree new_decl
= lookup_decl (decl
, ctx
);
10760 gcc_checking_assert (new_decl
== decl
);
10762 if (!oacc_privatization_candidate_p (gimple_location (ctx
->stmt
), NULL
,
10766 gcc_checking_assert
10767 (!ctx
->oacc_privatization_candidates
.contains (new_decl
));
10768 ctx
->oacc_privatization_candidates
.safe_push (new_decl
);
10772 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
/* NOTE(review): corrupted extraction -- the return type line, braces, the
   `default:'/`break;' lines of the switch, and the final `return NULL;'
   (TODO confirm) are elided; restore from upstream omp-low.cc.
   Visible behavior: marks statements handled by default; descends into a
   GIMPLE_OMP_FOR only when it is a combined-into SIMD loop; on
   GIMPLE_OMP_SCAN stores the iterator into *(gimple_stmt_iterator *)
   wi->info and returns non-NULL to stop the walk.  */
10775 omp_find_scan (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
10776 struct walk_stmt_info
*wi
)
10778 gimple
*stmt
= gsi_stmt (*gsi_p
);
10780 *handled_ops_p
= true;
10781 switch (gimple_code (stmt
))
10785 case GIMPLE_OMP_FOR
:
10786 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_SIMD
10787 && gimple_omp_for_combined_into_p (stmt
))
10788 *handled_ops_p
= false;
10791 case GIMPLE_OMP_SCAN
:
10792 *(gimple_stmt_iterator
*) (wi
->info
) = *gsi_p
;
10793 return integer_zero_node
;
10800 /* Helper function for lower_omp_for, add transformations for a worksharing
10801 loop with scan directives inside of it.
10802 For worksharing loop not combined with simd, transform:
10803 #pragma omp for reduction(inscan,+:r) private(i)
10804 for (i = 0; i < n; i = i + 1)
10809 #pragma omp scan inclusive(r)
10815 into two worksharing loops + code to merge results:
10817 num_threads = omp_get_num_threads ();
10818 thread_num = omp_get_thread_num ();
10819 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10824 // For UDRs this is UDR init, or if ctors are needed, copy from
10825 // var3 that has been constructed to contain the neutral element.
10829 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10830 // a shared array with num_threads elements and rprivb to a local array
10831 // number of elements equal to the number of (contiguous) iterations the
10832 // current thread will perform. controlb and controlp variables are
10833 // temporaries to handle deallocation of rprivb at the end of second
10835 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10836 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10837 for (i = 0; i < n; i = i + 1)
10840 // For UDRs this is UDR init or copy from var3.
10842 // This is the input phase from user code.
10846 // For UDRs this is UDR merge.
10848 // Rather than handing it over to the user, save to local thread's
10850 rprivb[ivar] = var2;
10851 // For exclusive scan, the above two statements are swapped.
10855 // And remember the final value from this thread's into the shared
10857 rpriva[(sizetype) thread_num] = var2;
10858 // If more than one thread, compute using Work-Efficient prefix sum
10859 // the inclusive parallel scan of the rpriva array.
10860 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10865 num_threadsu = (unsigned int) num_threads;
10866 thread_numup1 = (unsigned int) thread_num + 1;
10869 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10873 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10878 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10879 mul = REALPART_EXPR <cplx>;
10880 ovf = IMAGPART_EXPR <cplx>;
10881 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10884 andvm1 = andv + 4294967295;
10886 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10888 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10889 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10890 rpriva[l] = rpriva[l - k] + rpriva[l];
10892 if (down == 0) goto <D.2121>; else goto <D.2122>;
10900 if (k != 0) goto <D.2108>; else goto <D.2103>;
10902 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10904 // For UDRs this is UDR init or copy from var3.
10908 var2 = rpriva[thread_num - 1];
10911 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10912 reduction(inscan,+:r) private(i)
10913 for (i = 0; i < n; i = i + 1)
10916 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10917 r = var2 + rprivb[ivar];
10920 // This is the scan phase from user code.
10922 // Plus a bump of the iterator.
10928 lower_omp_for_scan (gimple_seq
*body_p
, gimple_seq
*dlist
, gomp_for
*stmt
,
10929 struct omp_for_data
*fd
, omp_context
*ctx
)
10931 bool is_for_simd
= gimple_omp_for_combined_p (stmt
);
10932 gcc_assert (ctx
->scan_inclusive
|| ctx
->scan_exclusive
);
10934 gimple_seq body
= gimple_omp_body (stmt
);
10935 gimple_stmt_iterator input1_gsi
= gsi_none ();
10936 struct walk_stmt_info wi
;
10937 memset (&wi
, 0, sizeof (wi
));
10938 wi
.val_only
= true;
10939 wi
.info
= (void *) &input1_gsi
;
10940 walk_gimple_seq_mod (&body
, omp_find_scan
, NULL
, &wi
);
10941 gcc_assert (!gsi_end_p (input1_gsi
));
10943 gimple
*input_stmt1
= gsi_stmt (input1_gsi
);
10944 gimple_stmt_iterator gsi
= input1_gsi
;
10946 gimple_stmt_iterator scan1_gsi
= gsi
;
10947 gimple
*scan_stmt1
= gsi_stmt (gsi
);
10948 gcc_assert (scan_stmt1
&& gimple_code (scan_stmt1
) == GIMPLE_OMP_SCAN
);
10950 gimple_seq input_body
= gimple_omp_body (input_stmt1
);
10951 gimple_seq scan_body
= gimple_omp_body (scan_stmt1
);
10952 gimple_omp_set_body (input_stmt1
, NULL
);
10953 gimple_omp_set_body (scan_stmt1
, NULL
);
10954 gimple_omp_set_body (stmt
, NULL
);
10956 gomp_for
*new_stmt
= as_a
<gomp_for
*> (gimple_copy (stmt
));
10957 gimple_seq new_body
= copy_gimple_seq_and_replace_locals (body
);
10958 gimple_omp_set_body (stmt
, body
);
10959 gimple_omp_set_body (input_stmt1
, input_body
);
10961 gimple_stmt_iterator input2_gsi
= gsi_none ();
10962 memset (&wi
, 0, sizeof (wi
));
10963 wi
.val_only
= true;
10964 wi
.info
= (void *) &input2_gsi
;
10965 walk_gimple_seq_mod (&new_body
, omp_find_scan
, NULL
, &wi
);
10966 gcc_assert (!gsi_end_p (input2_gsi
));
10968 gimple
*input_stmt2
= gsi_stmt (input2_gsi
);
10971 gimple_stmt_iterator scan2_gsi
= gsi
;
10972 gimple
*scan_stmt2
= gsi_stmt (gsi
);
10973 gcc_assert (scan_stmt2
&& gimple_code (scan_stmt2
) == GIMPLE_OMP_SCAN
);
10974 gimple_omp_set_body (scan_stmt2
, scan_body
);
10976 gimple_stmt_iterator input3_gsi
= gsi_none ();
10977 gimple_stmt_iterator scan3_gsi
= gsi_none ();
10978 gimple_stmt_iterator input4_gsi
= gsi_none ();
10979 gimple_stmt_iterator scan4_gsi
= gsi_none ();
10980 gimple
*input_stmt3
= NULL
, *scan_stmt3
= NULL
;
10981 gimple
*input_stmt4
= NULL
, *scan_stmt4
= NULL
;
10982 omp_context
*input_simd_ctx
= NULL
, *scan_simd_ctx
= NULL
;
10985 memset (&wi
, 0, sizeof (wi
));
10986 wi
.val_only
= true;
10987 wi
.info
= (void *) &input3_gsi
;
10988 walk_gimple_seq_mod (&input_body
, omp_find_scan
, NULL
, &wi
);
10989 gcc_assert (!gsi_end_p (input3_gsi
));
10991 input_stmt3
= gsi_stmt (input3_gsi
);
10995 scan_stmt3
= gsi_stmt (gsi
);
10996 gcc_assert (scan_stmt3
&& gimple_code (scan_stmt3
) == GIMPLE_OMP_SCAN
);
10998 memset (&wi
, 0, sizeof (wi
));
10999 wi
.val_only
= true;
11000 wi
.info
= (void *) &input4_gsi
;
11001 walk_gimple_seq_mod (&scan_body
, omp_find_scan
, NULL
, &wi
);
11002 gcc_assert (!gsi_end_p (input4_gsi
));
11004 input_stmt4
= gsi_stmt (input4_gsi
);
11008 scan_stmt4
= gsi_stmt (gsi
);
11009 gcc_assert (scan_stmt4
&& gimple_code (scan_stmt4
) == GIMPLE_OMP_SCAN
);
11011 input_simd_ctx
= maybe_lookup_ctx (input_stmt3
)->outer
;
11012 scan_simd_ctx
= maybe_lookup_ctx (input_stmt4
)->outer
;
11015 tree num_threads
= create_tmp_var (integer_type_node
);
11016 tree thread_num
= create_tmp_var (integer_type_node
);
11017 tree nthreads_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS
);
11018 tree threadnum_decl
= builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM
);
11019 gimple
*g
= gimple_build_call (nthreads_decl
, 0);
11020 gimple_call_set_lhs (g
, num_threads
);
11021 gimple_seq_add_stmt (body_p
, g
);
11022 g
= gimple_build_call (threadnum_decl
, 0);
11023 gimple_call_set_lhs (g
, thread_num
);
11024 gimple_seq_add_stmt (body_p
, g
);
11026 tree ivar
= create_tmp_var (sizetype
);
11027 tree new_clauses1
= NULL_TREE
, new_clauses2
= NULL_TREE
;
11028 tree
*cp1
= &new_clauses1
, *cp2
= &new_clauses2
;
11029 tree k
= create_tmp_var (unsigned_type_node
);
11030 tree l
= create_tmp_var (unsigned_type_node
);
11032 gimple_seq clist
= NULL
, mdlist
= NULL
;
11033 gimple_seq thr01_list
= NULL
, thrn1_list
= NULL
;
11034 gimple_seq thr02_list
= NULL
, thrn2_list
= NULL
;
11035 gimple_seq scan1_list
= NULL
, input2_list
= NULL
;
11036 gimple_seq last_list
= NULL
, reduc_list
= NULL
;
11037 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11038 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11039 && OMP_CLAUSE_REDUCTION_INSCAN (c
))
11041 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
11042 tree var
= OMP_CLAUSE_DECL (c
);
11043 tree new_var
= lookup_decl (var
, ctx
);
11044 tree var3
= NULL_TREE
;
11045 tree new_vard
= new_var
;
11046 if (omp_privatize_by_reference (var
))
11047 new_var
= build_simple_mem_ref_loc (clause_loc
, new_var
);
11048 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11050 var3
= maybe_lookup_decl (new_vard
, ctx
);
11051 if (var3
== new_vard
)
11055 tree ptype
= build_pointer_type (TREE_TYPE (new_var
));
11056 tree rpriva
= create_tmp_var (ptype
);
11057 tree nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11058 OMP_CLAUSE_DECL (nc
) = rpriva
;
11060 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11062 tree rprivb
= create_tmp_var (ptype
);
11063 nc
= build_omp_clause (clause_loc
, OMP_CLAUSE__SCANTEMP_
);
11064 OMP_CLAUSE_DECL (nc
) = rprivb
;
11065 OMP_CLAUSE__SCANTEMP__ALLOC (nc
) = 1;
11067 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11069 tree var2
= create_tmp_var_raw (TREE_TYPE (new_var
));
11070 if (new_vard
!= new_var
)
11071 TREE_ADDRESSABLE (var2
) = 1;
11072 gimple_add_tmp_var (var2
);
11074 tree x
= fold_convert_loc (clause_loc
, sizetype
, thread_num
);
11075 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11076 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11077 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11078 tree rpriva_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11080 x
= fold_build2_loc (clause_loc
, PLUS_EXPR
, integer_type_node
,
11081 thread_num
, integer_minus_one_node
);
11082 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11083 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11084 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11085 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11086 tree rprivam1_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11088 x
= fold_convert_loc (clause_loc
, sizetype
, l
);
11089 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11090 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11091 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11092 tree rprival_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11094 x
= fold_build2_loc (clause_loc
, MINUS_EXPR
, unsigned_type_node
, l
, k
);
11095 x
= fold_convert_loc (clause_loc
, sizetype
, x
);
11096 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, x
,
11097 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11098 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rpriva
), rpriva
, x
);
11099 tree rprivalmk_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11101 x
= fold_build2_loc (clause_loc
, MULT_EXPR
, sizetype
, ivar
,
11102 TYPE_SIZE_UNIT (TREE_TYPE (ptype
)));
11103 x
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (rprivb
), rprivb
, x
);
11104 tree rprivb_ref
= build_simple_mem_ref_loc (clause_loc
, x
);
11106 tree var4
= is_for_simd
? new_var
: var2
;
11107 tree var5
= NULL_TREE
, var6
= NULL_TREE
;
11110 var5
= lookup_decl (var
, input_simd_ctx
);
11111 var6
= lookup_decl (var
, scan_simd_ctx
);
11112 if (new_vard
!= new_var
)
11114 var5
= build_simple_mem_ref_loc (clause_loc
, var5
);
11115 var6
= build_simple_mem_ref_loc (clause_loc
, var6
);
11118 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11120 tree placeholder
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
);
11123 x
= lang_hooks
.decls
.omp_clause_default_ctor
11124 (c
, var2
, build_outer_var_ref (var
, ctx
));
11126 gimplify_and_add (x
, &clist
);
11128 x
= build_outer_var_ref (var
, ctx
);
11129 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, unshare_expr (var4
),
11131 gimplify_and_add (x
, &thr01_list
);
11133 tree y
= (DECL_HAS_VALUE_EXPR_P (new_vard
)
11134 ? DECL_VALUE_EXPR (new_vard
) : NULL_TREE
);
11137 x
= unshare_expr (var4
);
11138 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11139 gimplify_and_add (x
, &thrn1_list
);
11140 x
= unshare_expr (var4
);
11141 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var3
);
11142 gimplify_and_add (x
, &thr02_list
);
11144 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
))
11146 /* Otherwise, assign to it the identity element. */
11147 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11148 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11151 if (new_vard
!= new_var
)
11152 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11153 SET_DECL_VALUE_EXPR (new_vard
, val
);
11154 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11156 SET_DECL_VALUE_EXPR (placeholder
, error_mark_node
);
11157 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11158 lower_omp (&tseq
, ctx
);
11159 gimple_seq_add_seq (&thrn1_list
, tseq
);
11160 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
);
11161 lower_omp (&tseq
, ctx
);
11162 gimple_seq_add_seq (&thr02_list
, tseq
);
11163 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11164 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11165 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11167 SET_DECL_VALUE_EXPR (new_vard
, y
);
11170 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11171 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11175 x
= unshare_expr (var4
);
11176 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rprivam1_ref
);
11177 gimplify_and_add (x
, &thrn2_list
);
11181 x
= unshare_expr (rprivb_ref
);
11182 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var5
);
11183 gimplify_and_add (x
, &scan1_list
);
11187 if (ctx
->scan_exclusive
)
11189 x
= unshare_expr (rprivb_ref
);
11190 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11191 gimplify_and_add (x
, &scan1_list
);
11194 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11195 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11196 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11197 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11198 lower_omp (&tseq
, ctx
);
11199 gimple_seq_add_seq (&scan1_list
, tseq
);
11201 if (ctx
->scan_inclusive
)
11203 x
= unshare_expr (rprivb_ref
);
11204 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var2
);
11205 gimplify_and_add (x
, &scan1_list
);
11209 x
= unshare_expr (rpriva_ref
);
11210 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
,
11211 unshare_expr (var4
));
11212 gimplify_and_add (x
, &mdlist
);
11214 x
= unshare_expr (is_for_simd
? var6
: new_var
);
11215 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, var4
);
11216 gimplify_and_add (x
, &input2_list
);
11219 if (new_vard
!= new_var
)
11220 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11222 gimple_seq tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11223 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11224 SET_DECL_VALUE_EXPR (new_vard
, val
);
11225 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11228 SET_DECL_VALUE_EXPR (placeholder
, var6
);
11229 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11232 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11233 lower_omp (&tseq
, ctx
);
11235 SET_DECL_VALUE_EXPR (new_vard
, y
);
11238 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11239 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11243 SET_DECL_VALUE_EXPR (placeholder
, new_var
);
11244 DECL_HAS_VALUE_EXPR_P (placeholder
) = 1;
11245 lower_omp (&tseq
, ctx
);
11247 gimple_seq_add_seq (&input2_list
, tseq
);
11249 x
= build_outer_var_ref (var
, ctx
);
11250 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, x
, rpriva_ref
);
11251 gimplify_and_add (x
, &last_list
);
11253 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, var2
, rprivalmk_ref
);
11254 gimplify_and_add (x
, &reduc_list
);
11255 tseq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
);
11256 tseq
= copy_gimple_seq_and_replace_locals (tseq
);
11258 if (new_vard
!= new_var
)
11259 val
= build_fold_addr_expr_loc (clause_loc
, val
);
11260 SET_DECL_VALUE_EXPR (new_vard
, val
);
11261 DECL_HAS_VALUE_EXPR_P (new_vard
) = 1;
11262 SET_DECL_VALUE_EXPR (placeholder
, var2
);
11263 lower_omp (&tseq
, ctx
);
11264 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11265 SET_DECL_VALUE_EXPR (placeholder
, NULL_TREE
);
11266 DECL_HAS_VALUE_EXPR_P (placeholder
) = 0;
11268 SET_DECL_VALUE_EXPR (new_vard
, y
);
11271 DECL_HAS_VALUE_EXPR_P (new_vard
) = 0;
11272 SET_DECL_VALUE_EXPR (new_vard
, NULL_TREE
);
11274 gimple_seq_add_seq (&reduc_list
, tseq
);
11275 x
= lang_hooks
.decls
.omp_clause_assign_op (c
, rprival_ref
, var2
);
11276 gimplify_and_add (x
, &reduc_list
);
11278 x
= lang_hooks
.decls
.omp_clause_dtor (c
, var2
);
11280 gimplify_and_add (x
, dlist
);
11284 x
= build_outer_var_ref (var
, ctx
);
11285 gimplify_assign (unshare_expr (var4
), x
, &thr01_list
);
11287 x
= omp_reduction_init (c
, TREE_TYPE (new_var
));
11288 gimplify_assign (unshare_expr (var4
), unshare_expr (x
),
11290 gimplify_assign (unshare_expr (var4
), x
, &thr02_list
);
11292 gimplify_assign (unshare_expr (var4
), rprivam1_ref
, &thrn2_list
);
11294 enum tree_code code
= OMP_CLAUSE_REDUCTION_CODE (c
);
11295 if (code
== MINUS_EXPR
)
11299 gimplify_assign (unshare_expr (rprivb_ref
), var5
, &scan1_list
);
11302 if (ctx
->scan_exclusive
)
11303 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11305 x
= build2 (code
, TREE_TYPE (new_var
), var2
, new_var
);
11306 gimplify_assign (var2
, x
, &scan1_list
);
11307 if (ctx
->scan_inclusive
)
11308 gimplify_assign (unshare_expr (rprivb_ref
), var2
,
11312 gimplify_assign (unshare_expr (rpriva_ref
), unshare_expr (var4
),
11315 x
= build2 (code
, TREE_TYPE (new_var
), var4
, rprivb_ref
);
11316 gimplify_assign (is_for_simd
? var6
: new_var
, x
, &input2_list
);
11318 gimplify_assign (build_outer_var_ref (var
, ctx
), rpriva_ref
,
11321 x
= build2 (code
, TREE_TYPE (new_var
), rprivalmk_ref
,
11322 unshare_expr (rprival_ref
));
11323 gimplify_assign (rprival_ref
, x
, &reduc_list
);
11327 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11328 gimple_seq_add_stmt (&scan1_list
, g
);
11329 g
= gimple_build_assign (ivar
, PLUS_EXPR
, ivar
, size_one_node
);
11330 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11331 ? scan_stmt4
: scan_stmt2
), g
);
11333 tree controlb
= create_tmp_var (boolean_type_node
);
11334 tree controlp
= create_tmp_var (ptr_type_node
);
11335 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11336 OMP_CLAUSE_DECL (nc
) = controlb
;
11337 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11339 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11340 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11341 OMP_CLAUSE_DECL (nc
) = controlp
;
11342 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11344 cp1
= &OMP_CLAUSE_CHAIN (nc
);
11345 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11346 OMP_CLAUSE_DECL (nc
) = controlb
;
11347 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11349 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11350 nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__SCANTEMP_
);
11351 OMP_CLAUSE_DECL (nc
) = controlp
;
11352 OMP_CLAUSE__SCANTEMP__CONTROL (nc
) = 1;
11354 cp2
= &OMP_CLAUSE_CHAIN (nc
);
11356 *cp1
= gimple_omp_for_clauses (stmt
);
11357 gimple_omp_for_set_clauses (stmt
, new_clauses1
);
11358 *cp2
= gimple_omp_for_clauses (new_stmt
);
11359 gimple_omp_for_set_clauses (new_stmt
, new_clauses2
);
11363 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3
), scan1_list
);
11364 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4
), input2_list
);
11366 gsi_insert_seq_after (&input3_gsi
, gimple_omp_body (input_stmt3
),
11368 gsi_remove (&input3_gsi
, true);
11369 gsi_insert_seq_after (&scan3_gsi
, gimple_omp_body (scan_stmt3
),
11371 gsi_remove (&scan3_gsi
, true);
11372 gsi_insert_seq_after (&input4_gsi
, gimple_omp_body (input_stmt4
),
11374 gsi_remove (&input4_gsi
, true);
11375 gsi_insert_seq_after (&scan4_gsi
, gimple_omp_body (scan_stmt4
),
11377 gsi_remove (&scan4_gsi
, true);
11381 gimple_omp_set_body (scan_stmt1
, scan1_list
);
11382 gimple_omp_set_body (input_stmt2
, input2_list
);
11385 gsi_insert_seq_after (&input1_gsi
, gimple_omp_body (input_stmt1
),
11387 gsi_remove (&input1_gsi
, true);
11388 gsi_insert_seq_after (&scan1_gsi
, gimple_omp_body (scan_stmt1
),
11390 gsi_remove (&scan1_gsi
, true);
11391 gsi_insert_seq_after (&input2_gsi
, gimple_omp_body (input_stmt2
),
11393 gsi_remove (&input2_gsi
, true);
11394 gsi_insert_seq_after (&scan2_gsi
, gimple_omp_body (scan_stmt2
),
11396 gsi_remove (&scan2_gsi
, true);
11398 gimple_seq_add_seq (body_p
, clist
);
11400 tree lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11401 tree lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11402 tree lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11403 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11404 gimple_seq_add_stmt (body_p
, g
);
11405 g
= gimple_build_label (lab1
);
11406 gimple_seq_add_stmt (body_p
, g
);
11407 gimple_seq_add_seq (body_p
, thr01_list
);
11408 g
= gimple_build_goto (lab3
);
11409 gimple_seq_add_stmt (body_p
, g
);
11410 g
= gimple_build_label (lab2
);
11411 gimple_seq_add_stmt (body_p
, g
);
11412 gimple_seq_add_seq (body_p
, thrn1_list
);
11413 g
= gimple_build_label (lab3
);
11414 gimple_seq_add_stmt (body_p
, g
);
11416 g
= gimple_build_assign (ivar
, size_zero_node
);
11417 gimple_seq_add_stmt (body_p
, g
);
11419 gimple_seq_add_stmt (body_p
, stmt
);
11420 gimple_seq_add_seq (body_p
, body
);
11421 gimple_seq_add_stmt (body_p
, gimple_build_omp_continue (fd
->loop
.v
,
11424 g
= gimple_build_omp_return (true);
11425 gimple_seq_add_stmt (body_p
, g
);
11426 gimple_seq_add_seq (body_p
, mdlist
);
11428 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11429 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11430 g
= gimple_build_cond (GT_EXPR
, num_threads
, integer_one_node
, lab1
, lab2
);
11431 gimple_seq_add_stmt (body_p
, g
);
11432 g
= gimple_build_label (lab1
);
11433 gimple_seq_add_stmt (body_p
, g
);
11435 g
= omp_build_barrier (NULL
);
11436 gimple_seq_add_stmt (body_p
, g
);
11438 tree down
= create_tmp_var (unsigned_type_node
);
11439 g
= gimple_build_assign (down
, build_zero_cst (unsigned_type_node
));
11440 gimple_seq_add_stmt (body_p
, g
);
11442 g
= gimple_build_assign (k
, build_one_cst (unsigned_type_node
));
11443 gimple_seq_add_stmt (body_p
, g
);
11445 tree num_threadsu
= create_tmp_var (unsigned_type_node
);
11446 g
= gimple_build_assign (num_threadsu
, NOP_EXPR
, num_threads
);
11447 gimple_seq_add_stmt (body_p
, g
);
11449 tree thread_numu
= create_tmp_var (unsigned_type_node
);
11450 g
= gimple_build_assign (thread_numu
, NOP_EXPR
, thread_num
);
11451 gimple_seq_add_stmt (body_p
, g
);
11453 tree thread_nump1
= create_tmp_var (unsigned_type_node
);
11454 g
= gimple_build_assign (thread_nump1
, PLUS_EXPR
, thread_numu
,
11455 build_int_cst (unsigned_type_node
, 1));
11456 gimple_seq_add_stmt (body_p
, g
);
11458 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11459 g
= gimple_build_label (lab3
);
11460 gimple_seq_add_stmt (body_p
, g
);
11462 tree twok
= create_tmp_var (unsigned_type_node
);
11463 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11464 gimple_seq_add_stmt (body_p
, g
);
11466 tree lab4
= create_artificial_label (UNKNOWN_LOCATION
);
11467 tree lab5
= create_artificial_label (UNKNOWN_LOCATION
);
11468 tree lab6
= create_artificial_label (UNKNOWN_LOCATION
);
11469 g
= gimple_build_cond (GT_EXPR
, twok
, num_threadsu
, lab4
, lab5
);
11470 gimple_seq_add_stmt (body_p
, g
);
11471 g
= gimple_build_label (lab4
);
11472 gimple_seq_add_stmt (body_p
, g
);
11473 g
= gimple_build_assign (down
, build_all_ones_cst (unsigned_type_node
));
11474 gimple_seq_add_stmt (body_p
, g
);
11475 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11476 gimple_seq_add_stmt (body_p
, g
);
11478 g
= gimple_build_cond (EQ_EXPR
, k
, num_threadsu
, lab6
, lab5
);
11479 gimple_seq_add_stmt (body_p
, g
);
11480 g
= gimple_build_label (lab6
);
11481 gimple_seq_add_stmt (body_p
, g
);
11483 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11484 gimple_seq_add_stmt (body_p
, g
);
11486 g
= gimple_build_label (lab5
);
11487 gimple_seq_add_stmt (body_p
, g
);
11489 g
= gimple_build_assign (twok
, LSHIFT_EXPR
, k
, integer_one_node
);
11490 gimple_seq_add_stmt (body_p
, g
);
11492 tree cplx
= create_tmp_var (build_complex_type (unsigned_type_node
, false));
11493 g
= gimple_build_call_internal (IFN_MUL_OVERFLOW
, 2, thread_nump1
, twok
);
11494 gimple_call_set_lhs (g
, cplx
);
11495 gimple_seq_add_stmt (body_p
, g
);
11496 tree mul
= create_tmp_var (unsigned_type_node
);
11497 g
= gimple_build_assign (mul
, REALPART_EXPR
,
11498 build1 (REALPART_EXPR
, unsigned_type_node
, cplx
));
11499 gimple_seq_add_stmt (body_p
, g
);
11500 tree ovf
= create_tmp_var (unsigned_type_node
);
11501 g
= gimple_build_assign (ovf
, IMAGPART_EXPR
,
11502 build1 (IMAGPART_EXPR
, unsigned_type_node
, cplx
));
11503 gimple_seq_add_stmt (body_p
, g
);
11505 tree lab7
= create_artificial_label (UNKNOWN_LOCATION
);
11506 tree lab8
= create_artificial_label (UNKNOWN_LOCATION
);
11507 g
= gimple_build_cond (EQ_EXPR
, ovf
, build_zero_cst (unsigned_type_node
),
11509 gimple_seq_add_stmt (body_p
, g
);
11510 g
= gimple_build_label (lab7
);
11511 gimple_seq_add_stmt (body_p
, g
);
11513 tree andv
= create_tmp_var (unsigned_type_node
);
11514 g
= gimple_build_assign (andv
, BIT_AND_EXPR
, k
, down
);
11515 gimple_seq_add_stmt (body_p
, g
);
11516 tree andvm1
= create_tmp_var (unsigned_type_node
);
11517 g
= gimple_build_assign (andvm1
, PLUS_EXPR
, andv
,
11518 build_minus_one_cst (unsigned_type_node
));
11519 gimple_seq_add_stmt (body_p
, g
);
11521 g
= gimple_build_assign (l
, PLUS_EXPR
, mul
, andvm1
);
11522 gimple_seq_add_stmt (body_p
, g
);
11524 tree lab9
= create_artificial_label (UNKNOWN_LOCATION
);
11525 g
= gimple_build_cond (LT_EXPR
, l
, num_threadsu
, lab9
, lab8
);
11526 gimple_seq_add_stmt (body_p
, g
);
11527 g
= gimple_build_label (lab9
);
11528 gimple_seq_add_stmt (body_p
, g
);
11529 gimple_seq_add_seq (body_p
, reduc_list
);
11530 g
= gimple_build_label (lab8
);
11531 gimple_seq_add_stmt (body_p
, g
);
11533 tree lab10
= create_artificial_label (UNKNOWN_LOCATION
);
11534 tree lab11
= create_artificial_label (UNKNOWN_LOCATION
);
11535 tree lab12
= create_artificial_label (UNKNOWN_LOCATION
);
11536 g
= gimple_build_cond (EQ_EXPR
, down
, build_zero_cst (unsigned_type_node
),
11538 gimple_seq_add_stmt (body_p
, g
);
11539 g
= gimple_build_label (lab10
);
11540 gimple_seq_add_stmt (body_p
, g
);
11541 g
= gimple_build_assign (k
, LSHIFT_EXPR
, k
, integer_one_node
);
11542 gimple_seq_add_stmt (body_p
, g
);
11543 g
= gimple_build_goto (lab12
);
11544 gimple_seq_add_stmt (body_p
, g
);
11545 g
= gimple_build_label (lab11
);
11546 gimple_seq_add_stmt (body_p
, g
);
11547 g
= gimple_build_assign (k
, RSHIFT_EXPR
, k
, integer_one_node
);
11548 gimple_seq_add_stmt (body_p
, g
);
11549 g
= gimple_build_label (lab12
);
11550 gimple_seq_add_stmt (body_p
, g
);
11552 g
= omp_build_barrier (NULL
);
11553 gimple_seq_add_stmt (body_p
, g
);
11555 g
= gimple_build_cond (NE_EXPR
, k
, build_zero_cst (unsigned_type_node
),
11557 gimple_seq_add_stmt (body_p
, g
);
11559 g
= gimple_build_label (lab2
);
11560 gimple_seq_add_stmt (body_p
, g
);
11562 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11563 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11564 lab3
= create_artificial_label (UNKNOWN_LOCATION
);
11565 g
= gimple_build_cond (EQ_EXPR
, thread_num
, integer_zero_node
, lab1
, lab2
);
11566 gimple_seq_add_stmt (body_p
, g
);
11567 g
= gimple_build_label (lab1
);
11568 gimple_seq_add_stmt (body_p
, g
);
11569 gimple_seq_add_seq (body_p
, thr02_list
);
11570 g
= gimple_build_goto (lab3
);
11571 gimple_seq_add_stmt (body_p
, g
);
11572 g
= gimple_build_label (lab2
);
11573 gimple_seq_add_stmt (body_p
, g
);
11574 gimple_seq_add_seq (body_p
, thrn2_list
);
11575 g
= gimple_build_label (lab3
);
11576 gimple_seq_add_stmt (body_p
, g
);
11578 g
= gimple_build_assign (ivar
, size_zero_node
);
11579 gimple_seq_add_stmt (body_p
, g
);
11580 gimple_seq_add_stmt (body_p
, new_stmt
);
11581 gimple_seq_add_seq (body_p
, new_body
);
11583 gimple_seq new_dlist
= NULL
;
11584 lab1
= create_artificial_label (UNKNOWN_LOCATION
);
11585 lab2
= create_artificial_label (UNKNOWN_LOCATION
);
11586 tree num_threadsm1
= create_tmp_var (integer_type_node
);
11587 g
= gimple_build_assign (num_threadsm1
, PLUS_EXPR
, num_threads
,
11588 integer_minus_one_node
);
11589 gimple_seq_add_stmt (&new_dlist
, g
);
11590 g
= gimple_build_cond (EQ_EXPR
, thread_num
, num_threadsm1
, lab1
, lab2
);
11591 gimple_seq_add_stmt (&new_dlist
, g
);
11592 g
= gimple_build_label (lab1
);
11593 gimple_seq_add_stmt (&new_dlist
, g
);
11594 gimple_seq_add_seq (&new_dlist
, last_list
);
11595 g
= gimple_build_label (lab2
);
11596 gimple_seq_add_stmt (&new_dlist
, g
);
11597 gimple_seq_add_seq (&new_dlist
, *dlist
);
11598 *dlist
= new_dlist
;
11601 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11602 the addresses of variables to be made private at the surrounding
11603 parallelism level. Such functions appear in the gimple code stream in two
11604 forms, e.g. for a partitioned loop:
11606 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11607 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11608 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11609 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11611 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11612 not as part of a HEAD_MARK sequence:
11614 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11616 For such stand-alone appearances, the 3rd argument is always 0, denoting
11617 gang partitioning. */
11620 lower_oacc_private_marker (omp_context
*ctx
)
11622 if (ctx
->oacc_privatization_candidates
.length () == 0)
11625 auto_vec
<tree
, 5> args
;
11627 args
.quick_push (build_int_cst (integer_type_node
, IFN_UNIQUE_OACC_PRIVATE
));
11628 args
.quick_push (integer_zero_node
);
11629 args
.quick_push (integer_minus_one_node
);
11633 FOR_EACH_VEC_ELT (ctx
->oacc_privatization_candidates
, i
, decl
)
11635 gcc_checking_assert (TREE_ADDRESSABLE (decl
));
11636 tree addr
= build_fold_addr_expr (decl
);
11637 args
.safe_push (addr
);
11640 return gimple_build_call_internal_vec (IFN_UNIQUE
, args
);
11643 /* Lower code for an OMP loop directive. */
11646 lower_omp_for (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
11648 tree
*rhs_p
, block
;
11649 struct omp_for_data fd
, *fdp
= NULL
;
11650 gomp_for
*stmt
= as_a
<gomp_for
*> (gsi_stmt (*gsi_p
));
11652 gimple_seq omp_for_body
, body
, dlist
, tred_ilist
= NULL
, tred_dlist
= NULL
;
11653 gimple_seq cnt_list
= NULL
, clist
= NULL
;
11654 gimple_seq oacc_head
= NULL
, oacc_tail
= NULL
;
11657 push_gimplify_context ();
11659 if (is_gimple_omp_oacc (ctx
->stmt
))
11660 oacc_privatization_scan_clause_chain (ctx
, gimple_omp_for_clauses (stmt
));
11662 lower_omp (gimple_omp_for_pre_body_ptr (stmt
), ctx
);
11664 block
= make_node (BLOCK
);
11665 new_stmt
= gimple_build_bind (NULL
, NULL
, block
);
11666 /* Replace at gsi right away, so that 'stmt' is no member
11667 of a sequence anymore as we're going to add to a different
11669 gsi_replace (gsi_p
, new_stmt
, true);
11671 /* Move declaration of temporaries in the loop body before we make
11673 omp_for_body
= gimple_omp_body (stmt
);
11674 if (!gimple_seq_empty_p (omp_for_body
)
11675 && gimple_code (gimple_seq_first_stmt (omp_for_body
)) == GIMPLE_BIND
)
11678 = as_a
<gbind
*> (gimple_seq_first_stmt (omp_for_body
));
11679 tree vars
= gimple_bind_vars (inner_bind
);
11680 if (is_gimple_omp_oacc (ctx
->stmt
))
11681 oacc_privatization_scan_decl_chain (ctx
, vars
);
11682 gimple_bind_append_vars (new_stmt
, vars
);
11683 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
11684 keep them on the inner_bind and it's block. */
11685 gimple_bind_set_vars (inner_bind
, NULL_TREE
);
11686 if (gimple_bind_block (inner_bind
))
11687 BLOCK_VARS (gimple_bind_block (inner_bind
)) = NULL_TREE
;
11690 if (gimple_omp_for_combined_into_p (stmt
))
11692 omp_extract_for_data (stmt
, &fd
, NULL
);
11695 /* We need two temporaries with fd.loop.v type (istart/iend)
11696 and then (fd.collapse - 1) temporaries with the same
11697 type for count2 ... countN-1 vars if not constant. */
11699 tree type
= fd
.iter_type
;
11700 if (fd
.collapse
> 1
11701 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11702 count
+= fd
.collapse
- 1;
11704 tree type2
= NULL_TREE
;
11706 = (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
11707 || gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_TASKLOOP
);
11708 tree outerc
= NULL
, *pc
= gimple_omp_for_clauses_ptr (stmt
);
11710 tree clauses
= *pc
;
11711 if (fd
.collapse
> 1
11713 && fd
.last_nonrect
== fd
.first_nonrect
+ 1
11714 && TREE_CODE (fd
.loop
.n2
) != INTEGER_CST
)
11715 if (tree v
= gimple_omp_for_index (stmt
, fd
.last_nonrect
))
11716 if (!TYPE_UNSIGNED (TREE_TYPE (v
)))
11718 v
= gimple_omp_for_index (stmt
, fd
.first_nonrect
);
11719 type2
= TREE_TYPE (v
);
11725 = omp_find_clause (gimple_omp_taskreg_clauses (ctx
->outer
->stmt
),
11726 OMP_CLAUSE__LOOPTEMP_
);
11727 if (ctx
->simt_stmt
)
11728 simtc
= omp_find_clause (gimple_omp_for_clauses (ctx
->simt_stmt
),
11729 OMP_CLAUSE__LOOPTEMP_
);
11730 for (i
= 0; i
< count
+ count2
; i
++)
11735 gcc_assert (outerc
);
11736 temp
= lookup_decl (OMP_CLAUSE_DECL (outerc
), ctx
->outer
);
11737 outerc
= omp_find_clause (OMP_CLAUSE_CHAIN (outerc
),
11738 OMP_CLAUSE__LOOPTEMP_
);
11742 /* If there are 2 adjacent SIMD stmts, one with _simt_
11743 clause, another without, make sure they have the same
11744 decls in _looptemp_ clauses, because the outer stmt
11745 they are combined into will look up just one inner_stmt. */
11746 if (ctx
->simt_stmt
)
11747 temp
= OMP_CLAUSE_DECL (simtc
);
11749 temp
= create_tmp_var (i
>= count
? type2
: type
);
11750 insert_decl_map (&ctx
->outer
->cb
, temp
, temp
);
11752 *pc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__LOOPTEMP_
);
11753 OMP_CLAUSE_DECL (*pc
) = temp
;
11754 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11755 if (ctx
->simt_stmt
)
11756 simtc
= omp_find_clause (OMP_CLAUSE_CHAIN (simtc
),
11757 OMP_CLAUSE__LOOPTEMP_
);
11762 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11766 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt
), OMP_FOR
,
11767 OMP_CLAUSE_REDUCTION
);
11768 tree rtmp
= NULL_TREE
;
11771 tree type
= build_pointer_type (pointer_sized_int_node
);
11772 tree temp
= create_tmp_var (type
);
11773 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__REDUCTEMP_
);
11774 OMP_CLAUSE_DECL (c
) = temp
;
11775 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (stmt
);
11776 gimple_omp_for_set_clauses (stmt
, c
);
11777 lower_omp_task_reductions (ctx
, OMP_FOR
,
11778 gimple_omp_for_clauses (stmt
),
11779 &tred_ilist
, &tred_dlist
);
11781 rtmp
= make_ssa_name (type
);
11782 gimple_seq_add_stmt (&body
, gimple_build_assign (rtmp
, temp
));
11785 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt
),
11788 lower_rec_input_clauses (gimple_omp_for_clauses (stmt
), &body
, &dlist
, ctx
,
11790 gimple_seq_add_seq (rclauses
? &tred_ilist
: &body
,
11791 gimple_omp_for_pre_body (stmt
));
11793 lower_omp (gimple_omp_body_ptr (stmt
), ctx
);
11795 gcall
*private_marker
= NULL
;
11796 if (is_gimple_omp_oacc (ctx
->stmt
)
11797 && !gimple_seq_empty_p (omp_for_body
))
11798 private_marker
= lower_oacc_private_marker (ctx
);
11800 /* Lower the header expressions. At this point, we can assume that
11801 the header is of the form:
11803 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11805 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11806 using the .omp_data_s mapping, if needed. */
11807 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
11809 rhs_p
= gimple_omp_for_initial_ptr (stmt
, i
);
11810 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11812 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11813 TREE_VEC_ELT (*rhs_p
, 1)
11814 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11815 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11816 TREE_VEC_ELT (*rhs_p
, 2)
11817 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11819 else if (!is_gimple_min_invariant (*rhs_p
))
11820 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11821 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11822 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11824 rhs_p
= gimple_omp_for_final_ptr (stmt
, i
);
11825 if (TREE_CODE (*rhs_p
) == TREE_VEC
)
11827 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 1)))
11828 TREE_VEC_ELT (*rhs_p
, 1)
11829 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 1), &cnt_list
);
11830 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p
, 2)))
11831 TREE_VEC_ELT (*rhs_p
, 2)
11832 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p
, 2), &cnt_list
);
11834 else if (!is_gimple_min_invariant (*rhs_p
))
11835 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11836 else if (TREE_CODE (*rhs_p
) == ADDR_EXPR
)
11837 recompute_tree_invariant_for_addr_expr (*rhs_p
);
11839 rhs_p
= &TREE_OPERAND (gimple_omp_for_incr (stmt
, i
), 1);
11840 if (!is_gimple_min_invariant (*rhs_p
))
11841 *rhs_p
= get_formal_tmp_var (*rhs_p
, &cnt_list
);
11844 gimple_seq_add_seq (&tred_ilist
, cnt_list
);
11846 gimple_seq_add_seq (&body
, cnt_list
);
11848 /* Once lowered, extract the bounds and clauses. */
11849 omp_extract_for_data (stmt
, &fd
, NULL
);
11851 if (is_gimple_omp_oacc (ctx
->stmt
)
11852 && !ctx_in_oacc_kernels_region (ctx
))
11853 lower_oacc_head_tail (gimple_location (stmt
),
11854 gimple_omp_for_clauses (stmt
), private_marker
,
11855 &oacc_head
, &oacc_tail
, ctx
);
11857 /* Add OpenACC partitioning and reduction markers just before the loop. */
11859 gimple_seq_add_seq (&body
, oacc_head
);
11861 lower_omp_for_lastprivate (&fd
, &body
, &dlist
, &clist
, ctx
);
11863 if (gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11864 for (tree c
= gimple_omp_for_clauses (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11865 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11866 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11868 OMP_CLAUSE_DECL (c
) = lookup_decl (OMP_CLAUSE_DECL (c
), ctx
);
11869 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c
)))
11870 OMP_CLAUSE_LINEAR_STEP (c
)
11871 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c
),
11875 if ((ctx
->scan_inclusive
|| ctx
->scan_exclusive
)
11876 && gimple_omp_for_kind (stmt
) == GF_OMP_FOR_KIND_FOR
)
11877 lower_omp_for_scan (&body
, &dlist
, stmt
, &fd
, ctx
);
11880 gimple_seq_add_stmt (&body
, stmt
);
11881 gimple_seq_add_seq (&body
, gimple_omp_body (stmt
));
11884 gimple_seq_add_stmt (&body
, gimple_build_omp_continue (fd
.loop
.v
,
11887 /* After the loop, add exit clauses. */
11888 lower_reduction_clauses (gimple_omp_for_clauses (stmt
), &body
, &clist
, ctx
);
11892 tree fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START
);
11893 gcall
*g
= gimple_build_call (fndecl
, 0);
11894 gimple_seq_add_stmt (&body
, g
);
11895 gimple_seq_add_seq (&body
, clist
);
11896 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END
);
11897 g
= gimple_build_call (fndecl
, 0);
11898 gimple_seq_add_stmt (&body
, g
);
11901 if (ctx
->cancellable
)
11902 gimple_seq_add_stmt (&body
, gimple_build_label (ctx
->cancel_label
));
11904 gimple_seq_add_seq (&body
, dlist
);
11908 gimple_seq_add_seq (&tred_ilist
, body
);
11912 body
= maybe_catch_exception (body
);
11914 /* Region exit marker goes at the end of the loop body. */
11915 gimple
*g
= gimple_build_omp_return (fd
.have_nowait
);
11916 gimple_seq_add_stmt (&body
, g
);
11918 gimple_seq_add_seq (&body
, tred_dlist
);
11920 maybe_add_implicit_barrier_cancel (ctx
, g
, &body
);
11923 OMP_CLAUSE_DECL (rclauses
) = rtmp
;
11925 /* Add OpenACC joining and reduction markers just after the loop. */
11927 gimple_seq_add_seq (&body
, oacc_tail
);
11929 pop_gimplify_context (new_stmt
);
11931 gimple_bind_append_vars (new_stmt
, ctx
->block_vars
);
11932 maybe_remove_omp_member_access_dummy_vars (new_stmt
);
11933 BLOCK_VARS (block
) = gimple_bind_vars (new_stmt
);
11934 if (BLOCK_VARS (block
))
11935 TREE_USED (block
) = 1;
11937 gimple_bind_set_body (new_stmt
, body
);
11938 gimple_omp_set_body (stmt
, NULL
);
11939 gimple_omp_for_set_pre_body (stmt
, NULL
);
11942 /* Callback for walk_stmts. Check if the current statement only contains
11943 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11946 check_combined_parallel (gimple_stmt_iterator
*gsi_p
,
11947 bool *handled_ops_p
,
11948 struct walk_stmt_info
*wi
)
11950 int *info
= (int *) wi
->info
;
11951 gimple
*stmt
= gsi_stmt (*gsi_p
);
11953 *handled_ops_p
= true;
11954 switch (gimple_code (stmt
))
11960 case GIMPLE_OMP_FOR
:
11961 case GIMPLE_OMP_SECTIONS
:
11962 *info
= *info
== 0 ? 1 : -1;
11971 struct omp_taskcopy_context
11973 /* This field must be at the beginning, as we do "inheritance": Some
11974 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11975 receive a copy_body_data pointer that is up-casted to an
11976 omp_context pointer. */
11982 task_copyfn_copy_decl (tree var
, copy_body_data
*cb
)
11984 struct omp_taskcopy_context
*tcctx
= (struct omp_taskcopy_context
*) cb
;
11986 if (splay_tree_lookup (tcctx
->ctx
->sfield_map
, (splay_tree_key
) var
))
11987 return create_tmp_var (TREE_TYPE (var
));
11993 task_copyfn_remap_type (struct omp_taskcopy_context
*tcctx
, tree orig_type
)
11995 tree name
, new_fields
= NULL
, type
, f
;
11997 type
= lang_hooks
.types
.make_type (RECORD_TYPE
);
11998 name
= DECL_NAME (TYPE_NAME (orig_type
));
11999 name
= build_decl (gimple_location (tcctx
->ctx
->stmt
),
12000 TYPE_DECL
, name
, type
);
12001 TYPE_NAME (type
) = name
;
12003 for (f
= TYPE_FIELDS (orig_type
); f
; f
= TREE_CHAIN (f
))
12005 tree new_f
= copy_node (f
);
12006 DECL_CONTEXT (new_f
) = type
;
12007 TREE_TYPE (new_f
) = remap_type (TREE_TYPE (f
), &tcctx
->cb
);
12008 TREE_CHAIN (new_f
) = new_fields
;
12009 walk_tree (&DECL_SIZE (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12010 walk_tree (&DECL_SIZE_UNIT (new_f
), copy_tree_body_r
, &tcctx
->cb
, NULL
);
12011 walk_tree (&DECL_FIELD_OFFSET (new_f
), copy_tree_body_r
,
12013 new_fields
= new_f
;
12014 tcctx
->cb
.decl_map
->put (f
, new_f
);
12016 TYPE_FIELDS (type
) = nreverse (new_fields
);
12017 layout_type (type
);
12021 /* Create task copyfn. */
/* NOTE(review): this text is a line-mangled extraction — each original
   source line is split across several physical lines and some lines
   (braces, breaks, else branches) are missing entirely.  Code bytes are
   kept as-is; only comments are added.  The function builds the body of
   the task copy function that GOMP_task invokes to copy firstprivate /
   shared data from the sender record (SARG) into the task's own record
   (ARG).  */
12024 create_task_copyfn (gomp_task
*task_stmt
, omp_context
*ctx
)
12026 struct function
*child_cfun
;
12027 tree child_fn
, t
, c
, src
, dst
, f
, sf
, arg
, sarg
, decl
;
12028 tree record_type
, srecord_type
, bind
, list
;
12029 bool record_needs_remap
= false, srecord_needs_remap
= false;
12031 struct omp_taskcopy_context tcctx
;
12032 location_t loc
= gimple_location (task_stmt
);
12033 size_t looptempno
= 0;
/* Fetch the pre-created copy-function decl and assert it has no CFG yet.  */
12035 child_fn
= gimple_omp_task_copy_fn (task_stmt
);
12036 task_cpyfns
.safe_push (task_stmt
);
12037 child_cfun
= DECL_STRUCT_FUNCTION (child_fn
);
12038 gcc_assert (child_cfun
->cfg
== NULL
);
12039 DECL_SAVED_TREE (child_fn
) = alloc_stmt_list ();
12041 /* Reset DECL_CONTEXT on function arguments. */
12042 for (t
= DECL_ARGUMENTS (child_fn
); t
; t
= DECL_CHAIN (t
))
12043 DECL_CONTEXT (t
) = child_fn
;
12045 /* Populate the function. */
12046 push_gimplify_context ();
12047 push_cfun (child_cfun
);
12049 bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12050 TREE_SIDE_EFFECTS (bind
) = 1;
12052 DECL_SAVED_TREE (child_fn
) = bind
;
12053 DECL_SOURCE_LOCATION (child_fn
) = gimple_location (task_stmt
);
12055 /* Remap src and dst argument types if needed. */
12056 record_type
= ctx
->record_type
;
12057 srecord_type
= ctx
->srecord_type
;
12058 for (f
= TYPE_FIELDS (record_type
); f
; f
= DECL_CHAIN (f
))
12059 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12061 record_needs_remap
= true;
12064 for (f
= TYPE_FIELDS (srecord_type
); f
; f
= DECL_CHAIN (f
))
12065 if (variably_modified_type_p (TREE_TYPE (f
), ctx
->cb
.src_fn
))
12067 srecord_needs_remap
= true;
/* Only set up the copy_body_data machinery when at least one record type
   contains a variably-modified field; decl_map doubles as the flag.  */
12071 if (record_needs_remap
|| srecord_needs_remap
)
12073 memset (&tcctx
, '\0', sizeof (tcctx
));
12074 tcctx
.cb
.src_fn
= ctx
->cb
.src_fn
;
12075 tcctx
.cb
.dst_fn
= child_fn
;
12076 tcctx
.cb
.src_node
= cgraph_node::get (tcctx
.cb
.src_fn
);
12077 gcc_checking_assert (tcctx
.cb
.src_node
);
12078 tcctx
.cb
.dst_node
= tcctx
.cb
.src_node
;
12079 tcctx
.cb
.src_cfun
= ctx
->cb
.src_cfun
;
12080 tcctx
.cb
.copy_decl
= task_copyfn_copy_decl
;
12081 tcctx
.cb
.eh_lp_nr
= 0;
12082 tcctx
.cb
.transform_call_graph_edges
= CB_CGE_MOVE
;
12083 tcctx
.cb
.decl_map
= new hash_map
<tree
, tree
>;
12086 if (record_needs_remap
)
12087 record_type
= task_copyfn_remap_type (&tcctx
, record_type
)
;
12088 if (srecord_needs_remap
)
12089 srecord_type
= task_copyfn_remap_type (&tcctx
, srecord_type
);
12092 tcctx
.cb
.decl_map
= NULL
;
/* Retype the dst (ARG) and src (SARG) parameters to the possibly
   remapped record pointer types.  */
12094 arg
= DECL_ARGUMENTS (child_fn
);
12095 TREE_TYPE (arg
) = build_pointer_type (record_type
);
12096 sarg
= DECL_CHAIN (arg
);
12097 TREE_TYPE (sarg
) = build_pointer_type (srecord_type
);
12099 /* First pass: initialize temporaries used in record_type and srecord_type
12100 sizes and field offsets. */
12101 if (tcctx
.cb
.decl_map
)
12102 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12103 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12107 decl
= OMP_CLAUSE_DECL (c
);
12108 p
= tcctx
.cb
.decl_map
->get (decl
);
12111 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12112 sf
= (tree
) n
->value
;
12113 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12114 src
= build_simple_mem_ref_loc (loc
, sarg
);
12115 src
= omp_build_component_ref (src
, sf
);
12116 t
= build2 (MODIFY_EXPR
, TREE_TYPE (*p
), *p
, src
);
12117 append_to_statement_list (t
, &list
);
12120 /* Second pass: copy shared var pointers and copy construct non-VLA
12121 firstprivate vars. */
12122 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12123 switch (OMP_CLAUSE_CODE (c
))
12125 splay_tree_key key
;
12126 case OMP_CLAUSE_SHARED
:
12127 decl
= OMP_CLAUSE_DECL (c
);
/* Firstprivatized shared vars are keyed by &DECL_UID rather than the
   decl itself.  */
12128 key
= (splay_tree_key
) decl
;
12129 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
))
12130 key
= (splay_tree_key
) &DECL_UID (decl
);
12131 n
= splay_tree_lookup (ctx
->field_map
, key
);
12134 f
= (tree
) n
->value
;
12135 if (tcctx
.cb
.decl_map
)
12136 f
= *tcctx
.cb
.decl_map
->get (f
);
12137 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12138 sf
= (tree
) n
->value
;
12139 if (tcctx
.cb
.decl_map
)
12140 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12141 src
= build_simple_mem_ref_loc (loc
, sarg
);
12142 src
= omp_build_component_ref (src
, sf
);
12143 dst
= build_simple_mem_ref_loc (loc
, arg
);
12144 dst
= omp_build_component_ref (dst
, f
);
12145 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12146 append_to_statement_list (t
, &list
);
12148 case OMP_CLAUSE_REDUCTION
:
12149 case OMP_CLAUSE_IN_REDUCTION
:
/* Strip MEM_REF / POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR wrappers
   to find the underlying decl used as the map key.  */
12150 decl
= OMP_CLAUSE_DECL (c
);
12151 if (TREE_CODE (decl
) == MEM_REF
)
12153 decl
= TREE_OPERAND (decl
, 0);
12154 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12155 decl
= TREE_OPERAND (decl
, 0);
12156 if (TREE_CODE (decl
) == INDIRECT_REF
12157 || TREE_CODE (decl
) == ADDR_EXPR
)
12158 decl
= TREE_OPERAND (decl
, 0);
12160 key
= (splay_tree_key
) decl
;
12161 n
= splay_tree_lookup (ctx
->field_map
, key
);
12164 f
= (tree
) n
->value
;
12165 if (tcctx
.cb
.decl_map
)
12166 f
= *tcctx
.cb
.decl_map
->get (f
);
12167 n
= splay_tree_lookup (ctx
->sfield_map
, key
);
12168 sf
= (tree
) n
->value
;
12169 if (tcctx
.cb
.decl_map
)
12170 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12171 src
= build_simple_mem_ref_loc (loc
, sarg
);
12172 src
= omp_build_component_ref (src
, sf
);
12173 if (decl
!= OMP_CLAUSE_DECL (c
)
12174 && TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12175 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12176 src
= build_simple_mem_ref_loc (loc
, src
);
12177 dst
= build_simple_mem_ref_loc (loc
, arg
);
12178 dst
= omp_build_component_ref (dst
, f
);
12179 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12180 append_to_statement_list (t
, &list
);
12182 case OMP_CLAUSE__LOOPTEMP_
:
12183 /* Fields for first two _looptemp_ clauses are initialized by
12184 GOMP_taskloop*, the rest are handled like firstprivate. */
12185 if (looptempno
< 2)
12191 case OMP_CLAUSE__REDUCTEMP_
:
12192 case OMP_CLAUSE_FIRSTPRIVATE
:
12193 decl
= OMP_CLAUSE_DECL (c
);
/* VLA firstprivates are deferred to the last pass below.  */
12194 if (is_variable_sized (decl
))
12196 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12199 f
= (tree
) n
->value
;
12200 if (tcctx
.cb
.decl_map
)
12201 f
= *tcctx
.cb
.decl_map
->get (f
);
12202 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12205 sf
= (tree
) n
->value
;
12206 if (tcctx
.cb
.decl_map
)
12207 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12208 src
= build_simple_mem_ref_loc (loc
, sarg
);
12209 src
= omp_build_component_ref (src
, sf
);
12210 if (use_pointer_for_field (decl
, NULL
)
12211 || omp_privatize_by_reference (decl
))
12212 src
= build_simple_mem_ref_loc (loc
, src
);
12216 dst
= build_simple_mem_ref_loc (loc
, arg
);
12217 dst
= omp_build_component_ref (dst
, f
);
12218 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
12219 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
/* If the decl has an allocate clause, obtain storage via GOMP_alloc
   (allocator possibly read out of the sender record) and store the
   resulting pointer into the task record before copy-constructing.  */
12222 if (ctx
->allocate_map
)
12223 if (tree
*allocatorp
= ctx
->allocate_map
->get (decl
))
12225 tree allocator
= *allocatorp
;
12226 HOST_WIDE_INT ialign
= 0;
12227 if (TREE_CODE (allocator
) == TREE_LIST
)
12229 ialign
= tree_to_uhwi (TREE_VALUE (allocator
));
12230 allocator
= TREE_PURPOSE (allocator
);
12232 if (TREE_CODE (allocator
) != INTEGER_CST
)
12234 n
= splay_tree_lookup (ctx
->sfield_map
,
12235 (splay_tree_key
) allocator
);
12236 allocator
= (tree
) n
->value
;
12237 if (tcctx
.cb
.decl_map
)
12238 allocator
= *tcctx
.cb
.decl_map
->get (allocator
);
12239 tree a
= build_simple_mem_ref_loc (loc
, sarg
);
12240 allocator
= omp_build_component_ref (a
, allocator
);
12242 allocator
= fold_convert (pointer_sized_int_node
, allocator
);
12243 tree a
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
12244 tree align
= build_int_cst (size_type_node
,
12246 DECL_ALIGN_UNIT (decl
)));
12247 tree sz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst
)));
12248 tree ptr
= build_call_expr_loc (loc
, a
, 3, align
, sz
,
12250 ptr
= fold_convert (TREE_TYPE (dst
), ptr
);
12251 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, ptr
);
12252 append_to_statement_list (t
, &list
);
12253 dst
= build_simple_mem_ref_loc (loc
, dst
);
/* Language-specific copy construction for the firstprivate value.  */
12255 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12257 append_to_statement_list (t
, &list
);
12259 case OMP_CLAUSE_PRIVATE
:
12260 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c
))
12262 decl
= OMP_CLAUSE_DECL (c
);
12263 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12264 f
= (tree
) n
->value
;
12265 if (tcctx
.cb
.decl_map
)
12266 f
= *tcctx
.cb
.decl_map
->get (f
);
12267 n
= splay_tree_lookup (ctx
->sfield_map
, (splay_tree_key
) decl
);
12270 sf
= (tree
) n
->value
;
12271 if (tcctx
.cb
.decl_map
)
12272 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12273 src
= build_simple_mem_ref_loc (loc
, sarg
);
12274 src
= omp_build_component_ref (src
, sf
);
12275 if (use_pointer_for_field (decl
, NULL
))
12276 src
= build_simple_mem_ref_loc (loc
, src
);
12280 dst
= build_simple_mem_ref_loc (loc
, arg
);
12281 dst
= omp_build_component_ref (dst
, f
);
12282 t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12283 append_to_statement_list (t
, &list
);
12289 /* Last pass: handle VLA firstprivates. */
12290 if (tcctx
.cb
.decl_map
)
12291 for (c
= gimple_omp_task_clauses (task_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12292 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12296 decl
= OMP_CLAUSE_DECL (c
);
12297 if (!is_variable_sized (decl
))
12299 n
= splay_tree_lookup (ctx
->field_map
, (splay_tree_key
) decl
);
12302 f
= (tree
) n
->value
;
12303 f
= *tcctx
.cb
.decl_map
->get (f
);
/* VLAs are represented via a DECL_VALUE_EXPR that is an INDIRECT_REF
   of a pointer decl; copy-construct the payload, then store its
   address into the pointer field of the task record.  */
12304 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
12305 ind
= DECL_VALUE_EXPR (decl
);
12306 gcc_assert (TREE_CODE (ind
) == INDIRECT_REF
);
12307 gcc_assert (DECL_P (TREE_OPERAND (ind
, 0)));
12308 n
= splay_tree_lookup (ctx
->sfield_map
,
12309 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12310 sf
= (tree
) n
->value
;
12311 sf
= *tcctx
.cb
.decl_map
->get (sf
);
12312 src
= build_simple_mem_ref_loc (loc
, sarg
);
12313 src
= omp_build_component_ref (src
, sf
);
12314 src
= build_simple_mem_ref_loc (loc
, src
);
12315 dst
= build_simple_mem_ref_loc (loc
, arg
);
12316 dst
= omp_build_component_ref (dst
, f
);
12317 t
= lang_hooks
.decls
.omp_clause_copy_ctor (c
, dst
, src
);
12318 append_to_statement_list (t
, &list
);
12319 n
= splay_tree_lookup (ctx
->field_map
,
12320 (splay_tree_key
) TREE_OPERAND (ind
, 0));
12321 df
= (tree
) n
->value
;
12322 df
= *tcctx
.cb
.decl_map
->get (df
);
12323 ptr
= build_simple_mem_ref_loc (loc
, arg
);
12324 ptr
= omp_build_component_ref (ptr
, df
);
12325 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ptr
), ptr
,
12326 build_fold_addr_expr_loc (loc
, dst
));
12327 append_to_statement_list (t
, &list
);
/* Finish the body with a return, free the decl map, and install the
   statement list as the BIND_EXPR body.  */
12330 t
= build1 (RETURN_EXPR
, void_type_node
, NULL
);
12331 append_to_statement_list (t
, &list
);
12333 if (tcctx
.cb
.decl_map
)
12334 delete tcctx
.cb
.decl_map
;
12335 pop_gimplify_context (NULL
);
12336 BIND_EXPR_BODY (bind
) = list
;
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a stack array
   of dependence addresses understood by libgomp, emitting setup code
   into *ISEQ and a clobber of the array into *OSEQ.  A new DEPEND clause
   of kind OMP_CLAUSE_DEPEND_LAST pointing at the array is chained onto
   *PCLAUSES.
   NOTE(review): this text is a line-mangled extraction — statements span
   several physical lines and some lines (case bodies, counters, braces)
   are missing.  Code bytes are kept as-is; only comments are added.  */
12341 lower_depend_clauses (tree
*pclauses
, gimple_seq
*iseq
, gimple_seq
*oseq
)
/* cnt[] counts clauses per dependence kind; idx is the running slot
   index into the array being built.  */
12345 size_t cnt
[5] = { 0, 0, 0, 0, 0 }, idx
= 2, i
;
12347 clauses
= omp_find_clause (*pclauses
, OMP_CLAUSE_DEPEND
);
12348 gcc_assert (clauses
);
/* First walk: tally how many clauses of each depend kind appear.  */
12349 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12350 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
12351 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12353 case OMP_CLAUSE_DEPEND_LAST
:
12354 /* Lowering already done at gimplification. */
12356 case OMP_CLAUSE_DEPEND_IN
:
12359 case OMP_CLAUSE_DEPEND_OUT
:
12360 case OMP_CLAUSE_DEPEND_INOUT
:
12363 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12366 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12369 case OMP_CLAUSE_DEPEND_INOUTSET
:
12373 gcc_unreachable ();
/* The extended array layout is needed when mutexinoutset, depobj or
   inoutset dependences are present.  */
12375 if (cnt
[1] || cnt
[3] || cnt
[4])
12377 size_t total
= cnt
[0] + cnt
[1] + cnt
[2] + cnt
[3] + cnt
[4];
12378 size_t inoutidx
= total
+ idx
;
12379 tree type
= build_array_type_nelts (ptr_type_node
, total
+ idx
+ 2 * cnt
[4]);
12380 tree array
= create_tmp_var (type
);
12381 TREE_ADDRESSABLE (array
) = 1;
/* Header slots: element 0 and the total count.  */
12382 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
12386 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, 0));
12387 gimple_seq_add_stmt (iseq
, g
);
12388 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
12391 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, total
));
12392 gimple_seq_add_stmt (iseq
, g
);
/* Store the per-kind counts into the header.  */
12393 for (i
= 0; i
< (idx
== 5 ? 3 : 1); i
++)
12395 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12396 size_int (i
+ 1 + (idx
== 5)), NULL_TREE
, NULL_TREE
);
12397 g
= gimple_build_assign (r
, build_int_cst (ptr_type_node
, cnt
[i
]));
12398 gimple_seq_add_stmt (iseq
, g
);
/* Second walk, once per kind bucket, emitting each dependence address
   into the next array slot in kind order.  */
12400 for (i
= 0; i
< 5; i
++)
12404 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12405 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_DEPEND
)
12409 switch (OMP_CLAUSE_DEPEND_KIND (c
))
12411 case OMP_CLAUSE_DEPEND_IN
:
12415 case OMP_CLAUSE_DEPEND_OUT
:
12416 case OMP_CLAUSE_DEPEND_INOUT
:
12420 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
12424 case OMP_CLAUSE_DEPEND_DEPOBJ
:
12428 case OMP_CLAUSE_DEPEND_INOUTSET
:
12433 gcc_unreachable ();
12435 tree t
= OMP_CLAUSE_DECL (c
);
12438 t
= build4 (ARRAY_REF
, ptr_type_node
, array
,
12439 size_int (inoutidx
), NULL_TREE
, NULL_TREE
);
12440 t
= build_fold_addr_expr (t
);
12443 t
= fold_convert (ptr_type_node
, t
);
12444 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12445 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12446 NULL_TREE
, NULL_TREE
);
12447 g
= gimple_build_assign (r
, t
);
12448 gimple_seq_add_stmt (iseq
, g
);
/* inoutset entries occupy an extra trailing pair: the address plus a
   GOMP_DEPEND_INOUTSET tag.  */
12452 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12453 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12454 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_INOUTSET
)
12456 tree t
= OMP_CLAUSE_DECL (c
);
12457 t
= fold_convert (ptr_type_node
, t
);
12458 gimplify_expr (&t
, iseq
, NULL
, is_gimple_val
, fb_rvalue
);
12459 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12460 NULL_TREE
, NULL_TREE
);
12461 g
= gimple_build_assign (r
, t
);
12462 gimple_seq_add_stmt (iseq
, g
);
12463 t
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
12464 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (idx
++),
12465 NULL_TREE
, NULL_TREE
);
12466 g
= gimple_build_assign (r
, t
);
12467 gimple_seq_add_stmt (iseq
, g
);
/* Chain a DEPEND_LAST clause carrying the array's address onto the
   clause list so the runtime call can find it.  */
12470 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
12471 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
12472 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
12473 OMP_CLAUSE_CHAIN (c
) = *pclauses
;
/* Clobber the array on exit so its stack slot can be reused.  */
12475 tree clobber
= build_clobber (type
);
12476 g
= gimple_build_assign (array
, clobber
);
12477 gimple_seq_add_stmt (oseq
, g
);
12480 /* Lower the OpenMP parallel or task directive in the current statement
12481 in GSI_P. CTX holds context information for the directive. */
/* NOTE(review): this text is a line-mangled extraction — statements span
   several physical lines and some lines (braces, early returns, else
   arms) are missing.  Code bytes are kept as-is; only comments added.  */
12484 lower_omp_taskreg (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12488 gimple
*stmt
= gsi_stmt (*gsi_p
);
12489 gbind
*par_bind
, *bind
, *dep_bind
= NULL
;
12490 gimple_seq par_body
;
12491 location_t loc
= gimple_location (stmt
);
12493 clauses
= gimple_omp_taskreg_clauses (stmt
);
/* Standalone taskwait-with-depend tasks have no body to lower here.  */
12494 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12495 && gimple_omp_task_taskwait_p (stmt
))
12503 = as_a
<gbind
*> (gimple_seq_first_stmt (gimple_omp_body (stmt
)));
12504 par_body
= gimple_bind_body (par_bind
);
12506 child_fn
= ctx
->cb
.dst_fn
;
/* Detect a parallel whose body is a single worksharing construct, so the
   combined GOMP entry point can be used (see check_combined_parallel).  */
12507 if (gimple_code (stmt
) == GIMPLE_OMP_PARALLEL
12508 && !gimple_omp_parallel_combined_p (stmt
))
12510 struct walk_stmt_info wi
;
12513 memset (&wi
, 0, sizeof (wi
));
12515 wi
.val_only
= true;
12516 walk_gimple_seq (par_body
, check_combined_parallel
, NULL
, &wi
);
12518 gimple_omp_parallel_set_combined_p (stmt
, true);
/* Lower depend clauses of a task into the runtime's depend array.  */
12520 gimple_seq dep_ilist
= NULL
;
12521 gimple_seq dep_olist
= NULL
;
12522 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12523 && omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
))
12525 push_gimplify_context ();
12526 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12527 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt
),
12528 &dep_ilist
, &dep_olist
);
/* For the taskwait-with-depend form, only wrap the statement in the
   depend bind and return.  */
12531 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
12532 && gimple_omp_task_taskwait_p (stmt
))
12536 gsi_replace (gsi_p
, dep_bind
, true);
12537 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12538 gimple_bind_add_stmt (dep_bind
, stmt
);
12539 gimple_bind_add_seq (dep_bind
, dep_olist
);
12540 pop_gimplify_context (dep_bind
);
/* Tasks with an srecord need a copy function for firstprivate data.  */
12545 if (ctx
->srecord_type
)
12546 create_task_copyfn (as_a
<gomp_task
*> (stmt
), ctx
);
/* Task reductions (taskloop) and _reductemp_ (parallel) are lowered into
   sequences emitted around the construct.  */
12548 gimple_seq tskred_ilist
= NULL
;
12549 gimple_seq tskred_olist
= NULL
;
12550 if ((is_task_ctx (ctx
)
12551 && gimple_omp_task_taskloop_p (ctx
->stmt
)
12552 && omp_find_clause (gimple_omp_task_clauses (ctx
->stmt
),
12553 OMP_CLAUSE_REDUCTION
))
12554 || (is_parallel_ctx (ctx
)
12555 && omp_find_clause (gimple_omp_parallel_clauses (stmt
),
12556 OMP_CLAUSE__REDUCTEMP_
)))
12558 if (dep_bind
== NULL
)
12560 push_gimplify_context ();
12561 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12563 lower_omp_task_reductions (ctx
, is_task_ctx (ctx
) ? OMP_TASKLOOP
12565 gimple_omp_taskreg_clauses (ctx
->stmt
),
12566 &tskred_ilist
, &tskred_olist
);
12569 push_gimplify_context ();
/* Lower data-sharing clauses and the body itself.  */
12571 gimple_seq par_olist
= NULL
;
12572 gimple_seq par_ilist
= NULL
;
12573 gimple_seq par_rlist
= NULL
;
12574 lower_rec_input_clauses (clauses
, &par_ilist
, &par_olist
, ctx
, NULL
);
12575 lower_omp (&par_body
, ctx
);
12576 if (gimple_code (stmt
) != GIMPLE_OMP_TASK
)
12577 lower_reduction_clauses (clauses
, &par_rlist
, NULL
, ctx
);
12579 /* Declare all the variables created by mapping and the variables
12580 declared in the scope of the parallel body. */
12581 record_vars_into (ctx
->block_vars
, child_fn
);
12582 maybe_remove_omp_member_access_dummy_vars (par_bind
);
12583 record_vars_into (gimple_bind_vars (par_bind
), child_fn
);
/* Create the sender record variable (.omp_data_o) used to marshal data
   into the outlined child function.  */
12585 if (ctx
->record_type
)
12588 = create_tmp_var (ctx
->srecord_type
? ctx
->srecord_type
12589 : ctx
->record_type
, ".omp_data_o");
12590 DECL_NAMELESS (ctx
->sender_decl
) = 1;
12591 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
12592 gimple_omp_taskreg_set_data_arg (stmt
, ctx
->sender_decl
);
12595 gimple_seq olist
= NULL
;
12596 gimple_seq ilist
= NULL
;
12597 lower_send_clauses (clauses
, &ilist
, &olist
, ctx
);
12598 lower_send_shared_vars (&ilist
, &olist
, ctx
);
/* Clobber the sender record after the construct.  */
12600 if (ctx
->record_type
)
12602 tree clobber
= build_clobber (TREE_TYPE (ctx
->sender_decl
));
12603 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
12607 /* Once all the expansions are done, sequence all the different
12608 fragments inside gimple_omp_body. */
12610 gimple_seq new_body
= NULL
;
12612 if (ctx
->record_type
)
12614 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
12615 /* fixup_child_record_type might have changed receiver_decl's type. */
12616 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
12617 gimple_seq_add_stmt (&new_body
,
12618 gimple_build_assign (ctx
->receiver_decl
, t
));
12621 gimple_seq_add_seq (&new_body
, par_ilist
);
12622 gimple_seq_add_seq (&new_body
, par_body
);
12623 gimple_seq_add_seq (&new_body
, par_rlist
);
12624 if (ctx
->cancellable
)
12625 gimple_seq_add_stmt (&new_body
, gimple_build_label (ctx
->cancel_label
));
12626 gimple_seq_add_seq (&new_body
, par_olist
);
12627 new_body
= maybe_catch_exception (new_body
);
12628 if (gimple_code (stmt
) == GIMPLE_OMP_TASK
)
12629 gimple_seq_add_stmt (&new_body
,
12630 gimple_build_omp_continue (integer_zero_node
,
12631 integer_zero_node
));
12632 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
12633 gimple_omp_set_body (stmt
, new_body
);
/* Wrap the construct plus send-clause setup/teardown in a bind, nested
   inside the depend bind when one exists.  */
12635 if (dep_bind
&& gimple_bind_block (par_bind
) == NULL_TREE
)
12636 bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12638 bind
= gimple_build_bind (NULL
, NULL
, gimple_bind_block (par_bind
));
12639 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
12640 gimple_bind_add_seq (bind
, ilist
);
12641 gimple_bind_add_stmt (bind
, stmt
);
12642 gimple_bind_add_seq (bind
, olist
);
12644 pop_gimplify_context (NULL
);
12648 gimple_bind_add_seq (dep_bind
, dep_ilist
);
12649 gimple_bind_add_seq (dep_bind
, tskred_ilist
);
12650 gimple_bind_add_stmt (dep_bind
, bind
);
12651 gimple_bind_add_seq (dep_bind
, tskred_olist
);
12652 gimple_bind_add_seq (dep_bind
, dep_olist
);
12653 pop_gimplify_context (dep_bind
);
12657 /* Lower the GIMPLE_OMP_TARGET in the current statement
12658 in GSI_P. CTX holds context information for the directive. */
12661 lower_omp_target (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
12664 tree child_fn
, t
, c
;
12665 gomp_target
*stmt
= as_a
<gomp_target
*> (gsi_stmt (*gsi_p
));
12666 gbind
*tgt_bind
, *bind
, *dep_bind
= NULL
;
12667 gimple_seq tgt_body
, olist
, ilist
, fplist
, new_body
;
12668 location_t loc
= gimple_location (stmt
);
12669 bool offloaded
, data_region
;
12670 unsigned int map_cnt
= 0;
12671 tree in_reduction_clauses
= NULL_TREE
;
12673 offloaded
= is_gimple_omp_offloaded (stmt
);
12674 switch (gimple_omp_target_kind (stmt
))
12676 case GF_OMP_TARGET_KIND_REGION
:
12678 q
= &in_reduction_clauses
;
12679 for (p
= gimple_omp_target_clauses_ptr (stmt
); *p
; )
12680 if (OMP_CLAUSE_CODE (*p
) == OMP_CLAUSE_IN_REDUCTION
)
12683 q
= &OMP_CLAUSE_CHAIN (*q
);
12684 *p
= OMP_CLAUSE_CHAIN (*p
);
12687 p
= &OMP_CLAUSE_CHAIN (*p
);
12689 *p
= in_reduction_clauses
;
12691 case GF_OMP_TARGET_KIND_UPDATE
:
12692 case GF_OMP_TARGET_KIND_ENTER_DATA
:
12693 case GF_OMP_TARGET_KIND_EXIT_DATA
:
12694 case GF_OMP_TARGET_KIND_OACC_PARALLEL
:
12695 case GF_OMP_TARGET_KIND_OACC_KERNELS
:
12696 case GF_OMP_TARGET_KIND_OACC_SERIAL
:
12697 case GF_OMP_TARGET_KIND_OACC_UPDATE
:
12698 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA
:
12699 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA
:
12700 case GF_OMP_TARGET_KIND_OACC_DECLARE
:
12701 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED
:
12702 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE
:
12703 data_region
= false;
12705 case GF_OMP_TARGET_KIND_DATA
:
12706 case GF_OMP_TARGET_KIND_OACC_DATA
:
12707 case GF_OMP_TARGET_KIND_OACC_HOST_DATA
:
12708 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS
:
12709 data_region
= true;
12712 gcc_unreachable ();
12715 /* Ensure that requires map is written via output_offload_tables, even if only
12716 'target (enter/exit) data' is used in the translation unit. */
12717 if (ENABLE_OFFLOADING
&& (omp_requires_mask
& OMP_REQUIRES_TARGET_USED
))
12718 g
->have_offload
= true;
12720 clauses
= gimple_omp_target_clauses (stmt
);
12722 gimple_seq dep_ilist
= NULL
;
12723 gimple_seq dep_olist
= NULL
;
12724 bool has_depend
= omp_find_clause (clauses
, OMP_CLAUSE_DEPEND
) != NULL_TREE
;
12725 if (has_depend
|| in_reduction_clauses
)
12727 push_gimplify_context ();
12728 dep_bind
= gimple_build_bind (NULL
, NULL
, make_node (BLOCK
));
12730 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt
),
12731 &dep_ilist
, &dep_olist
);
12732 if (in_reduction_clauses
)
12733 lower_rec_input_clauses (in_reduction_clauses
, &dep_ilist
, &dep_olist
,
12741 tgt_bind
= gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt
));
12742 tgt_body
= gimple_bind_body (tgt_bind
);
12744 else if (data_region
)
12745 tgt_body
= gimple_omp_body (stmt
);
12746 child_fn
= ctx
->cb
.dst_fn
;
12748 push_gimplify_context ();
12751 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12752 switch (OMP_CLAUSE_CODE (c
))
12758 case OMP_CLAUSE_MAP
:
12760 /* First check what we're prepared to handle in the following. */
12761 switch (OMP_CLAUSE_MAP_KIND (c
))
12763 case GOMP_MAP_ALLOC
:
12765 case GOMP_MAP_FROM
:
12766 case GOMP_MAP_TOFROM
:
12767 case GOMP_MAP_POINTER
:
12768 case GOMP_MAP_TO_PSET
:
12769 case GOMP_MAP_DELETE
:
12770 case GOMP_MAP_RELEASE
:
12771 case GOMP_MAP_ALWAYS_TO
:
12772 case GOMP_MAP_ALWAYS_FROM
:
12773 case GOMP_MAP_ALWAYS_TOFROM
:
12774 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
12775 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
12776 case GOMP_MAP_STRUCT
:
12777 case GOMP_MAP_ALWAYS_POINTER
:
12778 case GOMP_MAP_ATTACH
:
12779 case GOMP_MAP_DETACH
:
12780 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
12781 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
12783 case GOMP_MAP_IF_PRESENT
:
12784 case GOMP_MAP_FORCE_ALLOC
:
12785 case GOMP_MAP_FORCE_TO
:
12786 case GOMP_MAP_FORCE_FROM
:
12787 case GOMP_MAP_FORCE_TOFROM
:
12788 case GOMP_MAP_FORCE_PRESENT
:
12789 case GOMP_MAP_FORCE_DEVICEPTR
:
12790 case GOMP_MAP_DEVICE_RESIDENT
:
12791 case GOMP_MAP_LINK
:
12792 case GOMP_MAP_FORCE_DETACH
:
12793 gcc_assert (is_gimple_omp_oacc (stmt
));
12796 gcc_unreachable ();
12800 case OMP_CLAUSE_TO
:
12801 case OMP_CLAUSE_FROM
:
12803 var
= OMP_CLAUSE_DECL (c
);
12806 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
12807 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12808 && (OMP_CLAUSE_MAP_KIND (c
)
12809 != GOMP_MAP_FIRSTPRIVATE_POINTER
)))
12814 if (DECL_SIZE (var
)
12815 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
12817 tree var2
= DECL_VALUE_EXPR (var
);
12818 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
12819 var2
= TREE_OPERAND (var2
, 0);
12820 gcc_assert (DECL_P (var2
));
12825 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12826 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12827 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12829 if (TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12831 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var
, ctx
))
12832 && varpool_node::get_create (var
)->offloadable
)
12835 tree type
= build_pointer_type (TREE_TYPE (var
));
12836 tree new_var
= lookup_decl (var
, ctx
);
12837 x
= create_tmp_var_raw (type
, get_name (new_var
));
12838 gimple_add_tmp_var (x
);
12839 x
= build_simple_mem_ref (x
);
12840 SET_DECL_VALUE_EXPR (new_var
, x
);
12841 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12846 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12847 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12848 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12849 && is_omp_target (stmt
))
12851 gcc_assert (maybe_lookup_field (c
, ctx
));
12856 if (!maybe_lookup_field (var
, ctx
))
12859 /* Don't remap compute constructs' reduction variables, because the
12860 intermediate result must be local to each gang. */
12861 if (offloaded
&& !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12862 && is_gimple_omp_oacc (ctx
->stmt
)
12863 && OMP_CLAUSE_MAP_IN_REDUCTION (c
)))
12865 x
= build_receiver_ref (var
, true, ctx
);
12866 tree new_var
= lookup_decl (var
, ctx
);
12868 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12869 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
12870 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
12871 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
12872 x
= build_simple_mem_ref (x
);
12873 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12875 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
12876 if (omp_privatize_by_reference (new_var
)
12877 && (TREE_CODE (TREE_TYPE (new_var
)) != POINTER_TYPE
12878 || DECL_BY_REFERENCE (var
)))
12880 /* Create a local object to hold the instance
12882 tree type
= TREE_TYPE (TREE_TYPE (new_var
));
12883 const char *id
= IDENTIFIER_POINTER (DECL_NAME (new_var
));
12884 tree inst
= create_tmp_var (type
, id
);
12885 gimplify_assign (inst
, fold_indirect_ref (x
), &fplist
);
12886 x
= build_fold_addr_expr (inst
);
12888 gimplify_assign (new_var
, x
, &fplist
);
12890 else if (DECL_P (new_var
))
12892 SET_DECL_VALUE_EXPR (new_var
, x
);
12893 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12896 gcc_unreachable ();
12901 case OMP_CLAUSE_FIRSTPRIVATE
:
12902 omp_firstprivate_recv
:
12903 gcc_checking_assert (offloaded
);
12904 if (is_gimple_omp_oacc (ctx
->stmt
))
12906 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12907 gcc_checking_assert (!is_oacc_kernels (ctx
));
12908 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12909 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12911 goto oacc_firstprivate
;
12914 var
= OMP_CLAUSE_DECL (c
);
12915 if (!omp_privatize_by_reference (var
)
12916 && !is_gimple_reg_type (TREE_TYPE (var
)))
12918 tree new_var
= lookup_decl (var
, ctx
);
12919 if (is_variable_sized (var
))
12921 tree pvar
= DECL_VALUE_EXPR (var
);
12922 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12923 pvar
= TREE_OPERAND (pvar
, 0);
12924 gcc_assert (DECL_P (pvar
));
12925 tree new_pvar
= lookup_decl (pvar
, ctx
);
12926 x
= build_fold_indirect_ref (new_pvar
);
12927 TREE_THIS_NOTRAP (x
) = 1;
12930 x
= build_receiver_ref (var
, true, ctx
);
12931 SET_DECL_VALUE_EXPR (new_var
, x
);
12932 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12934 /* Fortran array descriptors: firstprivate of data + attach. */
12935 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
12936 && lang_hooks
.decls
.omp_array_data (var
, true))
12940 case OMP_CLAUSE_PRIVATE
:
12941 gcc_checking_assert (offloaded
);
12942 if (is_gimple_omp_oacc (ctx
->stmt
))
12944 /* No 'private' clauses on OpenACC 'kernels'. */
12945 gcc_checking_assert (!is_oacc_kernels (ctx
));
12946 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12947 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx
));
12951 var
= OMP_CLAUSE_DECL (c
);
12952 if (is_variable_sized (var
))
12954 tree new_var
= lookup_decl (var
, ctx
);
12955 tree pvar
= DECL_VALUE_EXPR (var
);
12956 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12957 pvar
= TREE_OPERAND (pvar
, 0);
12958 gcc_assert (DECL_P (pvar
));
12959 tree new_pvar
= lookup_decl (pvar
, ctx
);
12960 x
= build_fold_indirect_ref (new_pvar
);
12961 TREE_THIS_NOTRAP (x
) = 1;
12962 SET_DECL_VALUE_EXPR (new_var
, x
);
12963 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12967 case OMP_CLAUSE_USE_DEVICE_PTR
:
12968 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12969 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12970 case OMP_CLAUSE_IS_DEVICE_PTR
:
12971 var
= OMP_CLAUSE_DECL (c
);
12972 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12974 while (TREE_CODE (var
) == INDIRECT_REF
12975 || TREE_CODE (var
) == ARRAY_REF
)
12976 var
= TREE_OPERAND (var
, 0);
12977 if (lang_hooks
.decls
.omp_array_data (var
, true))
12978 goto omp_firstprivate_recv
;
12981 if (is_variable_sized (var
))
12983 tree new_var
= lookup_decl (var
, ctx
);
12984 tree pvar
= DECL_VALUE_EXPR (var
);
12985 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
12986 pvar
= TREE_OPERAND (pvar
, 0);
12987 gcc_assert (DECL_P (pvar
));
12988 tree new_pvar
= lookup_decl (pvar
, ctx
);
12989 x
= build_fold_indirect_ref (new_pvar
);
12990 TREE_THIS_NOTRAP (x
) = 1;
12991 SET_DECL_VALUE_EXPR (new_var
, x
);
12992 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
12994 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
12995 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
12996 && !omp_privatize_by_reference (var
)
12997 && !omp_is_allocatable_or_ptr (var
)
12998 && !lang_hooks
.decls
.omp_array_data (var
, true))
12999 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13001 tree new_var
= lookup_decl (var
, ctx
);
13002 tree type
= build_pointer_type (TREE_TYPE (var
));
13003 x
= create_tmp_var_raw (type
, get_name (new_var
));
13004 gimple_add_tmp_var (x
);
13005 x
= build_simple_mem_ref (x
);
13006 SET_DECL_VALUE_EXPR (new_var
, x
);
13007 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13011 tree new_var
= lookup_decl (var
, ctx
);
13012 x
= create_tmp_var_raw (TREE_TYPE (new_var
), get_name (new_var
));
13013 gimple_add_tmp_var (x
);
13014 SET_DECL_VALUE_EXPR (new_var
, x
);
13015 DECL_HAS_VALUE_EXPR_P (new_var
) = 1;
13022 target_nesting_level
++;
13023 lower_omp (&tgt_body
, ctx
);
13024 target_nesting_level
--;
13026 else if (data_region
)
13027 lower_omp (&tgt_body
, ctx
);
13031 /* Declare all the variables created by mapping and the variables
13032 declared in the scope of the target body. */
13033 record_vars_into (ctx
->block_vars
, child_fn
);
13034 maybe_remove_omp_member_access_dummy_vars (tgt_bind
);
13035 record_vars_into (gimple_bind_vars (tgt_bind
), child_fn
);
13040 if (ctx
->record_type
)
13043 = create_tmp_var (ctx
->record_type
, ".omp_data_arr");
13044 DECL_NAMELESS (ctx
->sender_decl
) = 1;
13045 TREE_ADDRESSABLE (ctx
->sender_decl
) = 1;
13046 t
= make_tree_vec (3);
13047 TREE_VEC_ELT (t
, 0) = ctx
->sender_decl
;
13048 TREE_VEC_ELT (t
, 1)
13049 = create_tmp_var (build_array_type_nelts (size_type_node
, map_cnt
),
13050 ".omp_data_sizes");
13051 DECL_NAMELESS (TREE_VEC_ELT (t
, 1)) = 1;
13052 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 1)) = 1;
13053 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 1;
13054 tree tkind_type
= short_unsigned_type_node
;
13055 int talign_shift
= 8;
13056 TREE_VEC_ELT (t
, 2)
13057 = create_tmp_var (build_array_type_nelts (tkind_type
, map_cnt
),
13058 ".omp_data_kinds");
13059 DECL_NAMELESS (TREE_VEC_ELT (t
, 2)) = 1;
13060 TREE_ADDRESSABLE (TREE_VEC_ELT (t
, 2)) = 1;
13061 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 1;
13062 gimple_omp_target_set_data_arg (stmt
, t
);
13064 vec
<constructor_elt
, va_gc
> *vsize
;
13065 vec
<constructor_elt
, va_gc
> *vkind
;
13066 vec_alloc (vsize
, map_cnt
);
13067 vec_alloc (vkind
, map_cnt
);
13068 unsigned int map_idx
= 0;
13070 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13071 switch (OMP_CLAUSE_CODE (c
))
13073 tree ovar
, nc
, s
, purpose
, var
, x
, type
;
13074 unsigned int talign
;
13079 case OMP_CLAUSE_MAP
:
13080 case OMP_CLAUSE_TO
:
13081 case OMP_CLAUSE_FROM
:
13082 oacc_firstprivate_map
:
13084 ovar
= OMP_CLAUSE_DECL (c
);
13085 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13086 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13087 || (OMP_CLAUSE_MAP_KIND (c
)
13088 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
13090 if (!DECL_P (ovar
))
13092 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13093 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
))
13095 nc
= OMP_CLAUSE_CHAIN (c
);
13096 gcc_checking_assert (OMP_CLAUSE_DECL (nc
)
13097 == get_base_address (ovar
));
13098 ovar
= OMP_CLAUSE_DECL (nc
);
13102 tree x
= build_sender_ref (ovar
, ctx
);
13104 if (in_reduction_clauses
13105 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13106 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13108 v
= unshare_expr (v
);
13110 while (handled_component_p (*p
)
13111 || TREE_CODE (*p
) == INDIRECT_REF
13112 || TREE_CODE (*p
) == ADDR_EXPR
13113 || TREE_CODE (*p
) == MEM_REF
13114 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
13115 p
= &TREE_OPERAND (*p
, 0);
13117 if (is_variable_sized (d
))
13119 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13120 d
= DECL_VALUE_EXPR (d
);
13121 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13122 d
= TREE_OPERAND (d
, 0);
13123 gcc_assert (DECL_P (d
));
13126 = (splay_tree_key
) &DECL_CONTEXT (d
);
13127 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13132 *p
= build_fold_indirect_ref (nd
);
13134 v
= build_fold_addr_expr_with_type (v
, ptr_type_node
);
13135 gimplify_assign (x
, v
, &ilist
);
13141 if (DECL_SIZE (ovar
)
13142 && TREE_CODE (DECL_SIZE (ovar
)) != INTEGER_CST
)
13144 tree ovar2
= DECL_VALUE_EXPR (ovar
);
13145 gcc_assert (TREE_CODE (ovar2
) == INDIRECT_REF
);
13146 ovar2
= TREE_OPERAND (ovar2
, 0);
13147 gcc_assert (DECL_P (ovar2
));
13150 if (!maybe_lookup_field (ovar
, ctx
)
13151 && !(OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13152 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13153 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)))
13157 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (ovar
));
13158 if (DECL_P (ovar
) && DECL_ALIGN_UNIT (ovar
) > talign
)
13159 talign
= DECL_ALIGN_UNIT (ovar
);
13164 if (in_reduction_clauses
13165 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13166 && OMP_CLAUSE_MAP_IN_REDUCTION (c
))
13169 if (is_variable_sized (d
))
13171 gcc_assert (DECL_HAS_VALUE_EXPR_P (d
));
13172 d
= DECL_VALUE_EXPR (d
);
13173 gcc_assert (TREE_CODE (d
) == INDIRECT_REF
);
13174 d
= TREE_OPERAND (d
, 0);
13175 gcc_assert (DECL_P (d
));
13178 = (splay_tree_key
) &DECL_CONTEXT (d
);
13179 tree nd
= (tree
) splay_tree_lookup (ctx
->field_map
,
13184 var
= build_fold_indirect_ref (nd
);
13187 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13190 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13191 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13192 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
13193 && is_omp_target (stmt
))
13195 x
= build_sender_ref (c
, ctx
);
13196 gimplify_assign (x
, build_fold_addr_expr (var
), &ilist
);
13200 x
= build_sender_ref (ovar
, ctx
);
13202 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13203 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
13204 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c
)
13205 && TREE_CODE (TREE_TYPE (ovar
)) == ARRAY_TYPE
)
13207 gcc_assert (offloaded
);
13209 = create_tmp_var (TREE_TYPE (TREE_TYPE (x
)));
13210 mark_addressable (avar
);
13211 gimplify_assign (avar
, build_fold_addr_expr (var
), &ilist
);
13212 talign
= DECL_ALIGN_UNIT (avar
);
13213 avar
= build_fold_addr_expr (avar
);
13214 gimplify_assign (x
, avar
, &ilist
);
13216 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13218 gcc_assert (is_gimple_omp_oacc (ctx
->stmt
));
13219 if (!omp_privatize_by_reference (var
))
13221 if (is_gimple_reg (var
)
13222 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13223 suppress_warning (var
);
13224 var
= build_fold_addr_expr (var
);
13227 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13228 gimplify_assign (x
, var
, &ilist
);
13230 else if (is_gimple_reg (var
))
13232 gcc_assert (offloaded
);
13233 tree avar
= create_tmp_var (TREE_TYPE (var
));
13234 mark_addressable (avar
);
13235 enum gomp_map_kind map_kind
= OMP_CLAUSE_MAP_KIND (c
);
13236 if (GOMP_MAP_COPY_TO_P (map_kind
)
13237 || map_kind
== GOMP_MAP_POINTER
13238 || map_kind
== GOMP_MAP_TO_PSET
13239 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13241 /* If we need to initialize a temporary
13242 with VAR because it is not addressable, and
13243 the variable hasn't been initialized yet, then
13244 we'll get a warning for the store to avar.
13245 Don't warn in that case, the mapping might
13247 suppress_warning (var
, OPT_Wuninitialized
);
13248 gimplify_assign (avar
, var
, &ilist
);
13250 avar
= build_fold_addr_expr (avar
);
13251 gimplify_assign (x
, avar
, &ilist
);
13252 if ((GOMP_MAP_COPY_FROM_P (map_kind
)
13253 || map_kind
== GOMP_MAP_FORCE_DEVICEPTR
)
13254 && !TYPE_READONLY (TREE_TYPE (var
)))
13256 x
= unshare_expr (x
);
13257 x
= build_simple_mem_ref (x
);
13258 gimplify_assign (var
, x
, &olist
);
13263 /* While MAP is handled explicitly by the FE,
13264 for 'target update', only the identified is passed. */
13265 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
13266 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TO
)
13267 && (omp_is_allocatable_or_ptr (var
)
13268 && omp_check_optional_argument (var
, false)))
13269 var
= build_fold_indirect_ref (var
);
13270 else if ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FROM
13271 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_TO
)
13272 || (!omp_is_allocatable_or_ptr (var
)
13273 && !omp_check_optional_argument (var
, false)))
13274 var
= build_fold_addr_expr (var
);
13275 gimplify_assign (x
, var
, &ilist
);
13279 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13281 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13282 s
= TREE_TYPE (ovar
);
13283 if (TREE_CODE (s
) == REFERENCE_TYPE
13284 || omp_check_optional_argument (ovar
, false))
13286 s
= TYPE_SIZE_UNIT (s
);
13289 s
= OMP_CLAUSE_SIZE (c
);
13290 if (s
== NULL_TREE
)
13291 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13292 s
= fold_convert (size_type_node
, s
);
13293 purpose
= size_int (map_idx
++);
13294 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13295 if (TREE_CODE (s
) != INTEGER_CST
)
13296 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13298 unsigned HOST_WIDE_INT tkind
, tkind_zero
;
13299 switch (OMP_CLAUSE_CODE (c
))
13301 case OMP_CLAUSE_MAP
:
13302 tkind
= OMP_CLAUSE_MAP_KIND (c
);
13303 tkind_zero
= tkind
;
13304 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c
))
13307 case GOMP_MAP_ALLOC
:
13308 case GOMP_MAP_IF_PRESENT
:
13310 case GOMP_MAP_FROM
:
13311 case GOMP_MAP_TOFROM
:
13312 case GOMP_MAP_ALWAYS_TO
:
13313 case GOMP_MAP_ALWAYS_FROM
:
13314 case GOMP_MAP_ALWAYS_TOFROM
:
13315 case GOMP_MAP_RELEASE
:
13316 case GOMP_MAP_FORCE_TO
:
13317 case GOMP_MAP_FORCE_FROM
:
13318 case GOMP_MAP_FORCE_TOFROM
:
13319 case GOMP_MAP_FORCE_PRESENT
:
13320 tkind_zero
= GOMP_MAP_ZERO_LEN_ARRAY_SECTION
;
13322 case GOMP_MAP_DELETE
:
13323 tkind_zero
= GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION
;
13327 if (tkind_zero
!= tkind
)
13329 if (integer_zerop (s
))
13330 tkind
= tkind_zero
;
13331 else if (integer_nonzerop (s
))
13332 tkind_zero
= tkind
;
13334 if (tkind_zero
== tkind
13335 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c
)
13336 && (((tkind
& GOMP_MAP_FLAG_SPECIAL_BITS
)
13337 & ~GOMP_MAP_IMPLICIT
)
13340 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13341 bits are not interfered by other special bit encodings,
13342 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13344 tkind
|= GOMP_MAP_IMPLICIT
;
13345 tkind_zero
= tkind
;
13348 case OMP_CLAUSE_FIRSTPRIVATE
:
13349 gcc_checking_assert (is_gimple_omp_oacc (ctx
->stmt
));
13350 tkind
= GOMP_MAP_TO
;
13351 tkind_zero
= tkind
;
13353 case OMP_CLAUSE_TO
:
13354 tkind
= GOMP_MAP_TO
;
13355 tkind_zero
= tkind
;
13357 case OMP_CLAUSE_FROM
:
13358 tkind
= GOMP_MAP_FROM
;
13359 tkind_zero
= tkind
;
13362 gcc_unreachable ();
13364 gcc_checking_assert (tkind
13365 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13366 gcc_checking_assert (tkind_zero
13367 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13368 talign
= ceil_log2 (talign
);
13369 tkind
|= talign
<< talign_shift
;
13370 tkind_zero
|= talign
<< talign_shift
;
13371 gcc_checking_assert (tkind
13372 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13373 gcc_checking_assert (tkind_zero
13374 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13375 if (tkind
== tkind_zero
)
13376 x
= build_int_cstu (tkind_type
, tkind
);
13379 TREE_STATIC (TREE_VEC_ELT (t
, 2)) = 0;
13380 x
= build3 (COND_EXPR
, tkind_type
,
13381 fold_build2 (EQ_EXPR
, boolean_type_node
,
13382 unshare_expr (s
), size_zero_node
),
13383 build_int_cstu (tkind_type
, tkind_zero
),
13384 build_int_cstu (tkind_type
, tkind
));
13386 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
, x
);
13391 case OMP_CLAUSE_FIRSTPRIVATE
:
13392 omp_has_device_addr_descr
:
13393 if (is_gimple_omp_oacc (ctx
->stmt
))
13394 goto oacc_firstprivate_map
;
13395 ovar
= OMP_CLAUSE_DECL (c
);
13396 if (omp_privatize_by_reference (ovar
))
13397 talign
= TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13399 talign
= DECL_ALIGN_UNIT (ovar
);
13400 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13401 x
= build_sender_ref (ovar
, ctx
);
13402 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13403 type
= TREE_TYPE (ovar
);
13404 if (omp_privatize_by_reference (ovar
))
13405 type
= TREE_TYPE (type
);
13406 if ((INTEGRAL_TYPE_P (type
)
13407 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13408 || TREE_CODE (type
) == POINTER_TYPE
)
13410 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13412 if (omp_privatize_by_reference (var
))
13413 t
= build_simple_mem_ref (var
);
13414 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13415 suppress_warning (var
);
13416 if (TREE_CODE (type
) != POINTER_TYPE
)
13417 t
= fold_convert (pointer_sized_int_node
, t
);
13418 t
= fold_convert (TREE_TYPE (x
), t
);
13419 gimplify_assign (x
, t
, &ilist
);
13421 else if (omp_privatize_by_reference (var
))
13422 gimplify_assign (x
, var
, &ilist
);
13423 else if (is_gimple_reg (var
))
13425 tree avar
= create_tmp_var (TREE_TYPE (var
));
13426 mark_addressable (avar
);
13427 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13428 suppress_warning (var
);
13429 gimplify_assign (avar
, var
, &ilist
);
13430 avar
= build_fold_addr_expr (avar
);
13431 gimplify_assign (x
, avar
, &ilist
);
13435 var
= build_fold_addr_expr (var
);
13436 gimplify_assign (x
, var
, &ilist
);
13438 if (tkind
== GOMP_MAP_FIRSTPRIVATE_INT
)
13440 else if (omp_privatize_by_reference (ovar
))
13441 s
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar
)));
13443 s
= TYPE_SIZE_UNIT (TREE_TYPE (ovar
));
13444 s
= fold_convert (size_type_node
, s
);
13445 purpose
= size_int (map_idx
++);
13446 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13447 if (TREE_CODE (s
) != INTEGER_CST
)
13448 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13450 gcc_checking_assert (tkind
13451 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13452 talign
= ceil_log2 (talign
);
13453 tkind
|= talign
<< talign_shift
;
13454 gcc_checking_assert (tkind
13455 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13456 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13457 build_int_cstu (tkind_type
, tkind
));
13458 /* Fortran array descriptors: firstprivate of data + attach. */
13459 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13460 && lang_hooks
.decls
.omp_array_data (ovar
, true))
13462 tree not_null_lb
, null_lb
, after_lb
;
13463 tree var1
, var2
, size1
, size2
;
13464 tree present
= omp_check_optional_argument (ovar
, true);
13467 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13468 not_null_lb
= create_artificial_label (clause_loc
);
13469 null_lb
= create_artificial_label (clause_loc
);
13470 after_lb
= create_artificial_label (clause_loc
);
13471 gimple_seq seq
= NULL
;
13472 present
= force_gimple_operand (present
, &seq
, true,
13474 gimple_seq_add_seq (&ilist
, seq
);
13475 gimple_seq_add_stmt (&ilist
,
13476 gimple_build_cond_from_tree (present
,
13477 not_null_lb
, null_lb
));
13478 gimple_seq_add_stmt (&ilist
,
13479 gimple_build_label (not_null_lb
));
13481 var1
= lang_hooks
.decls
.omp_array_data (var
, false);
13482 size1
= lang_hooks
.decls
.omp_array_size (var
, &ilist
);
13483 var2
= build_fold_addr_expr (x
);
13484 if (!POINTER_TYPE_P (TREE_TYPE (var
)))
13485 var
= build_fold_addr_expr (var
);
13486 size2
= fold_build2 (POINTER_DIFF_EXPR
, ssizetype
,
13487 build_fold_addr_expr (var1
), var
);
13488 size2
= fold_convert (sizetype
, size2
);
13491 tree tmp
= create_tmp_var (TREE_TYPE (var1
));
13492 gimplify_assign (tmp
, var1
, &ilist
);
13494 tmp
= create_tmp_var (TREE_TYPE (var2
));
13495 gimplify_assign (tmp
, var2
, &ilist
);
13497 tmp
= create_tmp_var (TREE_TYPE (size1
));
13498 gimplify_assign (tmp
, size1
, &ilist
);
13500 tmp
= create_tmp_var (TREE_TYPE (size2
));
13501 gimplify_assign (tmp
, size2
, &ilist
);
13503 gimple_seq_add_stmt (&ilist
, gimple_build_goto (after_lb
));
13504 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_lb
));
13505 gimplify_assign (var1
, null_pointer_node
, &ilist
);
13506 gimplify_assign (var2
, null_pointer_node
, &ilist
);
13507 gimplify_assign (size1
, size_zero_node
, &ilist
);
13508 gimplify_assign (size2
, size_zero_node
, &ilist
);
13509 gimple_seq_add_stmt (&ilist
, gimple_build_label (after_lb
));
13511 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13512 gimplify_assign (x
, var1
, &ilist
);
13513 tkind
= GOMP_MAP_FIRSTPRIVATE
;
13514 talign
= DECL_ALIGN_UNIT (ovar
);
13515 talign
= ceil_log2 (talign
);
13516 tkind
|= talign
<< talign_shift
;
13517 gcc_checking_assert (tkind
13519 TYPE_MAX_VALUE (tkind_type
)));
13520 purpose
= size_int (map_idx
++);
13521 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size1
);
13522 if (TREE_CODE (size1
) != INTEGER_CST
)
13523 TREE_STATIC (TREE_VEC_ELT (t
, 1)) = 0;
13524 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13525 build_int_cstu (tkind_type
, tkind
));
13526 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13527 gimplify_assign (x
, var2
, &ilist
);
13528 tkind
= GOMP_MAP_ATTACH
;
13529 purpose
= size_int (map_idx
++);
13530 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, size2
);
13531 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13532 build_int_cstu (tkind_type
, tkind
));
13536 case OMP_CLAUSE_USE_DEVICE_PTR
:
13537 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13538 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13539 case OMP_CLAUSE_IS_DEVICE_PTR
:
13540 ovar
= OMP_CLAUSE_DECL (c
);
13541 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13543 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13544 goto omp_has_device_addr_descr
;
13545 while (TREE_CODE (ovar
) == INDIRECT_REF
13546 || TREE_CODE (ovar
) == ARRAY_REF
)
13547 ovar
= TREE_OPERAND (ovar
, 0);
13549 var
= lookup_decl_in_outer_ctx (ovar
, ctx
);
13551 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13553 tkind
= ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13554 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13555 ? GOMP_MAP_USE_DEVICE_PTR
: GOMP_MAP_FIRSTPRIVATE_INT
);
13556 x
= build_sender_ref ((splay_tree_key
) &DECL_NAME (ovar
), ctx
);
13558 else if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13559 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13561 tkind
= GOMP_MAP_USE_DEVICE_PTR
;
13562 x
= build_sender_ref ((splay_tree_key
) &DECL_UID (ovar
), ctx
);
13566 tkind
= GOMP_MAP_FIRSTPRIVATE_INT
;
13567 x
= build_sender_ref (ovar
, ctx
);
13570 if (is_gimple_omp_oacc (ctx
->stmt
))
13572 gcc_assert (tkind
== GOMP_MAP_USE_DEVICE_PTR
);
13574 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
))
13575 tkind
= GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT
;
13578 type
= TREE_TYPE (ovar
);
13579 if (lang_hooks
.decls
.omp_array_data (ovar
, true))
13580 var
= lang_hooks
.decls
.omp_array_data (var
, false);
13581 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13582 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13583 && !omp_privatize_by_reference (ovar
)
13584 && !omp_is_allocatable_or_ptr (ovar
))
13585 || TREE_CODE (type
) == ARRAY_TYPE
)
13586 var
= build_fold_addr_expr (var
);
13589 if (omp_privatize_by_reference (ovar
)
13590 || omp_check_optional_argument (ovar
, false)
13591 || omp_is_allocatable_or_ptr (ovar
))
13593 type
= TREE_TYPE (type
);
13594 if (POINTER_TYPE_P (type
)
13595 && TREE_CODE (type
) != ARRAY_TYPE
13596 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13597 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
13598 && !omp_is_allocatable_or_ptr (ovar
))
13599 || (omp_privatize_by_reference (ovar
)
13600 && omp_is_allocatable_or_ptr (ovar
))))
13601 var
= build_simple_mem_ref (var
);
13602 var
= fold_convert (TREE_TYPE (x
), var
);
13606 present
= omp_check_optional_argument (ovar
, true);
13609 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13610 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13611 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13612 tree new_x
= unshare_expr (x
);
13613 gimplify_expr (&present
, &ilist
, NULL
, is_gimple_val
,
13615 gcond
*cond
= gimple_build_cond_from_tree (present
,
13618 gimple_seq_add_stmt (&ilist
, cond
);
13619 gimple_seq_add_stmt (&ilist
, gimple_build_label (null_label
));
13620 gimplify_assign (new_x
, null_pointer_node
, &ilist
);
13621 gimple_seq_add_stmt (&ilist
, gimple_build_goto (opt_arg_label
));
13622 gimple_seq_add_stmt (&ilist
,
13623 gimple_build_label (notnull_label
));
13624 gimplify_assign (x
, var
, &ilist
);
13625 gimple_seq_add_stmt (&ilist
,
13626 gimple_build_label (opt_arg_label
));
13629 gimplify_assign (x
, var
, &ilist
);
13631 purpose
= size_int (map_idx
++);
13632 CONSTRUCTOR_APPEND_ELT (vsize
, purpose
, s
);
13633 gcc_checking_assert (tkind
13634 < (HOST_WIDE_INT_C (1U) << talign_shift
));
13635 gcc_checking_assert (tkind
13636 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type
)));
13637 CONSTRUCTOR_APPEND_ELT (vkind
, purpose
,
13638 build_int_cstu (tkind_type
, tkind
));
13642 gcc_assert (map_idx
== map_cnt
);
13644 DECL_INITIAL (TREE_VEC_ELT (t
, 1))
13645 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 1)), vsize
);
13646 DECL_INITIAL (TREE_VEC_ELT (t
, 2))
13647 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t
, 2)), vkind
);
13648 for (int i
= 1; i
<= 2; i
++)
13649 if (!TREE_STATIC (TREE_VEC_ELT (t
, i
)))
13651 gimple_seq initlist
= NULL
;
13652 force_gimple_operand (build1 (DECL_EXPR
, void_type_node
,
13653 TREE_VEC_ELT (t
, i
)),
13654 &initlist
, true, NULL_TREE
);
13655 gimple_seq_add_seq (&ilist
, initlist
);
13657 tree clobber
= build_clobber (TREE_TYPE (TREE_VEC_ELT (t
, i
)));
13658 gimple_seq_add_stmt (&olist
,
13659 gimple_build_assign (TREE_VEC_ELT (t
, i
),
13662 else if (omp_maybe_offloaded_ctx (ctx
->outer
))
13664 tree id
= get_identifier ("omp declare target");
13665 tree decl
= TREE_VEC_ELT (t
, i
);
13666 DECL_ATTRIBUTES (decl
)
13667 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13668 varpool_node
*node
= varpool_node::get (decl
);
13671 node
->offloadable
= 1;
13672 if (ENABLE_OFFLOADING
)
13674 g
->have_offload
= true;
13675 vec_safe_push (offload_vars
, t
);
13680 tree clobber
= build_clobber (ctx
->record_type
);
13681 gimple_seq_add_stmt (&olist
, gimple_build_assign (ctx
->sender_decl
,
13685 /* Once all the expansions are done, sequence all the different
13686 fragments inside gimple_omp_body. */
13691 && ctx
->record_type
)
13693 t
= build_fold_addr_expr_loc (loc
, ctx
->sender_decl
);
13694 /* fixup_child_record_type might have changed receiver_decl's type. */
13695 t
= fold_convert_loc (loc
, TREE_TYPE (ctx
->receiver_decl
), t
);
13696 gimple_seq_add_stmt (&new_body
,
13697 gimple_build_assign (ctx
->receiver_decl
, t
));
13699 gimple_seq_add_seq (&new_body
, fplist
);
13701 if (offloaded
|| data_region
)
13703 tree prev
= NULL_TREE
;
13704 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13705 switch (OMP_CLAUSE_CODE (c
))
13710 case OMP_CLAUSE_FIRSTPRIVATE
:
13711 omp_firstprivatize_data_region
:
13712 if (is_gimple_omp_oacc (ctx
->stmt
))
13714 var
= OMP_CLAUSE_DECL (c
);
13715 if (omp_privatize_by_reference (var
)
13716 || is_gimple_reg_type (TREE_TYPE (var
)))
13718 tree new_var
= lookup_decl (var
, ctx
);
13720 type
= TREE_TYPE (var
);
13721 if (omp_privatize_by_reference (var
))
13722 type
= TREE_TYPE (type
);
13723 if ((INTEGRAL_TYPE_P (type
)
13724 && TYPE_PRECISION (type
) <= POINTER_SIZE
)
13725 || TREE_CODE (type
) == POINTER_TYPE
)
13727 x
= build_receiver_ref (var
, false, ctx
);
13728 if (TREE_CODE (type
) != POINTER_TYPE
)
13729 x
= fold_convert (pointer_sized_int_node
, x
);
13730 x
= fold_convert (type
, x
);
13731 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13733 if (omp_privatize_by_reference (var
))
13735 tree v
= create_tmp_var_raw (type
, get_name (var
));
13736 gimple_add_tmp_var (v
);
13737 TREE_ADDRESSABLE (v
) = 1;
13738 gimple_seq_add_stmt (&new_body
,
13739 gimple_build_assign (v
, x
));
13740 x
= build_fold_addr_expr (v
);
13742 gimple_seq_add_stmt (&new_body
,
13743 gimple_build_assign (new_var
, x
));
13747 bool by_ref
= !omp_privatize_by_reference (var
);
13748 x
= build_receiver_ref (var
, by_ref
, ctx
);
13749 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13751 gimple_seq_add_stmt (&new_body
,
13752 gimple_build_assign (new_var
, x
));
13755 else if (is_variable_sized (var
))
13757 tree pvar
= DECL_VALUE_EXPR (var
);
13758 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13759 pvar
= TREE_OPERAND (pvar
, 0);
13760 gcc_assert (DECL_P (pvar
));
13761 tree new_var
= lookup_decl (pvar
, ctx
);
13762 x
= build_receiver_ref (var
, false, ctx
);
13763 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13764 gimple_seq_add_stmt (&new_body
,
13765 gimple_build_assign (new_var
, x
));
13768 case OMP_CLAUSE_PRIVATE
:
13769 if (is_gimple_omp_oacc (ctx
->stmt
))
13771 var
= OMP_CLAUSE_DECL (c
);
13772 if (omp_privatize_by_reference (var
))
13774 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13775 tree new_var
= lookup_decl (var
, ctx
);
13776 x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
13777 if (TREE_CONSTANT (x
))
13779 x
= create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var
)),
13781 gimple_add_tmp_var (x
);
13782 TREE_ADDRESSABLE (x
) = 1;
13783 x
= build_fold_addr_expr_loc (clause_loc
, x
);
13788 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
13789 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
13790 gimple_seq_add_stmt (&new_body
,
13791 gimple_build_assign (new_var
, x
));
13794 case OMP_CLAUSE_USE_DEVICE_PTR
:
13795 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13796 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13797 case OMP_CLAUSE_IS_DEVICE_PTR
:
13799 gimple_seq assign_body
;
13800 bool is_array_data
;
13801 bool do_optional_check
;
13802 assign_body
= NULL
;
13803 do_optional_check
= false;
13804 var
= OMP_CLAUSE_DECL (c
);
13805 is_array_data
= lang_hooks
.decls
.omp_array_data (var
, true) != NULL
;
13806 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
&& is_array_data
)
13807 goto omp_firstprivatize_data_region
;
13809 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_IS_DEVICE_PTR
13810 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13811 x
= build_sender_ref (is_array_data
13812 ? (splay_tree_key
) &DECL_NAME (var
)
13813 : (splay_tree_key
) &DECL_UID (var
), ctx
);
13816 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13818 while (TREE_CODE (var
) == INDIRECT_REF
13819 || TREE_CODE (var
) == ARRAY_REF
)
13820 var
= TREE_OPERAND (var
, 0);
13822 x
= build_receiver_ref (var
, false, ctx
);
13827 bool is_ref
= omp_privatize_by_reference (var
);
13828 do_optional_check
= true;
13829 /* First, we copy the descriptor data from the host; then
13830 we update its data to point to the target address. */
13831 new_var
= lookup_decl (var
, ctx
);
13832 new_var
= DECL_VALUE_EXPR (new_var
);
13835 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
13836 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13837 v2
= maybe_lookup_decl_in_outer_ctx (var
, ctx
);
13841 v2
= build_fold_indirect_ref (v2
);
13842 v
= create_tmp_var_raw (TREE_TYPE (v2
), get_name (var
));
13843 gimple_add_tmp_var (v
);
13844 TREE_ADDRESSABLE (v
) = 1;
13845 gimplify_assign (v
, v2
, &assign_body
);
13846 tree rhs
= build_fold_addr_expr (v
);
13847 gimple_seq_add_stmt (&assign_body
,
13848 gimple_build_assign (new_var
, rhs
));
13851 gimplify_assign (new_var
, v2
, &assign_body
);
13853 v2
= lang_hooks
.decls
.omp_array_data (unshare_expr (v
), false);
13855 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13856 gimple_seq_add_stmt (&assign_body
,
13857 gimple_build_assign (v2
, x
));
13859 else if (is_variable_sized (var
))
13861 tree pvar
= DECL_VALUE_EXPR (var
);
13862 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
13863 pvar
= TREE_OPERAND (pvar
, 0);
13864 gcc_assert (DECL_P (pvar
));
13865 new_var
= lookup_decl (pvar
, ctx
);
13866 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13867 gimple_seq_add_stmt (&assign_body
,
13868 gimple_build_assign (new_var
, x
));
13870 else if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_ADDR
13871 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_HAS_DEVICE_ADDR
)
13872 && !omp_privatize_by_reference (var
)
13873 && !omp_is_allocatable_or_ptr (var
))
13874 || TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
)
13876 new_var
= lookup_decl (var
, ctx
);
13877 new_var
= DECL_VALUE_EXPR (new_var
);
13878 gcc_assert (TREE_CODE (new_var
) == MEM_REF
);
13879 new_var
= TREE_OPERAND (new_var
, 0);
13880 gcc_assert (DECL_P (new_var
));
13881 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13882 gimple_seq_add_stmt (&assign_body
,
13883 gimple_build_assign (new_var
, x
));
13887 tree type
= TREE_TYPE (var
);
13888 new_var
= lookup_decl (var
, ctx
);
13889 if (omp_privatize_by_reference (var
))
13891 type
= TREE_TYPE (type
);
13892 if (POINTER_TYPE_P (type
)
13893 && TREE_CODE (type
) != ARRAY_TYPE
13894 && ((OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_USE_DEVICE_ADDR
13895 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13896 || (omp_privatize_by_reference (var
)
13897 && omp_is_allocatable_or_ptr (var
))))
13899 tree v
= create_tmp_var_raw (type
, get_name (var
));
13900 gimple_add_tmp_var (v
);
13901 TREE_ADDRESSABLE (v
) = 1;
13902 x
= fold_convert (type
, x
);
13903 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
,
13905 gimple_seq_add_stmt (&assign_body
,
13906 gimple_build_assign (v
, x
));
13907 x
= build_fold_addr_expr (v
);
13908 do_optional_check
= true;
13911 new_var
= DECL_VALUE_EXPR (new_var
);
13912 x
= fold_convert (TREE_TYPE (new_var
), x
);
13913 gimplify_expr (&x
, &assign_body
, NULL
, is_gimple_val
, fb_rvalue
);
13914 gimple_seq_add_stmt (&assign_body
,
13915 gimple_build_assign (new_var
, x
));
13918 present
= ((do_optional_check
13919 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_HAS_DEVICE_ADDR
)
13920 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c
), true)
13924 tree null_label
= create_artificial_label (UNKNOWN_LOCATION
);
13925 tree notnull_label
= create_artificial_label (UNKNOWN_LOCATION
);
13926 tree opt_arg_label
= create_artificial_label (UNKNOWN_LOCATION
);
13927 glabel
*null_glabel
= gimple_build_label (null_label
);
13928 glabel
*notnull_glabel
= gimple_build_label (notnull_label
);
13929 ggoto
*opt_arg_ggoto
= gimple_build_goto (opt_arg_label
);
13930 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
,
13932 gimplify_expr (&present
, &new_body
, NULL
, is_gimple_val
,
13934 gcond
*cond
= gimple_build_cond_from_tree (present
,
13937 gimple_seq_add_stmt (&new_body
, cond
);
13938 gimple_seq_add_stmt (&new_body
, null_glabel
);
13939 gimplify_assign (new_var
, null_pointer_node
, &new_body
);
13940 gimple_seq_add_stmt (&new_body
, opt_arg_ggoto
);
13941 gimple_seq_add_stmt (&new_body
, notnull_glabel
);
13942 gimple_seq_add_seq (&new_body
, assign_body
);
13943 gimple_seq_add_stmt (&new_body
,
13944 gimple_build_label (opt_arg_label
));
13947 gimple_seq_add_seq (&new_body
, assign_body
);
13950 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13951 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13952 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13953 or references to VLAs. */
13954 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13955 switch (OMP_CLAUSE_CODE (c
))
13960 case OMP_CLAUSE_MAP
:
13961 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13962 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
13964 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
13965 poly_int64 offset
= 0;
13967 var
= OMP_CLAUSE_DECL (c
);
13969 && TREE_CODE (TREE_TYPE (var
)) == ARRAY_TYPE
13970 && is_global_var (maybe_lookup_decl_in_outer_ctx (var
,
13972 && varpool_node::get_create (var
)->offloadable
)
13974 if (TREE_CODE (var
) == INDIRECT_REF
13975 && TREE_CODE (TREE_OPERAND (var
, 0)) == COMPONENT_REF
)
13976 var
= TREE_OPERAND (var
, 0);
13977 if (TREE_CODE (var
) == COMPONENT_REF
)
13979 var
= get_addr_base_and_unit_offset (var
, &offset
);
13980 gcc_assert (var
!= NULL_TREE
&& DECL_P (var
));
13982 else if (DECL_SIZE (var
)
13983 && TREE_CODE (DECL_SIZE (var
)) != INTEGER_CST
)
13985 tree var2
= DECL_VALUE_EXPR (var
);
13986 gcc_assert (TREE_CODE (var2
) == INDIRECT_REF
);
13987 var2
= TREE_OPERAND (var2
, 0);
13988 gcc_assert (DECL_P (var2
));
13991 tree new_var
= lookup_decl (var
, ctx
), x
;
13992 tree type
= TREE_TYPE (new_var
);
13994 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == INDIRECT_REF
13995 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
13998 type
= TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0));
14000 new_var
= build2 (MEM_REF
, type
,
14001 build_fold_addr_expr (new_var
),
14002 build_int_cst (build_pointer_type (type
),
14005 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPONENT_REF
)
14007 type
= TREE_TYPE (OMP_CLAUSE_DECL (c
));
14008 is_ref
= TREE_CODE (type
) == REFERENCE_TYPE
;
14009 new_var
= build2 (MEM_REF
, type
,
14010 build_fold_addr_expr (new_var
),
14011 build_int_cst (build_pointer_type (type
),
14015 is_ref
= omp_privatize_by_reference (var
);
14016 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
14018 bool ref_to_array
= false;
14019 bool ref_to_ptr
= false;
14022 type
= TREE_TYPE (type
);
14023 if (TREE_CODE (type
) == ARRAY_TYPE
)
14025 type
= build_pointer_type (type
);
14026 ref_to_array
= true;
14029 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14031 tree decl2
= DECL_VALUE_EXPR (new_var
);
14032 gcc_assert (TREE_CODE (decl2
) == MEM_REF
);
14033 decl2
= TREE_OPERAND (decl2
, 0);
14034 gcc_assert (DECL_P (decl2
));
14036 type
= TREE_TYPE (new_var
);
14038 else if (TREE_CODE (type
) == REFERENCE_TYPE
14039 && TREE_CODE (TREE_TYPE (type
)) == POINTER_TYPE
)
14041 type
= TREE_TYPE (type
);
14044 x
= build_receiver_ref (OMP_CLAUSE_DECL (prev
), false, ctx
);
14045 x
= fold_convert_loc (clause_loc
, type
, x
);
14046 if (!integer_zerop (OMP_CLAUSE_SIZE (c
)))
14048 tree bias
= OMP_CLAUSE_SIZE (c
);
14050 bias
= lookup_decl (bias
, ctx
);
14051 bias
= fold_convert_loc (clause_loc
, sizetype
, bias
);
14052 bias
= fold_build1_loc (clause_loc
, NEGATE_EXPR
, sizetype
,
14054 x
= fold_build2_loc (clause_loc
, POINTER_PLUS_EXPR
,
14055 TREE_TYPE (x
), x
, bias
);
14058 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14059 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14060 if ((is_ref
&& !ref_to_array
)
14063 tree t
= create_tmp_var_raw (type
, get_name (var
));
14064 gimple_add_tmp_var (t
);
14065 TREE_ADDRESSABLE (t
) = 1;
14066 gimple_seq_add_stmt (&new_body
,
14067 gimple_build_assign (t
, x
));
14068 x
= build_fold_addr_expr_loc (clause_loc
, t
);
14070 gimple_seq_add_stmt (&new_body
,
14071 gimple_build_assign (new_var
, x
));
14074 else if (OMP_CLAUSE_CHAIN (c
)
14075 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
))
14077 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14078 == GOMP_MAP_FIRSTPRIVATE_POINTER
14079 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
14080 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)))
14083 case OMP_CLAUSE_PRIVATE
:
14084 var
= OMP_CLAUSE_DECL (c
);
14085 if (is_variable_sized (var
))
14087 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14088 tree new_var
= lookup_decl (var
, ctx
);
14089 tree pvar
= DECL_VALUE_EXPR (var
);
14090 gcc_assert (TREE_CODE (pvar
) == INDIRECT_REF
);
14091 pvar
= TREE_OPERAND (pvar
, 0);
14092 gcc_assert (DECL_P (pvar
));
14093 tree new_pvar
= lookup_decl (pvar
, ctx
);
14094 tree atmp
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14095 tree al
= size_int (DECL_ALIGN (var
));
14096 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (new_var
));
14097 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14098 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_pvar
), x
);
14099 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14100 gimple_seq_add_stmt (&new_body
,
14101 gimple_build_assign (new_pvar
, x
));
14103 else if (omp_privatize_by_reference (var
)
14104 && !is_gimple_omp_oacc (ctx
->stmt
))
14106 location_t clause_loc
= OMP_CLAUSE_LOCATION (c
);
14107 tree new_var
= lookup_decl (var
, ctx
);
14108 tree x
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var
)));
14109 if (TREE_CONSTANT (x
))
14114 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
14115 tree rtype
= TREE_TYPE (TREE_TYPE (new_var
));
14116 tree al
= size_int (TYPE_ALIGN (rtype
));
14117 x
= build_call_expr_loc (clause_loc
, atmp
, 2, x
, al
);
14120 x
= fold_convert_loc (clause_loc
, TREE_TYPE (new_var
), x
);
14121 gimplify_expr (&x
, &new_body
, NULL
, is_gimple_val
, fb_rvalue
);
14122 gimple_seq_add_stmt (&new_body
,
14123 gimple_build_assign (new_var
, x
));
14128 gimple_seq fork_seq
= NULL
;
14129 gimple_seq join_seq
= NULL
;
14131 if (offloaded
&& is_gimple_omp_oacc (ctx
->stmt
))
14133 /* If there are reductions on the offloaded region itself, treat
14134 them as a dummy GANG loop. */
14135 tree level
= build_int_cst (integer_type_node
, GOMP_DIM_GANG
);
14137 gcall
*private_marker
= lower_oacc_private_marker (ctx
);
14139 if (private_marker
)
14140 gimple_call_set_arg (private_marker
, 2, level
);
14142 lower_oacc_reductions (gimple_location (ctx
->stmt
), clauses
, level
,
14143 false, NULL
, private_marker
, NULL
, &fork_seq
,
14147 gimple_seq_add_seq (&new_body
, fork_seq
);
14148 gimple_seq_add_seq (&new_body
, tgt_body
);
14149 gimple_seq_add_seq (&new_body
, join_seq
);
14153 new_body
= maybe_catch_exception (new_body
);
14154 gimple_seq_add_stmt (&new_body
, gimple_build_omp_return (false));
14156 gimple_omp_set_body (stmt
, new_body
);
14159 bind
= gimple_build_bind (NULL
, NULL
,
14160 tgt_bind
? gimple_bind_block (tgt_bind
)
14162 gsi_replace (gsi_p
, dep_bind
? dep_bind
: bind
, true);
14163 gimple_bind_add_seq (bind
, ilist
);
14164 gimple_bind_add_stmt (bind
, stmt
);
14165 gimple_bind_add_seq (bind
, olist
);
14167 pop_gimplify_context (NULL
);
14171 gimple_bind_add_seq (dep_bind
, dep_ilist
);
14172 gimple_bind_add_stmt (dep_bind
, bind
);
14173 gimple_bind_add_seq (dep_bind
, dep_olist
);
14174 pop_gimplify_context (dep_bind
);
14178 /* Expand code for an OpenMP teams directive. */
14181 lower_omp_teams (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14183 gomp_teams
*teams_stmt
= as_a
<gomp_teams
*> (gsi_stmt (*gsi_p
));
14184 push_gimplify_context ();
14186 tree block
= make_node (BLOCK
);
14187 gbind
*bind
= gimple_build_bind (NULL
, NULL
, block
);
14188 gsi_replace (gsi_p
, bind
, true);
14189 gimple_seq bind_body
= NULL
;
14190 gimple_seq dlist
= NULL
;
14191 gimple_seq olist
= NULL
;
14193 tree num_teams
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14194 OMP_CLAUSE_NUM_TEAMS
);
14195 tree num_teams_lower
= NULL_TREE
;
14196 if (num_teams
== NULL_TREE
)
14197 num_teams
= build_int_cst (unsigned_type_node
, 0);
14200 num_teams_lower
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams
);
14201 if (num_teams_lower
)
14203 num_teams_lower
= fold_convert (unsigned_type_node
, num_teams_lower
);
14204 gimplify_expr (&num_teams_lower
, &bind_body
, NULL
, is_gimple_val
,
14207 num_teams
= OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams
);
14208 num_teams
= fold_convert (unsigned_type_node
, num_teams
);
14209 gimplify_expr (&num_teams
, &bind_body
, NULL
, is_gimple_val
, fb_rvalue
);
14211 if (num_teams_lower
== NULL_TREE
)
14212 num_teams_lower
= num_teams
;
14213 tree thread_limit
= omp_find_clause (gimple_omp_teams_clauses (teams_stmt
),
14214 OMP_CLAUSE_THREAD_LIMIT
);
14215 if (thread_limit
== NULL_TREE
)
14216 thread_limit
= build_int_cst (unsigned_type_node
, 0);
14219 thread_limit
= OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit
);
14220 thread_limit
= fold_convert (unsigned_type_node
, thread_limit
);
14221 gimplify_expr (&thread_limit
, &bind_body
, NULL
, is_gimple_val
,
14224 location_t loc
= gimple_location (teams_stmt
);
14225 tree decl
= builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4
);
14226 tree rettype
= TREE_TYPE (TREE_TYPE (decl
));
14227 tree first
= create_tmp_var (rettype
);
14228 gimple_seq_add_stmt (&bind_body
,
14229 gimple_build_assign (first
, build_one_cst (rettype
)));
14230 tree llabel
= create_artificial_label (loc
);
14231 gimple_seq_add_stmt (&bind_body
, gimple_build_label (llabel
));
14233 = gimple_build_call (decl
, 4, num_teams_lower
, num_teams
, thread_limit
,
14235 gimple_set_location (call
, loc
);
14236 tree temp
= create_tmp_var (rettype
);
14237 gimple_call_set_lhs (call
, temp
);
14238 gimple_seq_add_stmt (&bind_body
, call
);
14240 tree tlabel
= create_artificial_label (loc
);
14241 tree flabel
= create_artificial_label (loc
);
14242 gimple
*cond
= gimple_build_cond (NE_EXPR
, temp
, build_zero_cst (rettype
),
14244 gimple_seq_add_stmt (&bind_body
, cond
);
14245 gimple_seq_add_stmt (&bind_body
, gimple_build_label (tlabel
));
14246 gimple_seq_add_stmt (&bind_body
,
14247 gimple_build_assign (first
, build_zero_cst (rettype
)));
14249 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt
),
14250 &bind_body
, &dlist
, ctx
, NULL
);
14251 lower_omp (gimple_omp_body_ptr (teams_stmt
), ctx
);
14252 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt
), &olist
,
14254 gimple_seq_add_stmt (&bind_body
, teams_stmt
);
14256 gimple_seq_add_seq (&bind_body
, gimple_omp_body (teams_stmt
));
14257 gimple_omp_set_body (teams_stmt
, NULL
);
14258 gimple_seq_add_seq (&bind_body
, olist
);
14259 gimple_seq_add_seq (&bind_body
, dlist
);
14260 gimple_seq_add_stmt (&bind_body
, gimple_build_omp_return (true));
14261 gimple_seq_add_stmt (&bind_body
, gimple_build_goto (llabel
));
14262 gimple_seq_add_stmt (&bind_body
, gimple_build_label (flabel
));
14263 gimple_bind_set_body (bind
, bind_body
);
14265 pop_gimplify_context (bind
);
14267 gimple_bind_append_vars (bind
, ctx
->block_vars
);
14268 BLOCK_VARS (block
) = ctx
->block_vars
;
14269 if (BLOCK_VARS (block
))
14270 TREE_USED (block
) = 1;
14273 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14274 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14275 of OMP context, but with make_addressable_vars set. */
14278 lower_omp_regimplify_p (tree
*tp
, int *walk_subtrees
,
14283 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14284 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
14286 && DECL_HAS_VALUE_EXPR_P (t
))
14289 if (make_addressable_vars
14291 && bitmap_bit_p (make_addressable_vars
, DECL_UID (t
)))
14294 /* If a global variable has been privatized, TREE_CONSTANT on
14295 ADDR_EXPR might be wrong. */
14296 if (data
== NULL
&& TREE_CODE (t
) == ADDR_EXPR
)
14297 recompute_tree_invariant_for_addr_expr (t
);
14299 *walk_subtrees
= !IS_TYPE_OR_DECL_P (t
);
14303 /* Data to be communicated between lower_omp_regimplify_operands and
14304 lower_omp_regimplify_operands_p. */
14306 struct lower_omp_regimplify_operands_data
14312 /* Helper function for lower_omp_regimplify_operands. Find
14313 omp_member_access_dummy_var vars and adjust temporarily their
14314 DECL_VALUE_EXPRs if needed. */
14317 lower_omp_regimplify_operands_p (tree
*tp
, int *walk_subtrees
,
14320 tree t
= omp_member_access_dummy_var (*tp
);
14323 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
14324 lower_omp_regimplify_operands_data
*ldata
14325 = (lower_omp_regimplify_operands_data
*) wi
->info
;
14326 tree o
= maybe_lookup_decl (t
, ldata
->ctx
);
14329 ldata
->decls
->safe_push (DECL_VALUE_EXPR (*tp
));
14330 ldata
->decls
->safe_push (*tp
);
14331 tree v
= unshare_and_remap (DECL_VALUE_EXPR (*tp
), t
, o
);
14332 SET_DECL_VALUE_EXPR (*tp
, v
);
14335 *walk_subtrees
= !IS_TYPE_OR_DECL_P (*tp
);
14339 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14340 of omp_member_access_dummy_var vars during regimplification. */
14343 lower_omp_regimplify_operands (omp_context
*ctx
, gimple
*stmt
,
14344 gimple_stmt_iterator
*gsi_p
)
14346 auto_vec
<tree
, 10> decls
;
14349 struct walk_stmt_info wi
;
14350 memset (&wi
, '\0', sizeof (wi
));
14351 struct lower_omp_regimplify_operands_data data
;
14353 data
.decls
= &decls
;
14355 walk_gimple_op (stmt
, lower_omp_regimplify_operands_p
, &wi
);
14357 gimple_regimplify_operands (stmt
, gsi_p
);
14358 while (!decls
.is_empty ())
14360 tree t
= decls
.pop ();
14361 tree v
= decls
.pop ();
14362 SET_DECL_VALUE_EXPR (t
, v
);
14367 lower_omp_1 (gimple_stmt_iterator
*gsi_p
, omp_context
*ctx
)
14369 gimple
*stmt
= gsi_stmt (*gsi_p
);
14370 struct walk_stmt_info wi
;
14373 if (gimple_has_location (stmt
))
14374 input_location
= gimple_location (stmt
);
14376 if (make_addressable_vars
)
14377 memset (&wi
, '\0', sizeof (wi
));
14379 /* If we have issued syntax errors, avoid doing any heavy lifting.
14380 Just replace the OMP directives with a NOP to avoid
14381 confusing RTL expansion. */
14382 if (seen_error () && is_gimple_omp (stmt
))
14384 gsi_replace (gsi_p
, gimple_build_nop (), true);
14388 switch (gimple_code (stmt
))
14392 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14393 if ((ctx
|| make_addressable_vars
)
14394 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt
),
14395 lower_omp_regimplify_p
,
14396 ctx
? NULL
: &wi
, NULL
)
14397 || walk_tree (gimple_cond_rhs_ptr (cond_stmt
),
14398 lower_omp_regimplify_p
,
14399 ctx
? NULL
: &wi
, NULL
)))
14400 lower_omp_regimplify_operands (ctx
, cond_stmt
, gsi_p
);
14404 lower_omp (gimple_catch_handler_ptr (as_a
<gcatch
*> (stmt
)), ctx
);
14406 case GIMPLE_EH_FILTER
:
14407 lower_omp (gimple_eh_filter_failure_ptr (stmt
), ctx
);
14410 lower_omp (gimple_try_eval_ptr (stmt
), ctx
);
14411 lower_omp (gimple_try_cleanup_ptr (stmt
), ctx
);
14413 case GIMPLE_TRANSACTION
:
14414 lower_omp (gimple_transaction_body_ptr (as_a
<gtransaction
*> (stmt
)),
14418 if (ctx
&& is_gimple_omp_oacc (ctx
->stmt
))
14420 tree vars
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
14421 oacc_privatization_scan_decl_chain (ctx
, vars
);
14423 lower_omp (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)), ctx
);
14424 maybe_remove_omp_member_access_dummy_vars (as_a
<gbind
*> (stmt
));
14426 case GIMPLE_OMP_PARALLEL
:
14427 case GIMPLE_OMP_TASK
:
14428 ctx
= maybe_lookup_ctx (stmt
);
14430 if (ctx
->cancellable
)
14431 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14432 lower_omp_taskreg (gsi_p
, ctx
);
14434 case GIMPLE_OMP_FOR
:
14435 ctx
= maybe_lookup_ctx (stmt
);
14437 if (ctx
->cancellable
)
14438 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14439 lower_omp_for (gsi_p
, ctx
);
14441 case GIMPLE_OMP_SECTIONS
:
14442 ctx
= maybe_lookup_ctx (stmt
);
14444 if (ctx
->cancellable
)
14445 ctx
->cancel_label
= create_artificial_label (UNKNOWN_LOCATION
);
14446 lower_omp_sections (gsi_p
, ctx
);
14448 case GIMPLE_OMP_SCOPE
:
14449 ctx
= maybe_lookup_ctx (stmt
);
14451 lower_omp_scope (gsi_p
, ctx
);
14453 case GIMPLE_OMP_SINGLE
:
14454 ctx
= maybe_lookup_ctx (stmt
);
14456 lower_omp_single (gsi_p
, ctx
);
14458 case GIMPLE_OMP_MASTER
:
14459 case GIMPLE_OMP_MASKED
:
14460 ctx
= maybe_lookup_ctx (stmt
);
14462 lower_omp_master (gsi_p
, ctx
);
14464 case GIMPLE_OMP_TASKGROUP
:
14465 ctx
= maybe_lookup_ctx (stmt
);
14467 lower_omp_taskgroup (gsi_p
, ctx
);
14469 case GIMPLE_OMP_ORDERED
:
14470 ctx
= maybe_lookup_ctx (stmt
);
14472 lower_omp_ordered (gsi_p
, ctx
);
14474 case GIMPLE_OMP_SCAN
:
14475 ctx
= maybe_lookup_ctx (stmt
);
14477 lower_omp_scan (gsi_p
, ctx
);
14479 case GIMPLE_OMP_CRITICAL
:
14480 ctx
= maybe_lookup_ctx (stmt
);
14482 lower_omp_critical (gsi_p
, ctx
);
14484 case GIMPLE_OMP_ATOMIC_LOAD
:
14485 if ((ctx
|| make_addressable_vars
)
14486 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14487 as_a
<gomp_atomic_load
*> (stmt
)),
14488 lower_omp_regimplify_p
, ctx
? NULL
: &wi
, NULL
))
14489 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14491 case GIMPLE_OMP_TARGET
:
14492 ctx
= maybe_lookup_ctx (stmt
);
14494 lower_omp_target (gsi_p
, ctx
);
14496 case GIMPLE_OMP_TEAMS
:
14497 ctx
= maybe_lookup_ctx (stmt
);
14499 if (gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
14500 lower_omp_taskreg (gsi_p
, ctx
);
14502 lower_omp_teams (gsi_p
, ctx
);
14506 call_stmt
= as_a
<gcall
*> (stmt
);
14507 fndecl
= gimple_call_fndecl (call_stmt
);
14509 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
14510 switch (DECL_FUNCTION_CODE (fndecl
))
14512 case BUILT_IN_GOMP_BARRIER
:
14516 case BUILT_IN_GOMP_CANCEL
:
14517 case BUILT_IN_GOMP_CANCELLATION_POINT
:
14520 if (gimple_code (cctx
->stmt
) == GIMPLE_OMP_SECTION
)
14521 cctx
= cctx
->outer
;
14522 gcc_assert (gimple_call_lhs (call_stmt
) == NULL_TREE
);
14523 if (!cctx
->cancellable
)
14525 if (DECL_FUNCTION_CODE (fndecl
)
14526 == BUILT_IN_GOMP_CANCELLATION_POINT
)
14528 stmt
= gimple_build_nop ();
14529 gsi_replace (gsi_p
, stmt
, false);
14533 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_GOMP_BARRIER
)
14535 fndecl
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL
);
14536 gimple_call_set_fndecl (call_stmt
, fndecl
);
14537 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fndecl
));
14540 lhs
= create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl
)));
14541 gimple_call_set_lhs (call_stmt
, lhs
);
14542 tree fallthru_label
;
14543 fallthru_label
= create_artificial_label (UNKNOWN_LOCATION
);
14545 g
= gimple_build_label (fallthru_label
);
14546 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14547 g
= gimple_build_cond (NE_EXPR
, lhs
,
14548 fold_convert (TREE_TYPE (lhs
),
14549 boolean_false_node
),
14550 cctx
->cancel_label
, fallthru_label
);
14551 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14558 case GIMPLE_ASSIGN
:
14559 for (omp_context
*up
= ctx
; up
; up
= up
->outer
)
14561 if (gimple_code (up
->stmt
) == GIMPLE_OMP_ORDERED
14562 || gimple_code (up
->stmt
) == GIMPLE_OMP_CRITICAL
14563 || gimple_code (up
->stmt
) == GIMPLE_OMP_TASKGROUP
14564 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCOPE
14565 || gimple_code (up
->stmt
) == GIMPLE_OMP_SECTION
14566 || gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
14567 || (gimple_code (up
->stmt
) == GIMPLE_OMP_TARGET
14568 && (gimple_omp_target_kind (up
->stmt
)
14569 == GF_OMP_TARGET_KIND_DATA
)))
14571 else if (!up
->lastprivate_conditional_map
)
14573 tree lhs
= get_base_address (gimple_assign_lhs (stmt
));
14574 if (TREE_CODE (lhs
) == MEM_REF
14575 && DECL_P (TREE_OPERAND (lhs
, 0))
14576 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs
,
14577 0))) == REFERENCE_TYPE
)
14578 lhs
= TREE_OPERAND (lhs
, 0);
14580 if (tree
*v
= up
->lastprivate_conditional_map
->get (lhs
))
14583 if (up
->combined_into_simd_safelen1
)
14586 if (gimple_code (up
->stmt
) == GIMPLE_OMP_SCAN
)
14589 if (gimple_code (up
->stmt
) == GIMPLE_OMP_FOR
)
14590 clauses
= gimple_omp_for_clauses (up
->stmt
);
14592 clauses
= gimple_omp_sections_clauses (up
->stmt
);
14593 tree c
= omp_find_clause (clauses
, OMP_CLAUSE__CONDTEMP_
);
14594 if (!OMP_CLAUSE__CONDTEMP__ITER (c
))
14595 c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14596 OMP_CLAUSE__CONDTEMP_
);
14597 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c
));
14598 gimple
*g
= gimple_build_assign (*v
, OMP_CLAUSE_DECL (c
));
14599 gsi_insert_after (gsi_p
, g
, GSI_SAME_STMT
);
14606 if ((ctx
|| make_addressable_vars
)
14607 && walk_gimple_op (stmt
, lower_omp_regimplify_p
,
14610 /* Just remove clobbers, this should happen only if we have
14611 "privatized" local addressable variables in SIMD regions,
14612 the clobber isn't needed in that case and gimplifying address
14613 of the ARRAY_REF into a pointer and creating MEM_REF based
14614 clobber would create worse code than we get with the clobber
14616 if (gimple_clobber_p (stmt
))
14618 gsi_replace (gsi_p
, gimple_build_nop (), true);
14621 lower_omp_regimplify_operands (ctx
, stmt
, gsi_p
);
14628 lower_omp (gimple_seq
*body
, omp_context
*ctx
)
14630 location_t saved_location
= input_location
;
14631 gimple_stmt_iterator gsi
;
14632 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14633 lower_omp_1 (&gsi
, ctx
);
14634 /* During gimplification, we haven't folded statments inside offloading
14635 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14636 if (target_nesting_level
|| taskreg_nesting_level
)
14637 for (gsi
= gsi_start (*body
); !gsi_end_p (gsi
); gsi_next (&gsi
))
14639 input_location
= saved_location
;
14642 /* Main entry point. */
14644 static unsigned int
14645 execute_lower_omp (void)
14651 /* This pass always runs, to provide PROP_gimple_lomp.
14652 But often, there is nothing to do. */
14653 if (flag_openacc
== 0 && flag_openmp
== 0
14654 && flag_openmp_simd
== 0)
14657 all_contexts
= splay_tree_new (splay_tree_compare_pointers
, 0,
14658 delete_omp_context
);
14660 body
= gimple_body (current_function_decl
);
14662 scan_omp (&body
, NULL
);
14663 gcc_assert (taskreg_nesting_level
== 0);
14664 FOR_EACH_VEC_ELT (taskreg_contexts
, i
, ctx
)
14665 finish_taskreg_scan (ctx
);
14666 taskreg_contexts
.release ();
14668 if (all_contexts
->root
)
14670 if (make_addressable_vars
)
14671 push_gimplify_context ();
14672 lower_omp (&body
, NULL
);
14673 if (make_addressable_vars
)
14674 pop_gimplify_context (NULL
);
14679 splay_tree_delete (all_contexts
);
14680 all_contexts
= NULL
;
14682 BITMAP_FREE (make_addressable_vars
);
14683 BITMAP_FREE (global_nonaddressable_vars
);
14685 /* If current function is a method, remove artificial dummy VAR_DECL created
14686 for non-static data member privatization, they aren't needed for
14687 debuginfo nor anything else, have been already replaced everywhere in the
14688 IL and cause problems with LTO. */
14689 if (DECL_ARGUMENTS (current_function_decl
)
14690 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl
))
14691 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl
)))
14693 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl
));
14695 for (auto task_stmt
: task_cpyfns
)
14696 finalize_task_copyfn (task_stmt
);
14697 task_cpyfns
.release ();
14703 const pass_data pass_data_lower_omp
=
14705 GIMPLE_PASS
, /* type */
14706 "omplower", /* name */
14707 OPTGROUP_OMP
, /* optinfo_flags */
14708 TV_NONE
, /* tv_id */
14709 PROP_gimple_any
, /* properties_required */
14710 PROP_gimple_lomp
| PROP_gimple_lomp_dev
, /* properties_provided */
14711 0, /* properties_destroyed */
14712 0, /* todo_flags_start */
14713 0, /* todo_flags_finish */
14716 class pass_lower_omp
: public gimple_opt_pass
14719 pass_lower_omp (gcc::context
*ctxt
)
14720 : gimple_opt_pass (pass_data_lower_omp
, ctxt
)
14723 /* opt_pass methods: */
14724 unsigned int execute (function
*) final override
14726 return execute_lower_omp ();
14729 }; // class pass_lower_omp
14731 } // anon namespace
14734 make_pass_lower_omp (gcc::context
*ctxt
)
14736 return new pass_lower_omp (ctxt
);
14739 /* The following is a utility to diagnose structured block violations.
14740 It is not part of the "omplower" pass, as that's invoked too late. It
14741 should be invoked by the respective front ends after gimplification. */
14743 static splay_tree all_labels
;
14745 /* Check for mismatched contexts and generate an error if needed. Return
14746 true if an error is detected. */
14749 diagnose_sb_0 (gimple_stmt_iterator
*gsi_p
,
14750 gimple
*branch_ctx
, gimple
*label_ctx
)
14752 gcc_checking_assert (!branch_ctx
|| is_gimple_omp (branch_ctx
));
14753 gcc_checking_assert (!label_ctx
|| is_gimple_omp (label_ctx
));
14755 if (label_ctx
== branch_ctx
)
14758 const char* kind
= NULL
;
14762 if ((branch_ctx
&& is_gimple_omp_oacc (branch_ctx
))
14763 || (label_ctx
&& is_gimple_omp_oacc (label_ctx
)))
14765 gcc_checking_assert (kind
== NULL
);
14771 gcc_checking_assert (flag_openmp
|| flag_openmp_simd
);
14775 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14776 so we could traverse it and issue a correct "exit" or "enter" error
14777 message upon a structured block violation.
14779 We built the context by building a list with tree_cons'ing, but there is
14780 no easy counterpart in gimple tuples. It seems like far too much work
14781 for issuing exit/enter error messages. If someone really misses the
14782 distinct error message... patches welcome. */
14785 /* Try to avoid confusing the user by producing and error message
14786 with correct "exit" or "enter" verbiage. We prefer "exit"
14787 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
14788 if (branch_ctx
== NULL
)
14794 if (TREE_VALUE (label_ctx
) == branch_ctx
)
14799 label_ctx
= TREE_CHAIN (label_ctx
);
14804 error ("invalid exit from %s structured block", kind
);
14806 error ("invalid entry to %s structured block", kind
);
14809 /* If it's obvious we have an invalid entry, be specific about the error. */
14810 if (branch_ctx
== NULL
)
14811 error ("invalid entry to %s structured block", kind
);
14814 /* Otherwise, be vague and lazy, but efficient. */
14815 error ("invalid branch to/from %s structured block", kind
);
14818 gsi_replace (gsi_p
, gimple_build_nop (), false);
14822 /* Pass 1: Create a minimal tree of structured blocks, and record
14823 where each label is found. */
14826 diagnose_sb_1 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14827 struct walk_stmt_info
*wi
)
14829 gimple
*context
= (gimple
*) wi
->info
;
14830 gimple
*inner_context
;
14831 gimple
*stmt
= gsi_stmt (*gsi_p
);
14833 *handled_ops_p
= true;
14835 switch (gimple_code (stmt
))
14839 case GIMPLE_OMP_PARALLEL
:
14840 case GIMPLE_OMP_TASK
:
14841 case GIMPLE_OMP_SCOPE
:
14842 case GIMPLE_OMP_SECTIONS
:
14843 case GIMPLE_OMP_SINGLE
:
14844 case GIMPLE_OMP_SECTION
:
14845 case GIMPLE_OMP_MASTER
:
14846 case GIMPLE_OMP_MASKED
:
14847 case GIMPLE_OMP_ORDERED
:
14848 case GIMPLE_OMP_SCAN
:
14849 case GIMPLE_OMP_CRITICAL
:
14850 case GIMPLE_OMP_TARGET
:
14851 case GIMPLE_OMP_TEAMS
:
14852 case GIMPLE_OMP_TASKGROUP
:
14853 /* The minimal context here is just the current OMP construct. */
14854 inner_context
= stmt
;
14855 wi
->info
= inner_context
;
14856 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14857 wi
->info
= context
;
14860 case GIMPLE_OMP_FOR
:
14861 inner_context
= stmt
;
14862 wi
->info
= inner_context
;
14863 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14865 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
14866 diagnose_sb_1
, NULL
, wi
);
14867 walk_gimple_seq (gimple_omp_body (stmt
), diagnose_sb_1
, NULL
, wi
);
14868 wi
->info
= context
;
14872 splay_tree_insert (all_labels
,
14873 (splay_tree_key
) gimple_label_label (
14874 as_a
<glabel
*> (stmt
)),
14875 (splay_tree_value
) context
);
14885 /* Pass 2: Check each branch and see if its context differs from that of
14886 the destination label's context. */
14889 diagnose_sb_2 (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
14890 struct walk_stmt_info
*wi
)
14892 gimple
*context
= (gimple
*) wi
->info
;
14894 gimple
*stmt
= gsi_stmt (*gsi_p
);
14896 *handled_ops_p
= true;
14898 switch (gimple_code (stmt
))
14902 case GIMPLE_OMP_PARALLEL
:
14903 case GIMPLE_OMP_TASK
:
14904 case GIMPLE_OMP_SCOPE
:
14905 case GIMPLE_OMP_SECTIONS
:
14906 case GIMPLE_OMP_SINGLE
:
14907 case GIMPLE_OMP_SECTION
:
14908 case GIMPLE_OMP_MASTER
:
14909 case GIMPLE_OMP_MASKED
:
14910 case GIMPLE_OMP_ORDERED
:
14911 case GIMPLE_OMP_SCAN
:
14912 case GIMPLE_OMP_CRITICAL
:
14913 case GIMPLE_OMP_TARGET
:
14914 case GIMPLE_OMP_TEAMS
:
14915 case GIMPLE_OMP_TASKGROUP
:
14917 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14918 wi
->info
= context
;
14921 case GIMPLE_OMP_FOR
:
14923 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14925 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
),
14926 diagnose_sb_2
, NULL
, wi
);
14927 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), diagnose_sb_2
, NULL
, wi
);
14928 wi
->info
= context
;
14933 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
14934 tree lab
= gimple_cond_true_label (cond_stmt
);
14937 n
= splay_tree_lookup (all_labels
,
14938 (splay_tree_key
) lab
);
14939 diagnose_sb_0 (gsi_p
, context
,
14940 n
? (gimple
*) n
->value
: NULL
);
14942 lab
= gimple_cond_false_label (cond_stmt
);
14945 n
= splay_tree_lookup (all_labels
,
14946 (splay_tree_key
) lab
);
14947 diagnose_sb_0 (gsi_p
, context
,
14948 n
? (gimple
*) n
->value
: NULL
);
14955 tree lab
= gimple_goto_dest (stmt
);
14956 if (TREE_CODE (lab
) != LABEL_DECL
)
14959 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14960 diagnose_sb_0 (gsi_p
, context
, n
? (gimple
*) n
->value
: NULL
);
14964 case GIMPLE_SWITCH
:
14966 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
14968 for (i
= 0; i
< gimple_switch_num_labels (switch_stmt
); ++i
)
14970 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
14971 n
= splay_tree_lookup (all_labels
, (splay_tree_key
) lab
);
14972 if (n
&& diagnose_sb_0 (gsi_p
, context
, (gimple
*) n
->value
))
14978 case GIMPLE_RETURN
:
14979 diagnose_sb_0 (gsi_p
, context
, NULL
);
14989 static unsigned int
14990 diagnose_omp_structured_block_errors (void)
14992 struct walk_stmt_info wi
;
14993 gimple_seq body
= gimple_body (current_function_decl
);
14995 all_labels
= splay_tree_new (splay_tree_compare_pointers
, 0, 0);
14997 memset (&wi
, 0, sizeof (wi
));
14998 walk_gimple_seq (body
, diagnose_sb_1
, NULL
, &wi
);
15000 memset (&wi
, 0, sizeof (wi
));
15001 wi
.want_locations
= true;
15002 walk_gimple_seq_mod (&body
, diagnose_sb_2
, NULL
, &wi
);
15004 gimple_set_body (current_function_decl
, body
);
15006 splay_tree_delete (all_labels
);
15014 const pass_data pass_data_diagnose_omp_blocks
=
15016 GIMPLE_PASS
, /* type */
15017 "*diagnose_omp_blocks", /* name */
15018 OPTGROUP_OMP
, /* optinfo_flags */
15019 TV_NONE
, /* tv_id */
15020 PROP_gimple_any
, /* properties_required */
15021 0, /* properties_provided */
15022 0, /* properties_destroyed */
15023 0, /* todo_flags_start */
15024 0, /* todo_flags_finish */
15027 class pass_diagnose_omp_blocks
: public gimple_opt_pass
15030 pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15031 : gimple_opt_pass (pass_data_diagnose_omp_blocks
, ctxt
)
15034 /* opt_pass methods: */
15035 bool gate (function
*) final override
15037 return flag_openacc
|| flag_openmp
|| flag_openmp_simd
;
15039 unsigned int execute (function
*) final override
15041 return diagnose_omp_structured_block_errors ();
15044 }; // class pass_diagnose_omp_blocks
15046 } // anon namespace
15049 make_pass_diagnose_omp_blocks (gcc::context
*ctxt
)
15051 return new pass_diagnose_omp_blocks (ctxt
);
15055 #include "gt-omp-low.h"