gcc/cp/cp-gimplify.c
1 /* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.
2
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 #include "gcc-rich-location.h"
38 #include "memmodel.h"
39 #include "tm_p.h"
40 #include "output.h"
41 #include "file-prefix-map.h"
42 #include "cgraph.h"
43 #include "omp-general.h"
44
45 /* Forward declarations. */
46
47 static tree cp_genericize_r (tree *, int *, void *);
48 static tree cp_fold_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*, bool);
50 static tree cp_fold (tree);
51
52 /* Genericize a TRY_BLOCK. */
53
54 static void
55 genericize_try_block (tree *stmt_p)
56 {
57 tree body = TRY_STMTS (*stmt_p);
58 tree cleanup = TRY_HANDLERS (*stmt_p);
59
60 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
61 }
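/* A minimal sketch of the source-level construct lowered above (an
   assumed example, not from this file):

     try { f (); } catch (int e) { g (e); }

   The TRY_BLOCK holding the call to f () and its handler list becomes a
   GENERIC TRY_CATCH_EXPR; the HANDLERs themselves are turned into
   CATCH_EXPRs by genericize_catch_block below.  */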
62
63 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
64
65 static void
66 genericize_catch_block (tree *stmt_p)
67 {
68 tree type = HANDLER_TYPE (*stmt_p);
69 tree body = HANDLER_BODY (*stmt_p);
70
71 /* FIXME should the caught type go in TREE_TYPE? */
72 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
73 }
74
75 /* A terser interface for building a representation of an exception
76 specification. */
77
78 static tree
79 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
80 {
81 tree t;
82
83 /* FIXME should the allowed types go in TREE_TYPE? */
84 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
85 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
86
87 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
88 append_to_statement_list (body, &TREE_OPERAND (t, 0));
89
90 return t;
91 }
92
93 /* Genericize an EH_SPEC_BLOCK by converting it to a
94 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
95
96 static void
97 genericize_eh_spec_block (tree *stmt_p)
98 {
99 tree body = EH_SPEC_STMTS (*stmt_p);
100 tree allowed = EH_SPEC_RAISES (*stmt_p);
101 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
102
103 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
104 TREE_NO_WARNING (*stmt_p) = true;
105 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
106 }
107
108 /* Return the first non-compound statement in STMT. */
109
110 tree
111 first_stmt (tree stmt)
112 {
113 switch (TREE_CODE (stmt))
114 {
115 case STATEMENT_LIST:
116 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
117 return first_stmt (p->stmt);
118 return void_node;
119
120 case BIND_EXPR:
121 return first_stmt (BIND_EXPR_BODY (stmt));
122
123 default:
124 return stmt;
125 }
126 }
127
128 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
129
130 static void
131 genericize_if_stmt (tree *stmt_p)
132 {
133 tree stmt, cond, then_, else_;
134 location_t locus = EXPR_LOCATION (*stmt_p);
135
136 stmt = *stmt_p;
137 cond = IF_COND (stmt);
138 then_ = THEN_CLAUSE (stmt);
139 else_ = ELSE_CLAUSE (stmt);
140
141 if (then_ && else_)
142 {
143 tree ft = first_stmt (then_);
144 tree fe = first_stmt (else_);
145 br_predictor pr;
146 if (TREE_CODE (ft) == PREDICT_EXPR
147 && TREE_CODE (fe) == PREDICT_EXPR
148 && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
149 && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
150 {
151 gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
152 richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
153 warning_at (&richloc, OPT_Wattributes,
154 "both branches of %<if%> statement marked as %qs",
155 pr == PRED_HOT_LABEL ? "likely" : "unlikely");
156 }
157 }
158
159 if (!then_)
160 then_ = build_empty_stmt (locus);
161 if (!else_)
162 else_ = build_empty_stmt (locus);
163
164 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
165 stmt = then_;
166 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
167 stmt = else_;
168 else
169 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
170 protected_set_expr_location_if_unset (stmt, locus);
171 *stmt_p = stmt;
172 }
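/* Illustrative example for the warning above (assumed, not from this
   file): with the C++20 attributes both arms can carry the same
   predictor, e.g.

     if (x) [[likely]] f ();
     else [[likely]] g ();

   first_stmt finds a PREDICT_EXPR with PRED_HOT_LABEL in both branches,
   so -Wattributes reports "both branches of 'if' statement marked as
   'likely'".  */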
173
174 /* Hook into the middle of gimplifying an OMP_FOR node. */
175
176 static enum gimplify_status
177 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
178 {
179 tree for_stmt = *expr_p;
180 gimple_seq seq = NULL;
181
182 /* Protect ourselves from recursion. */
183 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
184 return GS_UNHANDLED;
185 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
186
187 gimplify_and_add (for_stmt, &seq);
188 gimple_seq_add_seq (pre_p, seq);
189
190 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
191
192 return GS_ALL_DONE;
193 }
194
195 /* Gimplify an EXPR_STMT node. */
196
197 static void
198 gimplify_expr_stmt (tree *stmt_p)
199 {
200 tree stmt = EXPR_STMT_EXPR (*stmt_p);
201
202 if (stmt == error_mark_node)
203 stmt = NULL;
204
205 /* Gimplification of a statement expression will nullify the
206 statement if all its side effects are moved to *PRE_P and *POST_P.
207
208 In this case we will not want to emit the gimplified statement.
209 However, we may still want to emit a warning, so we do that before
210 gimplification. */
211 if (stmt && warn_unused_value)
212 {
213 if (!TREE_SIDE_EFFECTS (stmt))
214 {
215 if (!IS_EMPTY_STMT (stmt)
216 && !VOID_TYPE_P (TREE_TYPE (stmt))
217 && !TREE_NO_WARNING (stmt))
218 warning (OPT_Wunused_value, "statement with no effect");
219 }
220 else
221 warn_if_unused_value (stmt, input_location);
222 }
223
224 if (stmt == NULL_TREE)
225 stmt = alloc_stmt_list ();
226
227 *stmt_p = stmt;
228 }
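/* A small sketch of the -Wunused-value case handled above (assumed
   example, not from this file):

     int i = 0;
     i + 1;   // expression statement with no side effects

   The EXPR_STMT wrapping "i + 1" has no TREE_SIDE_EFFECTS, is neither an
   empty statement nor void, so "statement with no effect" is emitted
   before gimplification drops the statement.  */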
229
230 /* Gimplify initialization from an AGGR_INIT_EXPR. */
231
232 static void
233 cp_gimplify_init_expr (tree *expr_p)
234 {
235 tree from = TREE_OPERAND (*expr_p, 1);
236 tree to = TREE_OPERAND (*expr_p, 0);
237 tree t;
238
239 /* What about code that pulls out the temp and uses it elsewhere? I
240 think that such code never uses the TARGET_EXPR as an initializer. If
241 I'm wrong, we'll abort because the temp won't have any RTL. In that
242 case, I guess we'll need to replace references somehow. */
243 if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
244 from = TARGET_EXPR_INITIAL (from);
245
246 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
247 inside the TARGET_EXPR. */
248 for (t = from; t; )
249 {
250 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
251
252 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
253 replace the slot operand with our target.
254
255 Should we add a target parm to gimplify_expr instead? No, as in this
256 case we want to replace the INIT_EXPR. */
257 if (TREE_CODE (sub) == AGGR_INIT_EXPR
258 || TREE_CODE (sub) == VEC_INIT_EXPR)
259 {
260 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
261 AGGR_INIT_EXPR_SLOT (sub) = to;
262 else
263 VEC_INIT_EXPR_SLOT (sub) = to;
264 *expr_p = from;
265
266 /* The initialization is now a side-effect, so the container can
267 become void. */
268 if (from != sub)
269 TREE_TYPE (from) = void_type_node;
270 }
271
272 /* Handle aggregate NSDMI. */
273 replace_placeholders (sub, to);
274
275 if (t == sub)
276 break;
277 else
278 t = TREE_OPERAND (t, 1);
279 }
280
281 }
282
283 /* Gimplify a MUST_NOT_THROW_EXPR. */
284
285 static enum gimplify_status
286 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
287 {
288 tree stmt = *expr_p;
289 tree temp = voidify_wrapper_expr (stmt, NULL);
290 tree body = TREE_OPERAND (stmt, 0);
291 gimple_seq try_ = NULL;
292 gimple_seq catch_ = NULL;
293 gimple *mnt;
294
295 gimplify_and_add (body, &try_);
296 mnt = gimple_build_eh_must_not_throw (terminate_fn);
297 gimple_seq_add_stmt_without_update (&catch_, mnt);
298 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
299
300 gimple_seq_add_stmt_without_update (pre_p, mnt);
301 if (temp)
302 {
303 *expr_p = temp;
304 return GS_OK;
305 }
306
307 *expr_p = NULL;
308 return GS_ALL_DONE;
309 }
310
311 /* Return TRUE if an operand (OP) of a given TYPE being copied is
312 really just an empty class copy.
313
314 Check that the operand has a simple form so that TARGET_EXPRs and
315 non-empty CONSTRUCTORs get reduced properly, and we leave the
316 return slot optimization alone because it isn't a copy. */
317
318 bool
319 simple_empty_class_p (tree type, tree op, tree_code code)
320 {
321 if (TREE_CODE (op) == COMPOUND_EXPR)
322 return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
323 if (SIMPLE_TARGET_EXPR_P (op)
324 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
325 /* The TARGET_EXPR is itself a simple copy, look through it. */
326 return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
327 return
328 (TREE_CODE (op) == EMPTY_CLASS_EXPR
329 || code == MODIFY_EXPR
330 || is_gimple_lvalue (op)
331 || INDIRECT_REF_P (op)
332 || (TREE_CODE (op) == CONSTRUCTOR
333 && CONSTRUCTOR_NELTS (op) == 0)
334 || (TREE_CODE (op) == CALL_EXPR
335 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
336 && !TREE_CLOBBER_P (op)
337 && is_really_empty_class (type, /*ignore_vptr*/true);
338 }
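/* Illustrative example (assumed, not from this file): for a copy of an
   empty class such as

     struct E { };
     E a, b;
     a = b;   // RHS is a simple empty-class lvalue

   simple_empty_class_p returns true, and the MODIFY_EXPR handling in
   cp_gimplify_expr below elides the copy, evaluating the operands only
   for their side effects.  */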
339
340 /* Returns true if evaluating E as an lvalue has side-effects;
341 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
342 have side-effects until there is a read or write through it. */
343
344 static bool
345 lvalue_has_side_effects (tree e)
346 {
347 if (!TREE_SIDE_EFFECTS (e))
348 return false;
349 while (handled_component_p (e))
350 {
351 if (TREE_CODE (e) == ARRAY_REF
352 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
353 return true;
354 e = TREE_OPERAND (e, 0);
355 }
356 if (DECL_P (e))
357 /* Just naming a variable has no side-effects. */
358 return false;
359 else if (INDIRECT_REF_P (e))
360 /* Similarly, indirection has no side-effects. */
361 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
362 else
363 /* For anything else, trust TREE_SIDE_EFFECTS. */
364 return TREE_SIDE_EFFECTS (e);
365 }
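/* A hedged sketch of the distinction drawn above (assumed example):

     volatile int v;
     int a[10], i = 0;

   The lvalue "v" has TREE_SIDE_EFFECTS, but merely naming it performs no
   access, so lvalue_has_side_effects returns false; "a[i++]" has a side
   effect in its index expression, so it returns true.  */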
366
367 /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
368 by expressions with side-effects in other operands. */
369
370 static enum gimplify_status
371 gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
372 bool (*gimple_test_f) (tree))
373 {
374 enum gimplify_status t
375 = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
376 if (t == GS_ERROR)
377 return GS_ERROR;
378 else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
379 *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
380 return t;
381 }
382
383 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
384
385 int
386 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
387 {
388 int saved_stmts_are_full_exprs_p = 0;
389 location_t loc = cp_expr_loc_or_input_loc (*expr_p);
390 enum tree_code code = TREE_CODE (*expr_p);
391 enum gimplify_status ret;
392
393 if (STATEMENT_CODE_P (code))
394 {
395 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
396 current_stmt_tree ()->stmts_are_full_exprs_p
397 = STMT_IS_FULL_EXPR_P (*expr_p);
398 }
399
400 switch (code)
401 {
402 case AGGR_INIT_EXPR:
403 simplify_aggr_init_expr (expr_p);
404 ret = GS_OK;
405 break;
406
407 case VEC_INIT_EXPR:
408 {
409 location_t loc = input_location;
410 tree init = VEC_INIT_EXPR_INIT (*expr_p);
411 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
412 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
413 input_location = EXPR_LOCATION (*expr_p);
414 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
415 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
416 from_array,
417 tf_warning_or_error);
418 hash_set<tree> pset;
419 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
420 cp_genericize_tree (expr_p, false);
421 copy_if_shared (expr_p);
422 ret = GS_OK;
423 input_location = loc;
424 }
425 break;
426
427 case THROW_EXPR:
428 /* FIXME communicate throw type to back end, probably by moving
429 THROW_EXPR into ../tree.def. */
430 *expr_p = TREE_OPERAND (*expr_p, 0);
431 ret = GS_OK;
432 break;
433
434 case MUST_NOT_THROW_EXPR:
435 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
436 break;
437
438 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
439 LHS of an assignment might also be involved in the RHS, as in bug
440 25979. */
441 case INIT_EXPR:
442 cp_gimplify_init_expr (expr_p);
443 if (TREE_CODE (*expr_p) != INIT_EXPR)
444 return GS_OK;
445 /* Fall through. */
446 case MODIFY_EXPR:
447 modify_expr_case:
448 {
449 /* If the back end isn't clever enough to know that the lhs and rhs
450 types are the same, add an explicit conversion. */
451 tree op0 = TREE_OPERAND (*expr_p, 0);
452 tree op1 = TREE_OPERAND (*expr_p, 1);
453
454 if (!error_operand_p (op0)
455 && !error_operand_p (op1)
456 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
457 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
458 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
459 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
460 TREE_TYPE (op0), op1);
461
462 else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
463 {
464 while (TREE_CODE (op1) == TARGET_EXPR)
465 /* We're disconnecting the initializer from its target,
466 don't create a temporary. */
467 op1 = TARGET_EXPR_INITIAL (op1);
468
469 /* Remove any copies of empty classes. Also drop volatile
470 variables on the RHS to avoid infinite recursion from
471 gimplify_expr trying to load the value. */
472 if (TREE_SIDE_EFFECTS (op1))
473 {
474 if (TREE_THIS_VOLATILE (op1)
475 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
476 op1 = build_fold_addr_expr (op1);
477
478 gimplify_and_add (op1, pre_p);
479 }
480 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
481 is_gimple_lvalue, fb_lvalue);
482 *expr_p = TREE_OPERAND (*expr_p, 0);
483 if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
484 /* Avoid 'return *<retval>;' */
485 *expr_p = TREE_OPERAND (*expr_p, 0);
486 }
487 /* P0145 says that the RHS is sequenced before the LHS.
488 gimplify_modify_expr gimplifies the RHS before the LHS, but that
489 isn't quite strong enough in two cases:
490
491 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
492 mean it's evaluated after the LHS.
493
494 2) the value calculation of the RHS is also sequenced before the
495 LHS, so for scalar assignment we need to preevaluate if the
496 RHS could be affected by LHS side-effects even if it has no
497 side-effects of its own. We don't need this for classes because
498 class assignment takes its RHS by reference. */
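/* An illustrative case for the preevaluation below (assumed example,
   not from this file):

     int i = 0, a[2] = { 0, 0 };
     a[i++] = bar ();   // C++17: bar () is sequenced before a[i++]

   The LHS has a side effect (i++) and the RHS is a CALL_EXPR, so the
   RHS is forced into a temporary; otherwise gimplification would leave
   the call on the RHS and evaluate it after the LHS.  */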
499 else if (flag_strong_eval_order > 1
500 && TREE_CODE (*expr_p) == MODIFY_EXPR
501 && lvalue_has_side_effects (op0)
502 && (TREE_CODE (op1) == CALL_EXPR
503 || (SCALAR_TYPE_P (TREE_TYPE (op1))
504 && !TREE_CONSTANT (op1))))
505 TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
506 }
507 ret = GS_OK;
508 break;
509
510 case EMPTY_CLASS_EXPR:
511 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
512 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
513 ret = GS_OK;
514 break;
515
516 case BASELINK:
517 *expr_p = BASELINK_FUNCTIONS (*expr_p);
518 ret = GS_OK;
519 break;
520
521 case TRY_BLOCK:
522 genericize_try_block (expr_p);
523 ret = GS_OK;
524 break;
525
526 case HANDLER:
527 genericize_catch_block (expr_p);
528 ret = GS_OK;
529 break;
530
531 case EH_SPEC_BLOCK:
532 genericize_eh_spec_block (expr_p);
533 ret = GS_OK;
534 break;
535
536 case USING_STMT:
537 gcc_unreachable ();
538
539 case FOR_STMT:
540 case WHILE_STMT:
541 case DO_STMT:
542 case SWITCH_STMT:
543 case CONTINUE_STMT:
544 case BREAK_STMT:
545 gcc_unreachable ();
546
547 case OMP_FOR:
548 case OMP_SIMD:
549 case OMP_DISTRIBUTE:
550 case OMP_LOOP:
551 case OMP_TASKLOOP:
552 ret = cp_gimplify_omp_for (expr_p, pre_p);
553 break;
554
555 case EXPR_STMT:
556 gimplify_expr_stmt (expr_p);
557 ret = GS_OK;
558 break;
559
560 case UNARY_PLUS_EXPR:
561 {
562 tree arg = TREE_OPERAND (*expr_p, 0);
563 tree type = TREE_TYPE (*expr_p);
564 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
565 : arg;
566 ret = GS_OK;
567 }
568 break;
569
570 case CALL_EXPR:
571 ret = GS_OK;
572 if (flag_strong_eval_order == 2
573 && CALL_EXPR_FN (*expr_p)
574 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
575 && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
576 {
577 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
578 enum gimplify_status t
579 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
580 is_gimple_call_addr);
581 if (t == GS_ERROR)
582 ret = GS_ERROR;
583 /* GIMPLE considers most pointer conversions useless, but for
584 calls we actually care about the exact function pointer type. */
585 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
586 CALL_EXPR_FN (*expr_p)
587 = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
588 }
589 if (!CALL_EXPR_FN (*expr_p))
590 /* Internal function call. */;
591 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
592 {
593 /* This is a call to a (compound) assignment operator that used
594 the operator syntax; gimplify the RHS first. */
595 gcc_assert (call_expr_nargs (*expr_p) == 2);
596 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
597 enum gimplify_status t
598 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
599 if (t == GS_ERROR)
600 ret = GS_ERROR;
601 }
602 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
603 {
604 /* Leave the last argument for gimplify_call_expr, to avoid problems
605 with __builtin_va_arg_pack(). */
606 int nargs = call_expr_nargs (*expr_p) - 1;
607 for (int i = 0; i < nargs; ++i)
608 {
609 enum gimplify_status t
610 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
611 if (t == GS_ERROR)
612 ret = GS_ERROR;
613 }
614 }
615 else if (flag_strong_eval_order
616 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
617 {
618 /* If flag_strong_eval_order, evaluate the object argument first. */
619 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
620 if (INDIRECT_TYPE_P (fntype))
621 fntype = TREE_TYPE (fntype);
622 if (TREE_CODE (fntype) == METHOD_TYPE)
623 {
624 enum gimplify_status t
625 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
626 if (t == GS_ERROR)
627 ret = GS_ERROR;
628 }
629 }
630 if (ret != GS_ERROR)
631 {
632 tree decl = cp_get_callee_fndecl_nofold (*expr_p);
633 if (decl
634 && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
635 BUILT_IN_FRONTEND))
636 *expr_p = boolean_false_node;
637 else if (decl
638 && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
639 BUILT_IN_FRONTEND))
640 *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
641 }
642 break;
643
644 case TARGET_EXPR:
645 /* A TARGET_EXPR that expresses direct-initialization should have been
646 elided by cp_gimplify_init_expr. */
647 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
648 ret = GS_UNHANDLED;
649 break;
650
651 case RETURN_EXPR:
652 if (TREE_OPERAND (*expr_p, 0)
653 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
654 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
655 {
656 expr_p = &TREE_OPERAND (*expr_p, 0);
657 /* Avoid going through the INIT_EXPR case, which can
658 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
659 goto modify_expr_case;
660 }
661 /* Fall through. */
662
663 default:
664 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
665 break;
666 }
667
668 /* Restore saved state. */
669 if (STATEMENT_CODE_P (code))
670 current_stmt_tree ()->stmts_are_full_exprs_p
671 = saved_stmts_are_full_exprs_p;
672
673 return ret;
674 }
675
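/* Return true if T is a PARM_DECL or RESULT_DECL passed or returned by
   invisible reference, i.e. with DECL_BY_REFERENCE set.  For example
   (illustrative only), a by-value parameter of a class type with a
   non-trivial copy constructor or destructor is typically passed this
   way.  */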
676 static inline bool
677 is_invisiref_parm (const_tree t)
678 {
679 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
680 && DECL_BY_REFERENCE (t));
681 }
682
683 /* Return true if the UIDs of both int tree maps are equal. */
684
685 bool
686 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
687 {
688 return (a->uid == b->uid);
689 }
690
691 /* Hash a UID in a cxx_int_tree_map. */
692
693 unsigned int
694 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
695 {
696 return item->uid;
697 }
698
699 /* A stable comparison routine for use with splay trees and DECLs. */
700
701 static int
702 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
703 {
704 tree a = (tree) xa;
705 tree b = (tree) xb;
706
707 return DECL_UID (a) - DECL_UID (b);
708 }
709
710 /* OpenMP context during genericization. */
711
712 struct cp_genericize_omp_taskreg
713 {
714 bool is_parallel;
715 bool default_shared;
716 struct cp_genericize_omp_taskreg *outer;
717 splay_tree variables;
718 };
719
720 /* Return true if genericization should try to determine if
721 DECL is firstprivate or shared within task regions. */
722
723 static bool
724 omp_var_to_track (tree decl)
725 {
726 tree type = TREE_TYPE (decl);
727 if (is_invisiref_parm (decl))
728 type = TREE_TYPE (type);
729 else if (TYPE_REF_P (type))
730 type = TREE_TYPE (type);
731 while (TREE_CODE (type) == ARRAY_TYPE)
732 type = TREE_TYPE (type);
733 if (type == error_mark_node || !CLASS_TYPE_P (type))
734 return false;
735 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
736 return false;
737 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
738 return false;
739 return true;
740 }
741
742 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
743
744 static void
745 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
746 {
747 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
748 (splay_tree_key) decl);
749 if (n == NULL)
750 {
751 int flags = OMP_CLAUSE_DEFAULT_SHARED;
752 if (omp_ctx->outer)
753 omp_cxx_notice_variable (omp_ctx->outer, decl);
754 if (!omp_ctx->default_shared)
755 {
756 struct cp_genericize_omp_taskreg *octx;
757
758 for (octx = omp_ctx->outer; octx; octx = octx->outer)
759 {
760 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
761 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
762 {
763 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
764 break;
765 }
766 if (octx->is_parallel)
767 break;
768 }
769 if (octx == NULL
770 && (TREE_CODE (decl) == PARM_DECL
771 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
772 && DECL_CONTEXT (decl) == current_function_decl)))
773 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
774 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
775 {
776 /* DECL is implicitly determined firstprivate in
777 the current task construct. Ensure copy ctor and
778 dtor are instantiated, because during gimplification
779 it will already be too late. */
780 tree type = TREE_TYPE (decl);
781 if (is_invisiref_parm (decl))
782 type = TREE_TYPE (type);
783 else if (TYPE_REF_P (type))
784 type = TREE_TYPE (type);
785 while (TREE_CODE (type) == ARRAY_TYPE)
786 type = TREE_TYPE (type);
787 get_copy_ctor (type, tf_none);
788 get_dtor (type, tf_none);
789 }
790 }
791 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
792 }
793 }
794
795 /* Genericization context. */
796
797 struct cp_genericize_data
798 {
799 hash_set<tree> *p_set;
800 vec<tree> bind_expr_stack;
801 struct cp_genericize_omp_taskreg *omp_ctx;
802 tree try_block;
803 bool no_sanitize_p;
804 bool handle_invisiref_parm_p;
805 };
806
807 /* Perform any pre-gimplification folding of C++ front end trees to
808 GENERIC.
809 Note: The folding of non-OMP cases is something to move into
810 the middle end. For now, since most foldings are done only on GENERIC
811 in fold-const, we need to perform this before the transformation to
812 GIMPLE form. */
813
814 static tree
815 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
816 {
817 tree stmt;
818 enum tree_code code;
819
820 *stmt_p = stmt = cp_fold (*stmt_p);
821
822 if (((hash_set<tree> *) data)->add (stmt))
823 {
824 /* Don't walk subtrees of stmts we've already walked once, otherwise
825 we can have exponential complexity with e.g. lots of nested
826 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will always
827 return the same tree, whose subtrees were already walked the first
828 time cp_fold_r was called on it. */
829 *walk_subtrees = 0;
830 return NULL;
831 }
832
833 code = TREE_CODE (stmt);
834 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
835 || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
836 {
837 tree x;
838 int i, n;
839
840 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
841 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
842 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
843 x = OMP_FOR_COND (stmt);
844 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
845 {
846 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
847 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
848 }
849 else if (x && TREE_CODE (x) == TREE_VEC)
850 {
851 n = TREE_VEC_LENGTH (x);
852 for (i = 0; i < n; i++)
853 {
854 tree o = TREE_VEC_ELT (x, i);
855 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
856 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
857 }
858 }
859 x = OMP_FOR_INCR (stmt);
860 if (x && TREE_CODE (x) == TREE_VEC)
861 {
862 n = TREE_VEC_LENGTH (x);
863 for (i = 0; i < n; i++)
864 {
865 tree o = TREE_VEC_ELT (x, i);
866 if (o && TREE_CODE (o) == MODIFY_EXPR)
867 o = TREE_OPERAND (o, 1);
868 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
869 || TREE_CODE (o) == POINTER_PLUS_EXPR))
870 {
871 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
872 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
873 }
874 }
875 }
876 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
877 *walk_subtrees = 0;
878 }
879
880 return NULL;
881 }
882
883 /* Fold ALL the trees! FIXME we should be able to remove this, but
884 apparently that still causes optimization regressions. */
885
886 void
887 cp_fold_function (tree fndecl)
888 {
889 hash_set<tree> pset;
890 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
891 }
892
893 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
894
895 static tree genericize_spaceship (tree expr)
896 {
897 iloc_sentinel s (cp_expr_location (expr));
898 tree type = TREE_TYPE (expr);
899 tree op0 = TREE_OPERAND (expr, 0);
900 tree op1 = TREE_OPERAND (expr, 1);
901 return genericize_spaceship (type, op0, op1);
902 }
903
904 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
905 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
906 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
907 NULL_TREE; otherwise return a COMPOUND_EXPR of the DECL_EXPR and EXPR. */
908
909 tree
910 predeclare_vla (tree expr)
911 {
912 tree type = TREE_TYPE (expr);
913 if (type == error_mark_node)
914 return expr;
915 if (is_typedef_decl (expr))
916 type = DECL_ORIGINAL_TYPE (expr);
917
918 /* We need to strip pointers for gimplify_type_sizes. */
919 tree vla = type;
920 while (POINTER_TYPE_P (vla))
921 {
922 if (TYPE_NAME (vla))
923 return expr;
924 vla = TREE_TYPE (vla);
925 }
926 if (vla == type || TYPE_NAME (vla)
927 || !variably_modified_type_p (vla, NULL_TREE))
928 return expr;
929
930 tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
931 DECL_ARTIFICIAL (decl) = 1;
932 TYPE_NAME (vla) = decl;
933 tree dexp = build_stmt (input_location, DECL_EXPR, decl);
934 if (DECL_P (expr))
935 {
936 add_stmt (dexp);
937 return NULL_TREE;
938 }
939 else
940 {
941 expr = build2 (COMPOUND_EXPR, type, dexp, expr);
942 return expr;
943 }
944 }
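/* Minimal sketch of the c++/88256 situation handled above (assumed
   example, not from this file): with the GNU VLA extension,

     void f (int n, void *p)
     {
       int (*q)[n] = (int (*)[n]) p;   // cast to pointer-to-VLA
     }

   the anonymous VLA type "int [n]" gets a DECL_EXPR for an artificial
   TYPE_DECL prepended, so gimplify_type_sizes sees the type and the
   middle end is not confused by an otherwise-unseen variably modified
   type.  */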
945
946 /* Perform any pre-gimplification lowering of C++ front end trees to
947 GENERIC. */
948
949 static tree
950 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
951 {
952 tree stmt = *stmt_p;
953 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
954 hash_set<tree> *p_set = wtd->p_set;
955
956 /* If in an OpenMP context, note var uses. */
957 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
958 && (VAR_P (stmt)
959 || TREE_CODE (stmt) == PARM_DECL
960 || TREE_CODE (stmt) == RESULT_DECL)
961 && omp_var_to_track (stmt))
962 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
963
964 /* Don't dereference parms in a thunk, pass the references through. */
965 if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
966 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
967 {
968 *walk_subtrees = 0;
969 return NULL;
970 }
971
972 /* Dereference invisible reference parms. */
973 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
974 {
975 *stmt_p = convert_from_reference (stmt);
976 p_set->add (*stmt_p);
977 *walk_subtrees = 0;
978 return NULL;
979 }
980
981 /* Map block scope extern declarations to visible declarations with the
982 same name and type in outer scopes if any. */
983 if (cp_function_chain->extern_decl_map
984 && VAR_OR_FUNCTION_DECL_P (stmt)
985 && DECL_EXTERNAL (stmt))
986 {
987 struct cxx_int_tree_map *h, in;
988 in.uid = DECL_UID (stmt);
989 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
990 if (h)
991 {
992 *stmt_p = h->to;
993 TREE_USED (h->to) |= TREE_USED (stmt);
994 *walk_subtrees = 0;
995 return NULL;
996 }
997 }
998
999 if (TREE_CODE (stmt) == INTEGER_CST
1000 && TYPE_REF_P (TREE_TYPE (stmt))
1001 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1002 && !wtd->no_sanitize_p)
1003 {
1004 ubsan_maybe_instrument_reference (stmt_p);
1005 if (*stmt_p != stmt)
1006 {
1007 *walk_subtrees = 0;
1008 return NULL_TREE;
1009 }
1010 }
1011
1012 /* Other than invisiref parms, don't walk the same tree twice. */
1013 if (p_set->contains (stmt))
1014 {
1015 *walk_subtrees = 0;
1016 return NULL_TREE;
1017 }
1018
1019 switch (TREE_CODE (stmt))
1020 {
1021 case ADDR_EXPR:
1022 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1023 {
1024 /* If in an OpenMP context, note var uses. */
1025 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1026 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1027 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1028 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1029 *walk_subtrees = 0;
1030 }
1031 break;
1032
1033 case RETURN_EXPR:
1034 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1035 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1036 *walk_subtrees = 0;
1037 break;
1038
1039 case OMP_CLAUSE:
1040 switch (OMP_CLAUSE_CODE (stmt))
1041 {
1042 case OMP_CLAUSE_LASTPRIVATE:
1043 /* Don't dereference an invisiref in OpenMP clauses. */
1044 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1045 {
1046 *walk_subtrees = 0;
1047 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1048 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1049 cp_genericize_r, data, NULL);
1050 }
1051 break;
1052 case OMP_CLAUSE_PRIVATE:
1053 /* Don't dereference an invisiref in OpenMP clauses. */
1054 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1055 *walk_subtrees = 0;
1056 else if (wtd->omp_ctx != NULL)
1057 {
1058 /* Private clause doesn't cause any references to the
1059 var in outer contexts, avoid calling
1060 omp_cxx_notice_variable for it. */
1061 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1062 wtd->omp_ctx = NULL;
1063 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1064 data, NULL);
1065 wtd->omp_ctx = old;
1066 *walk_subtrees = 0;
1067 }
1068 break;
1069 case OMP_CLAUSE_SHARED:
1070 case OMP_CLAUSE_FIRSTPRIVATE:
1071 case OMP_CLAUSE_COPYIN:
1072 case OMP_CLAUSE_COPYPRIVATE:
1073 case OMP_CLAUSE_INCLUSIVE:
1074 case OMP_CLAUSE_EXCLUSIVE:
1075 /* Don't dereference an invisiref in OpenMP clauses. */
1076 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1077 *walk_subtrees = 0;
1078 break;
1079 case OMP_CLAUSE_REDUCTION:
1080 case OMP_CLAUSE_IN_REDUCTION:
1081 case OMP_CLAUSE_TASK_REDUCTION:
1082 /* Don't dereference an invisiref in reduction clause's
1083 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1084 still needs to be genericized. */
1085 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1086 {
1087 *walk_subtrees = 0;
1088 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1089 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1090 cp_genericize_r, data, NULL);
1091 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1092 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1093 cp_genericize_r, data, NULL);
1094 }
1095 break;
1096 default:
1097 break;
1098 }
1099 break;
1100
1101 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1102 to lower this construct before scanning it, so we need to lower these
1103 before doing anything else. */
1104 case CLEANUP_STMT:
1105 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1106 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1107 : TRY_FINALLY_EXPR,
1108 void_type_node,
1109 CLEANUP_BODY (stmt),
1110 CLEANUP_EXPR (stmt));
1111 break;
1112
1113 case IF_STMT:
1114 genericize_if_stmt (stmt_p);
1115 /* *stmt_p has changed, tail recurse to handle it again. */
1116 return cp_genericize_r (stmt_p, walk_subtrees, data);
1117
1118 /* COND_EXPR might have incompatible types in branches if one or both
1119 arms are bitfields. Fix it up now. */
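/* Illustrative example (assumed, not from this file):

     struct S { int b : 3; } s;
     int x, cond;
     ... cond ? s.b : x ...

   The bitfield arm may have had its type lowered, so a conversion is
   inserted below when the arm's type no longer matches the COND_EXPR's
   type.  */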
1120 case COND_EXPR:
1121 {
1122 tree type_left
1123 = (TREE_OPERAND (stmt, 1)
1124 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1125 : NULL_TREE);
1126 tree type_right
1127 = (TREE_OPERAND (stmt, 2)
1128 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1129 : NULL_TREE);
1130 if (type_left
1131 && !useless_type_conversion_p (TREE_TYPE (stmt),
1132 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1133 {
1134 TREE_OPERAND (stmt, 1)
1135 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1136 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1137 type_left));
1138 }
1139 if (type_right
1140 && !useless_type_conversion_p (TREE_TYPE (stmt),
1141 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1142 {
1143 TREE_OPERAND (stmt, 2)
1144 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1145 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1146 type_right));
1147 }
1148 }
1149 break;
1150
1151 case BIND_EXPR:
1152 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1153 {
1154 tree decl;
1155 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1156 if (VAR_P (decl)
1157 && !DECL_EXTERNAL (decl)
1158 && omp_var_to_track (decl))
1159 {
1160 splay_tree_node n
1161 = splay_tree_lookup (wtd->omp_ctx->variables,
1162 (splay_tree_key) decl);
1163 if (n == NULL)
1164 splay_tree_insert (wtd->omp_ctx->variables,
1165 (splay_tree_key) decl,
1166 TREE_STATIC (decl)
1167 ? OMP_CLAUSE_DEFAULT_SHARED
1168 : OMP_CLAUSE_DEFAULT_PRIVATE);
1169 }
1170 }
1171 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1172 {
1173 /* The point here is to not sanitize static initializers. */
1174 bool no_sanitize_p = wtd->no_sanitize_p;
1175 wtd->no_sanitize_p = true;
1176 for (tree decl = BIND_EXPR_VARS (stmt);
1177 decl;
1178 decl = DECL_CHAIN (decl))
1179 if (VAR_P (decl)
1180 && TREE_STATIC (decl)
1181 && DECL_INITIAL (decl))
1182 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1183 wtd->no_sanitize_p = no_sanitize_p;
1184 }
1185 wtd->bind_expr_stack.safe_push (stmt);
1186 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1187 cp_genericize_r, data, NULL);
1188 wtd->bind_expr_stack.pop ();
1189 break;
1190
1191 case USING_STMT:
1192 {
1193 tree block = NULL_TREE;
1194
1195 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1196 BLOCK, and append an IMPORTED_DECL to its
1197 BLOCK_VARS chained list. */
1198 if (wtd->bind_expr_stack.exists ())
1199 {
1200 int i;
1201 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1202 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1203 break;
1204 }
1205 if (block)
1206 {
1207 tree decl = TREE_OPERAND (stmt, 0);
1208 gcc_assert (decl);
1209
1210 if (undeduced_auto_decl (decl))
1211 /* Omit from the GENERIC, the back-end can't handle it. */;
1212 else
1213 {
1214 tree using_directive = make_node (IMPORTED_DECL);
1215 TREE_TYPE (using_directive) = void_type_node;
1216 DECL_CONTEXT (using_directive) = current_function_decl;
1217
1218 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1219 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1220 BLOCK_VARS (block) = using_directive;
1221 }
1222 }
1223 /* The USING_STMT won't appear in GENERIC. */
1224 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1225 *walk_subtrees = 0;
1226 }
1227 break;
1228
1229 case DECL_EXPR:
1230 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1231 {
1232 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1233 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1234 *walk_subtrees = 0;
1235 }
1236 else
1237 {
1238 tree d = DECL_EXPR_DECL (stmt);
1239 if (VAR_P (d))
1240 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1241 }
1242 break;
1243
1244 case OMP_PARALLEL:
1245 case OMP_TASK:
1246 case OMP_TASKLOOP:
1247 {
1248 struct cp_genericize_omp_taskreg omp_ctx;
1249 tree c, decl;
1250 splay_tree_node n;
1251
1252 *walk_subtrees = 0;
1253 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1254 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1255 omp_ctx.default_shared = omp_ctx.is_parallel;
1256 omp_ctx.outer = wtd->omp_ctx;
1257 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1258 wtd->omp_ctx = &omp_ctx;
1259 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1260 switch (OMP_CLAUSE_CODE (c))
1261 {
1262 case OMP_CLAUSE_SHARED:
1263 case OMP_CLAUSE_PRIVATE:
1264 case OMP_CLAUSE_FIRSTPRIVATE:
1265 case OMP_CLAUSE_LASTPRIVATE:
1266 decl = OMP_CLAUSE_DECL (c);
1267 if (decl == error_mark_node || !omp_var_to_track (decl))
1268 break;
1269 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1270 if (n != NULL)
1271 break;
1272 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1273 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1274 ? OMP_CLAUSE_DEFAULT_SHARED
1275 : OMP_CLAUSE_DEFAULT_PRIVATE);
1276 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1277 omp_cxx_notice_variable (omp_ctx.outer, decl);
1278 break;
1279 case OMP_CLAUSE_DEFAULT:
1280 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1281 omp_ctx.default_shared = true;
1282 default:
1283 break;
1284 }
1285 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1286 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1287 cp_genericize_r, cp_walk_subtrees);
1288 else
1289 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1290 wtd->omp_ctx = omp_ctx.outer;
1291 splay_tree_delete (omp_ctx.variables);
1292 }
1293 break;
1294
1295 case OMP_TARGET:
1296 cfun->has_omp_target = true;
1297 break;
1298
1299 case TRY_BLOCK:
1300 {
1301 *walk_subtrees = 0;
1302 tree try_block = wtd->try_block;
1303 wtd->try_block = stmt;
1304 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1305 wtd->try_block = try_block;
1306 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1307 }
1308 break;
1309
1310 case MUST_NOT_THROW_EXPR:
1311 /* MUST_NOT_THROW_COND might be something else with TM. */
1312 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1313 {
1314 *walk_subtrees = 0;
1315 tree try_block = wtd->try_block;
1316 wtd->try_block = stmt;
1317 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1318 wtd->try_block = try_block;
1319 }
1320 break;
1321
1322 case THROW_EXPR:
1323 {
1324 location_t loc = location_of (stmt);
1325 if (TREE_NO_WARNING (stmt))
1326 /* Never mind. */;
1327 else if (wtd->try_block)
1328 {
1329 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1330 {
1331 auto_diagnostic_group d;
1332 if (warning_at (loc, OPT_Wterminate,
1333 "%<throw%> will always call %<terminate%>")
1334 && cxx_dialect >= cxx11
1335 && DECL_DESTRUCTOR_P (current_function_decl))
1336 inform (loc, "in C++11 destructors default to %<noexcept%>");
1337 }
1338 }
1339 else
1340 {
1341 if (warn_cxx11_compat && cxx_dialect < cxx11
1342 && DECL_DESTRUCTOR_P (current_function_decl)
1343 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1344 == NULL_TREE)
1345 && (get_defaulted_eh_spec (current_function_decl)
1346 == empty_except_spec))
1347 warning_at (loc, OPT_Wc__11_compat,
1348 "in C++11 this %<throw%> will call %<terminate%> "
1349 "because destructors default to %<noexcept%>");
1350 }
1351 }
1352 break;
1353
1354 case CONVERT_EXPR:
1355 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1356 break;
1357
1358 case SPACESHIP_EXPR:
1359 *stmt_p = genericize_spaceship (*stmt_p);
1360 break;
1361
1362 case PTRMEM_CST:
1363 /* By the time we get here we're handing off to the back end, so we don't
1364 need or want to preserve PTRMEM_CST anymore. */
1365 *stmt_p = cplus_expand_constant (stmt);
1366 *walk_subtrees = 0;
1367 break;
1368
1369 case MEM_REF:
1370 /* For MEM_REF, make sure not to sanitize the second operand even
1371 if it has reference type. It is just an offset with a type
1372 holding other information. There is no other processing we
1373 need to do for INTEGER_CSTs, so just ignore the second argument
1374 unconditionally. */
1375 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1376 *walk_subtrees = 0;
1377 break;
1378
1379 case NOP_EXPR:
1380 *stmt_p = predeclare_vla (*stmt_p);
1381 if (!wtd->no_sanitize_p
1382 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1383 && TYPE_REF_P (TREE_TYPE (stmt)))
1384 ubsan_maybe_instrument_reference (stmt_p);
1385 break;
1386
1387 case CALL_EXPR:
1388 /* Evaluate function concept checks instead of treating them as
1389 normal functions. */
1390 if (concept_check_p (stmt))
1391 {
1392 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1393 *walk_subtrees = 0;
1394 break;
1395 }
1396
1397 if (!wtd->no_sanitize_p
1398 && sanitize_flags_p ((SANITIZE_NULL
1399 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1400 {
1401 tree fn = CALL_EXPR_FN (stmt);
1402 if (fn != NULL_TREE
1403 && !error_operand_p (fn)
1404 && INDIRECT_TYPE_P (TREE_TYPE (fn))
1405 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1406 {
1407 bool is_ctor
1408 = TREE_CODE (fn) == ADDR_EXPR
1409 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1410 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1411 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1412 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1413 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1414 cp_ubsan_maybe_instrument_member_call (stmt);
1415 }
1416 else if (fn == NULL_TREE
1417 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1418 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1419 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1420 *walk_subtrees = 0;
1421 }
1422 /* Fall through. */
1423 case AGGR_INIT_EXPR:
1424 /* For calls to a multi-versioned function, overload resolution
1425 returns the function with the highest target priority, that is,
1426 the version that will be checked for dispatching first. If this
1427 version is inlinable, a direct call to this version can be made;
1428 otherwise the call should go through the dispatcher. */
1429 {
1430 tree fn = cp_get_callee_fndecl_nofold (stmt);
1431 if (fn && DECL_FUNCTION_VERSIONED (fn)
1432 && (current_function_decl == NULL
1433 || !targetm.target_option.can_inline_p (current_function_decl,
1434 fn)))
1435 if (tree dis = get_function_version_dispatcher (fn))
1436 {
1437 mark_versions_used (dis);
1438 dis = build_address (dis);
1439 if (TREE_CODE (stmt) == CALL_EXPR)
1440 CALL_EXPR_FN (stmt) = dis;
1441 else
1442 AGGR_INIT_EXPR_FN (stmt) = dis;
1443 }
1444 }
1445 break;
1446
1447 case TARGET_EXPR:
1448 if (TARGET_EXPR_INITIAL (stmt)
1449 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1450 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1451 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1452 break;
1453
1454 case REQUIRES_EXPR:
1455 /* Emit the value of the requires-expression. */
1456 *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
1457 boolean_type_node);
1458 *walk_subtrees = 0;
1459 break;
1460
1461 case TEMPLATE_ID_EXPR:
1462 gcc_assert (concept_check_p (stmt));
1463 /* Emit the value of the concept check. */
1464 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1465 *walk_subtrees = 0;
1466 break;
1467
1468 case STATEMENT_LIST:
1469 if (TREE_SIDE_EFFECTS (stmt))
1470 {
1471 tree_stmt_iterator i;
1472 int nondebug_stmts = 0;
1473 bool clear_side_effects = true;
1474 /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
1475 transforming an IF_STMT into a COND_EXPR. If such a stmt
1476 appears in a STATEMENT_LIST that contains only that stmt and
1477 some DEBUG_BEGIN_STMTs (without -g the STATEMENT_LIST wouldn't
1478 be present at all and the resulting expression wouldn't have
1479 TREE_SIDE_EFFECTS set), make sure to clear it even on the
1480 STATEMENT_LIST in such cases. */
1481 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1482 {
1483 tree t = tsi_stmt (i);
1484 if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
1485 nondebug_stmts++;
1486 cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
1487 if (TREE_CODE (t) != DEBUG_BEGIN_STMT
1488 && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
1489 clear_side_effects = false;
1490 }
1491 if (clear_side_effects)
1492 TREE_SIDE_EFFECTS (stmt) = 0;
1493 *walk_subtrees = 0;
1494 }
1495 break;
1496
1497 case OMP_DISTRIBUTE:
1498 /* Need to explicitly instantiate copy ctors on class iterators of
1499 composite distribute parallel for. */
1500 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1501 {
1502 tree *data[4] = { NULL, NULL, NULL, NULL };
1503 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1504 find_combined_omp_for, data, NULL);
1505 if (inner != NULL_TREE
1506 && TREE_CODE (inner) == OMP_FOR)
1507 {
1508 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1509 if (OMP_FOR_ORIG_DECLS (inner)
1510 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1511 i)) == TREE_LIST
1512 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1513 i)))
1514 {
1515 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1516 /* Class iterators aren't allowed on OMP_SIMD, so the only
1517 case we need to solve is distribute parallel for. */
1518 gcc_assert (TREE_CODE (inner) == OMP_FOR
1519 && data[1]);
1520 tree orig_decl = TREE_PURPOSE (orig);
1521 tree c, cl = NULL_TREE;
1522 for (c = OMP_FOR_CLAUSES (inner);
1523 c; c = OMP_CLAUSE_CHAIN (c))
1524 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1525 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1526 && OMP_CLAUSE_DECL (c) == orig_decl)
1527 {
1528 cl = c;
1529 break;
1530 }
1531 if (cl == NULL_TREE)
1532 {
1533 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1534 c; c = OMP_CLAUSE_CHAIN (c))
1535 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1536 && OMP_CLAUSE_DECL (c) == orig_decl)
1537 {
1538 cl = c;
1539 break;
1540 }
1541 }
1542 if (cl)
1543 {
1544 orig_decl = require_complete_type (orig_decl);
1545 tree inner_type = TREE_TYPE (orig_decl);
1546 if (orig_decl == error_mark_node)
1547 continue;
1548 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1549 inner_type = TREE_TYPE (inner_type);
1550
1551 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1552 inner_type = TREE_TYPE (inner_type);
1553 get_copy_ctor (inner_type, tf_warning_or_error);
1554 }
1555 }
1556 }
1557 }
1558 /* FALLTHRU */
1559
1560 case FOR_STMT:
1561 case WHILE_STMT:
1562 case DO_STMT:
1563 case SWITCH_STMT:
1564 case CONTINUE_STMT:
1565 case BREAK_STMT:
1566 case OMP_FOR:
1567 case OMP_SIMD:
1568 case OMP_LOOP:
1569 case OACC_LOOP:
1570 /* These cases are handled by shared code. */
1571 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1572 cp_genericize_r, cp_walk_subtrees);
1573 break;
1574
1575 default:
1576 if (IS_TYPE_OR_DECL_P (stmt))
1577 *walk_subtrees = 0;
1578 break;
1579 }
1580
1581 p_set->add (*stmt_p);
1582
1583 return NULL;
1584 }
1585
1586 /* Lower C++ front end trees to GENERIC in T_P. */
1587
1588 static void
1589 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1590 {
1591 struct cp_genericize_data wtd;
1592
1593 wtd.p_set = new hash_set<tree>;
1594 wtd.bind_expr_stack.create (0);
1595 wtd.omp_ctx = NULL;
1596 wtd.try_block = NULL_TREE;
1597 wtd.no_sanitize_p = false;
1598 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1599 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1600 delete wtd.p_set;
1601 wtd.bind_expr_stack.release ();
1602 if (sanitize_flags_p (SANITIZE_VPTR))
1603 cp_ubsan_instrument_member_accesses (t_p);
1604 }
1605
1606 /* If a non-void function doesn't obviously end with a return
1607 statement, add ubsan instrumentation code to verify it at runtime.
1608 If -fsanitize=return is not enabled, instrument
1609 __builtin_unreachable instead. */
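/* A short sketch of what gets instrumented (assumed example, not from
   this file):

     int f (int x)
     {
       if (x > 0)
         return 1;
     }   // control may fall off the end without returning

   With -fsanitize=return an ubsan missing-return check from
   ubsan_instrument_return is appended to the body; otherwise, when
   optimizing, a call to __builtin_unreachable () is appended instead.  */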
1610
1611 static void
1612 cp_maybe_instrument_return (tree fndecl)
1613 {
1614 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1615 || DECL_CONSTRUCTOR_P (fndecl)
1616 || DECL_DESTRUCTOR_P (fndecl)
1617 || !targetm.warn_func_return (fndecl))
1618 return;
1619
1620 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1621 /* Don't add __builtin_unreachable () if not optimizing; it will not
1622 improve any optimizations in that case and will just break UB code.
1623 Don't add it with -fsanitize=unreachable -fno-sanitize=return either:
1624 UBSan covers this case with ubsan_instrument_return above, where
1625 sufficient information is provided, while the __builtin_unreachable ()
1626 below, used when return sanitization is disabled, would just result
1627 in a hard to understand runtime error without a location. */
1628 && (!optimize
1629 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1630 return;
1631
1632 tree t = DECL_SAVED_TREE (fndecl);
1633 while (t)
1634 {
1635 switch (TREE_CODE (t))
1636 {
1637 case BIND_EXPR:
1638 t = BIND_EXPR_BODY (t);
1639 continue;
1640 case TRY_FINALLY_EXPR:
1641 case CLEANUP_POINT_EXPR:
1642 t = TREE_OPERAND (t, 0);
1643 continue;
1644 case STATEMENT_LIST:
1645 {
1646 tree_stmt_iterator i = tsi_last (t);
1647 while (!tsi_end_p (i))
1648 {
1649 tree p = tsi_stmt (i);
1650 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1651 break;
1652 tsi_prev (&i);
1653 }
1654 if (!tsi_end_p (i))
1655 {
1656 t = tsi_stmt (i);
1657 continue;
1658 }
1659 }
1660 break;
1661 case RETURN_EXPR:
1662 return;
1663 default:
1664 break;
1665 }
1666 break;
1667 }
1668 if (t == NULL_TREE)
1669 return;
1670 tree *p = &DECL_SAVED_TREE (fndecl);
1671 if (TREE_CODE (*p) == BIND_EXPR)
1672 p = &BIND_EXPR_BODY (*p);
1673
1674 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1675 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1676 t = ubsan_instrument_return (loc);
1677 else
1678 {
1679 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1680 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1681 }
1682
1683 append_to_statement_list (t, p);
1684 }
1685
1686 void
1687 cp_genericize (tree fndecl)
1688 {
1689 tree t;
1690
1691 /* Fix up the types of parms passed by invisible reference. */
1692 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1693 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1694 {
1695 /* If a function's arguments are copied to create a thunk,
1696 then DECL_BY_REFERENCE will be set -- but the type of the
1697 argument will be a pointer type, so we will never get
1698 here. */
1699 gcc_assert (!DECL_BY_REFERENCE (t));
1700 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1701 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1702 DECL_BY_REFERENCE (t) = 1;
1703 TREE_ADDRESSABLE (t) = 0;
1704 relayout_decl (t);
1705 }
1706
1707 /* Do the same for the return value. */
1708 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1709 {
1710 t = DECL_RESULT (fndecl);
1711 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1712 DECL_BY_REFERENCE (t) = 1;
1713 TREE_ADDRESSABLE (t) = 0;
1714 relayout_decl (t);
1715 if (DECL_NAME (t))
1716 {
1717 /* Adjust DECL_VALUE_EXPR of the original var. */
1718 tree outer = outer_curly_brace_block (current_function_decl);
1719 tree var;
1720
1721 if (outer)
1722 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1723 if (VAR_P (var)
1724 && DECL_NAME (t) == DECL_NAME (var)
1725 && DECL_HAS_VALUE_EXPR_P (var)
1726 && DECL_VALUE_EXPR (var) == t)
1727 {
1728 tree val = convert_from_reference (t);
1729 SET_DECL_VALUE_EXPR (var, val);
1730 break;
1731 }
1732 }
1733 }
1734
1735 /* If we're a clone, the body is already GIMPLE. */
1736 if (DECL_CLONED_FUNCTION_P (fndecl))
1737 return;
1738
1739 /* Allow cp_genericize calls to be nested. */
1740 bc_state_t save_state;
1741 save_bc_state (&save_state);
1742
1743 /* We do want to see every occurrence of the parms, so we can't just use
1744 walk_tree's hash functionality. */
1745 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1746
1747 cp_maybe_instrument_return (fndecl);
1748
1749 /* Do everything else. */
1750 c_genericize (fndecl);
1751 restore_bc_state (&save_state);
1752 }
1753 \f
1754 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1755 NULL if there is in fact nothing to do. ARG2 may be null if FN
1756 actually only takes one argument. */
1757
1758 static tree
1759 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1760 {
1761 tree defparm, parm, t;
1762 int i = 0;
1763 int nargs;
1764 tree *argarray;
1765
1766 if (fn == NULL)
1767 return NULL;
1768
1769 nargs = list_length (DECL_ARGUMENTS (fn));
1770 argarray = XALLOCAVEC (tree, nargs);
1771
1772 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1773 if (arg2)
1774 defparm = TREE_CHAIN (defparm);
1775
1776 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1777 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1778 {
1779 tree inner_type = TREE_TYPE (arg1);
1780 tree start1, end1, p1;
1781 tree start2 = NULL, p2 = NULL;
1782 tree ret = NULL, lab;
1783
1784 start1 = arg1;
1785 start2 = arg2;
1786 do
1787 {
1788 inner_type = TREE_TYPE (inner_type);
1789 start1 = build4 (ARRAY_REF, inner_type, start1,
1790 size_zero_node, NULL, NULL);
1791 if (arg2)
1792 start2 = build4 (ARRAY_REF, inner_type, start2,
1793 size_zero_node, NULL, NULL);
1794 }
1795 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1796 start1 = build_fold_addr_expr_loc (input_location, start1);
1797 if (arg2)
1798 start2 = build_fold_addr_expr_loc (input_location, start2);
1799
1800 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1801 end1 = fold_build_pointer_plus (start1, end1);
1802
1803 p1 = create_tmp_var (TREE_TYPE (start1));
1804 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1805 append_to_statement_list (t, &ret);
1806
1807 if (arg2)
1808 {
1809 p2 = create_tmp_var (TREE_TYPE (start2));
1810 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1811 append_to_statement_list (t, &ret);
1812 }
1813
1814 lab = create_artificial_label (input_location);
1815 t = build1 (LABEL_EXPR, void_type_node, lab);
1816 append_to_statement_list (t, &ret);
1817
1818 argarray[i++] = p1;
1819 if (arg2)
1820 argarray[i++] = p2;
1821 /* Handle default arguments. */
1822 for (parm = defparm; parm && parm != void_list_node;
1823 parm = TREE_CHAIN (parm), i++)
1824 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1825 TREE_PURPOSE (parm), fn,
1826 i - is_method, tf_warning_or_error);
1827 t = build_call_a (fn, i, argarray);
1828 t = fold_convert (void_type_node, t);
1829 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1830 append_to_statement_list (t, &ret);
1831
1832 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1833 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1834 append_to_statement_list (t, &ret);
1835
1836 if (arg2)
1837 {
1838 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1839 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1840 append_to_statement_list (t, &ret);
1841 }
1842
1843 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1844 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1845 append_to_statement_list (t, &ret);
1846
1847 return ret;
1848 }
1849 else
1850 {
1851 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1852 if (arg2)
1853 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1854 /* Handle default arguments. */
1855 for (parm = defparm; parm && parm != void_list_node;
1856 parm = TREE_CHAIN (parm), i++)
1857 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1858 TREE_PURPOSE (parm), fn,
1859 i - is_method, tf_warning_or_error);
1860 t = build_call_a (fn, i, argarray);
1861 t = fold_convert (void_type_node, t);
1862 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1863 }
1864 }
1865
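/* The helpers below read CP_OMP_CLAUSE_INFO, which (when set by
   cxx_omp_create_clause_info) appears to be a TREE_VEC holding the
   special member functions the clause needs: element 0 the (default
   or copy) constructor, element 1 the destructor and element 2 the
   copy assignment operator.  Where no function was recorded, copy and
   assignment fall back to a plain MODIFY_EXPR.  */
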
1866 /* Return code to initialize DECL with its default constructor, or
1867 NULL if there's nothing to do. */
1868
1869 tree
1870 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1871 {
1872 tree info = CP_OMP_CLAUSE_INFO (clause);
1873 tree ret = NULL;
1874
1875 if (info)
1876 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1877
1878 return ret;
1879 }
1880
1881 /* Return code to initialize DST with a copy constructor from SRC. */
1882
1883 tree
1884 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1885 {
1886 tree info = CP_OMP_CLAUSE_INFO (clause);
1887 tree ret = NULL;
1888
1889 if (info)
1890 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1891 if (ret == NULL)
1892 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1893
1894 return ret;
1895 }
1896
1897 /* Similarly, except use an assignment operator instead. */
1898
1899 tree
1900 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1901 {
1902 tree info = CP_OMP_CLAUSE_INFO (clause);
1903 tree ret = NULL;
1904
1905 if (info)
1906 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1907 if (ret == NULL)
1908 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1909
1910 return ret;
1911 }
1912
1913 /* Return code to destroy DECL. */
1914
1915 tree
1916 cxx_omp_clause_dtor (tree clause, tree decl)
1917 {
1918 tree info = CP_OMP_CLAUSE_INFO (clause);
1919 tree ret = NULL;
1920
1921 if (info)
1922 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1923
1924 return ret;
1925 }
1926
1927 /* True if OpenMP should privatize what this DECL points to rather
1928 than the DECL itself. */
1929
1930 bool
1931 cxx_omp_privatize_by_reference (const_tree decl)
1932 {
1933 return (TYPE_REF_P (TREE_TYPE (decl))
1934 || is_invisiref_parm (decl));
1935 }
1936
1937 /* Return true if DECL is a const-qualified var having no mutable member. */
1938 bool
1939 cxx_omp_const_qual_no_mutable (tree decl)
1940 {
1941 tree type = TREE_TYPE (decl);
1942 if (TYPE_REF_P (type))
1943 {
1944 if (!is_invisiref_parm (decl))
1945 return false;
1946 type = TREE_TYPE (type);
1947
1948 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1949 {
1950 /* NVR doesn't preserve const qualification of the
1951 variable's type. */
1952 tree outer = outer_curly_brace_block (current_function_decl);
1953 tree var;
1954
1955 if (outer)
1956 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1957 if (VAR_P (var)
1958 && DECL_NAME (decl) == DECL_NAME (var)
1959 && (TYPE_MAIN_VARIANT (type)
1960 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1961 {
1962 if (TYPE_READONLY (TREE_TYPE (var)))
1963 type = TREE_TYPE (var);
1964 break;
1965 }
1966 }
1967 }
1968
1969 if (type == error_mark_node)
1970 return false;
1971
1972 /* Variables with const-qualified type having no mutable member
1973 are predetermined shared. */
1974 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1975 return true;
1976
1977 return false;
1978 }
1979
1980 /* Return OMP_CLAUSE_DEFAULT_UNSPECIFIED unless the OpenMP sharing
1981 attribute of DECL is predetermined. */
1982
1983 enum omp_clause_default_kind
1984 cxx_omp_predetermined_sharing_1 (tree decl)
1985 {
1986 /* Static data members are predetermined shared. */
1987 if (TREE_STATIC (decl))
1988 {
1989 tree ctx = CP_DECL_CONTEXT (decl);
1990 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1991 return OMP_CLAUSE_DEFAULT_SHARED;
1992
1993 if (c_omp_predefined_variable (decl))
1994 return OMP_CLAUSE_DEFAULT_SHARED;
1995 }
1996
1997 /* The `this' pointer may not be specified in data-sharing clauses,
1998 yet we need to predetermine it firstprivate. */
1999 if (decl == current_class_ptr)
2000 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2001
2002 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2003 }
2004
2005 /* Likewise, but also include the artificial vars. We don't want to
2006 disallow the artificial vars being mentioned in explicit clauses,
2007 as we use artificial vars e.g. for loop constructs with random
2008 access iterators other than pointers, but during gimplification
2009 we want to treat them as predetermined. */
2010
2011 enum omp_clause_default_kind
2012 cxx_omp_predetermined_sharing (tree decl)
2013 {
2014 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2015 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2016 return ret;
2017
2018 /* Predetermine artificial variables holding integral values; those
2019 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2020 gimplification. */
2021 if (VAR_P (decl)
2022 && DECL_ARTIFICIAL (decl)
2023 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2024 && !(DECL_LANG_SPECIFIC (decl)
2025 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2026 return OMP_CLAUSE_DEFAULT_SHARED;
2027
2028 /* Similarly for typeinfo symbols. */
2029 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2030 return OMP_CLAUSE_DEFAULT_SHARED;
2031
2032 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2033 }
2034
2035 enum omp_clause_defaultmap_kind
2036 cxx_omp_predetermined_mapping (tree decl)
2037 {
2038 /* Predetermine artificial variables holding integral values; those
2039 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2040 gimplification. */
2041 if (VAR_P (decl)
2042 && DECL_ARTIFICIAL (decl)
2043 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2044 && !(DECL_LANG_SPECIFIC (decl)
2045 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2046 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2047
2048 if (c_omp_predefined_variable (decl))
2049 return OMP_CLAUSE_DEFAULTMAP_TO;
2050
2051 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2052 }
2053
2054 /* Finalize an implicitly determined clause. */
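
/* Only firstprivate clauses and lastprivate clauses on loop iterators
   are finalized here.  For class types the special member functions
   the clause will need (constructor, assignment, destructor) are
   looked up and recorded in CP_OMP_CLAUSE_INFO now, since later passes
   are not in a context where those lookups can be redone; if building
   them fails, the clause is turned into a shared clause.  */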
2055
2056 void
2057 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2058 {
2059 tree decl, inner_type;
2060 bool make_shared = false;
2061
2062 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2063 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2064 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2065 return;
2066
2067 decl = OMP_CLAUSE_DECL (c);
2068 decl = require_complete_type (decl);
2069 inner_type = TREE_TYPE (decl);
2070 if (decl == error_mark_node)
2071 make_shared = true;
2072 else if (TYPE_REF_P (TREE_TYPE (decl)))
2073 inner_type = TREE_TYPE (inner_type);
2074
2075 /* We're interested in the base element, not arrays. */
2076 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2077 inner_type = TREE_TYPE (inner_type);
2078
2079 /* Check for special function availability by building a call to one.
2080 Save the results, because later we won't be in the right context
2081 for making these queries. */
2082 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2083 if (!make_shared
2084 && CLASS_TYPE_P (inner_type)
2085 && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
2086 true))
2087 make_shared = true;
2088
2089 if (make_shared)
2090 {
2091 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2092 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2093 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2094 }
2095 }
2096
2097 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2098 disregarded in an OpenMP construct, because it is going to be
2099 remapped during OpenMP lowering. SHARED is true if DECL
2100 is going to be shared, false if it is going to be privatized. */
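
/* For example, a lambda capture proxy or a privatized member has a
   DECL_VALUE_EXPR referring into the enclosing object; when such a
   variable is privatized we want a fresh private copy rather than an
   access through that object, so the value expression is ignored.  */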
2101
2102 bool
2103 cxx_omp_disregard_value_expr (tree decl, bool shared)
2104 {
2105 if (shared)
2106 return false;
2107 if (VAR_P (decl)
2108 && DECL_HAS_VALUE_EXPR_P (decl)
2109 && DECL_ARTIFICIAL (decl)
2110 && DECL_LANG_SPECIFIC (decl)
2111 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2112 return true;
2113 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2114 return true;
2115 return false;
2116 }
2117
2118 /* Fold expression X which is used as an rvalue if RVAL is true. */
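
/* Note: when X is wanted as an rvalue and folds to a DECL of
   non-reference type, decl_constant_value is consulted, so e.g. a
   constexpr variable is replaced by its initializer and the result is
   folded again.  */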
2119
2120 tree
2121 cp_fold_maybe_rvalue (tree x, bool rval)
2122 {
2123 while (true)
2124 {
2125 x = cp_fold (x);
2126 if (rval)
2127 x = mark_rvalue_use (x);
2128 if (rval && DECL_P (x)
2129 && !TYPE_REF_P (TREE_TYPE (x)))
2130 {
2131 tree v = decl_constant_value (x);
2132 if (v != x && v != error_mark_node)
2133 {
2134 x = v;
2135 continue;
2136 }
2137 }
2138 break;
2139 }
2140 return x;
2141 }
2142
2143 /* Fold expression X which is used as an rvalue. */
2144
2145 tree
2146 cp_fold_rvalue (tree x)
2147 {
2148 return cp_fold_maybe_rvalue (x, true);
2149 }
2150
2151 /* Perform folding on expression X. */
2152
2153 tree
2154 cp_fully_fold (tree x)
2155 {
2156 if (processing_template_decl)
2157 return x;
2158 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2159 have to call both. */
2160 if (cxx_dialect >= cxx11)
2161 {
2162 x = maybe_constant_value (x);
2163 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2164 a TARGET_EXPR; undo that here. */
2165 if (TREE_CODE (x) == TARGET_EXPR)
2166 x = TARGET_EXPR_INITIAL (x);
2167 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2168 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2169 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2170 x = TREE_OPERAND (x, 0);
2171 }
2172 return cp_fold_rvalue (x);
2173 }
2174
2175 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2176 in some cases. */
2177
2178 tree
2179 cp_fully_fold_init (tree x)
2180 {
2181 if (processing_template_decl)
2182 return x;
2183 x = cp_fully_fold (x);
2184 hash_set<tree> pset;
2185 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2186 return x;
2187 }
2188
2189 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2190 and certain changes are made to the folding done. Or should be (FIXME). We
2191 never touch maybe_const, as it is only used for the C front-end
2192 C_MAYBE_CONST_EXPR. */
2193
2194 tree
2195 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2196 {
2197 return cp_fold_maybe_rvalue (x, !lval);
2198 }
2199
2200 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2201
2202 /* Dispose of the whole FOLD_CACHE. */
2203
2204 void
2205 clear_fold_cache (void)
2206 {
2207 if (fold_cache != NULL)
2208 fold_cache->empty ();
2209 }
2210
2211 /* This function tries to fold an expression X.
2212 To avoid combinatorial explosion, folding results are kept in fold_cache.
2213 If X is invalid, we don't fold at all.
2214 For performance reasons we don't cache expressions representing a
2215 declaration or constant.
2216 The function returns X or its folded variant. */
2217
2218 static tree
2219 cp_fold (tree x)
2220 {
2221 tree op0, op1, op2, op3;
2222 tree org_x = x, r = NULL_TREE;
2223 enum tree_code code;
2224 location_t loc;
2225 bool rval_ops = true;
2226
2227 if (!x || x == error_mark_node)
2228 return x;
2229
2230 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2231 return x;
2232
2233 /* Don't bother to cache DECLs or constants. */
2234 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2235 return x;
2236
2237 if (fold_cache == NULL)
2238 fold_cache = hash_map<tree, tree>::create_ggc (101);
2239
2240 if (tree *cached = fold_cache->get (x))
2241 return *cached;
2242
2243 uid_sensitive_constexpr_evaluation_checker c;
2244
2245 code = TREE_CODE (x);
2246 switch (code)
2247 {
2248 case CLEANUP_POINT_EXPR:
2249 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2250 effects. */
2251 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2252 if (!TREE_SIDE_EFFECTS (r))
2253 x = r;
2254 break;
2255
2256 case SIZEOF_EXPR:
2257 x = fold_sizeof_expr (x);
2258 break;
2259
2260 case VIEW_CONVERT_EXPR:
2261 rval_ops = false;
2262 /* FALLTHRU */
2263 case CONVERT_EXPR:
2264 case NOP_EXPR:
2265 case NON_LVALUE_EXPR:
2266
2267 if (VOID_TYPE_P (TREE_TYPE (x)))
2268 {
2269 /* This is just to make sure we don't end up with casts to
2270 void from error_mark_node. If we just return x, then
2271 cp_fold_r might fold the operand into error_mark_node and
2272 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2273 during gimplification doesn't like such casts.
2274 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the folded
2275 operand should already be in the caches, and if called from
2276 cp_fold_r the operand will be updated in place. */
2277 op0 = cp_fold (TREE_OPERAND (x, 0));
2278 if (op0 == error_mark_node)
2279 x = error_mark_node;
2280 break;
2281 }
2282
2283 loc = EXPR_LOCATION (x);
2284 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2285
2286 if (code == CONVERT_EXPR
2287 && SCALAR_TYPE_P (TREE_TYPE (x))
2288 && op0 != void_node)
2289 /* During parsing we used convert_to_*_nofold; re-convert now using the
2290 folding variants, since fold() doesn't do those transformations. */
2291 x = fold (convert (TREE_TYPE (x), op0));
2292 else if (op0 != TREE_OPERAND (x, 0))
2293 {
2294 if (op0 == error_mark_node)
2295 x = error_mark_node;
2296 else
2297 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2298 }
2299 else
2300 x = fold (x);
2301
2302 /* Conversion of an out-of-range value has implementation-defined
2303 behavior; the language considers it different from arithmetic
2304 overflow, which is undefined. */
2305 if (TREE_CODE (op0) == INTEGER_CST
2306 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2307 TREE_OVERFLOW (x) = false;
2308
2309 break;
2310
2311 case INDIRECT_REF:
2312 /* We don't need the decltype(auto) obfuscation anymore. */
2313 if (REF_PARENTHESIZED_P (x))
2314 {
2315 tree p = maybe_undo_parenthesized_ref (x);
2316 return cp_fold (p);
2317 }
2318 goto unary;
2319
2320 case ADDR_EXPR:
2321 loc = EXPR_LOCATION (x);
2322 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2323
2324 /* Cope with user tricks that amount to offsetof. */
2325 if (op0 != error_mark_node
2326 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2327 {
2328 tree val = get_base_address (op0);
2329 if (val
2330 && INDIRECT_REF_P (val)
2331 && COMPLETE_TYPE_P (TREE_TYPE (val))
2332 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2333 {
2334 val = TREE_OPERAND (val, 0);
2335 STRIP_NOPS (val);
2336 val = maybe_constant_value (val);
2337 if (TREE_CODE (val) == INTEGER_CST)
2338 return fold_offsetof (op0, TREE_TYPE (x));
2339 }
2340 }
2341 goto finish_unary;
2342
2343 case REALPART_EXPR:
2344 case IMAGPART_EXPR:
2345 rval_ops = false;
2346 /* FALLTHRU */
2347 case CONJ_EXPR:
2348 case FIX_TRUNC_EXPR:
2349 case FLOAT_EXPR:
2350 case NEGATE_EXPR:
2351 case ABS_EXPR:
2352 case ABSU_EXPR:
2353 case BIT_NOT_EXPR:
2354 case TRUTH_NOT_EXPR:
2355 case FIXED_CONVERT_EXPR:
2356 unary:
2357
2358 loc = EXPR_LOCATION (x);
2359 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2360
2361 finish_unary:
2362 if (op0 != TREE_OPERAND (x, 0))
2363 {
2364 if (op0 == error_mark_node)
2365 x = error_mark_node;
2366 else
2367 {
2368 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2369 if (code == INDIRECT_REF
2370 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2371 {
2372 TREE_READONLY (x) = TREE_READONLY (org_x);
2373 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2374 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2375 }
2376 }
2377 }
2378 else
2379 x = fold (x);
2380
2381 gcc_assert (TREE_CODE (x) != COND_EXPR
2382 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2383 break;
2384
2385 case UNARY_PLUS_EXPR:
2386 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2387 if (op0 == error_mark_node)
2388 x = error_mark_node;
2389 else
2390 x = fold_convert (TREE_TYPE (x), op0);
2391 break;
2392
2393 case POSTDECREMENT_EXPR:
2394 case POSTINCREMENT_EXPR:
2395 case INIT_EXPR:
2396 case PREDECREMENT_EXPR:
2397 case PREINCREMENT_EXPR:
2398 case COMPOUND_EXPR:
2399 case MODIFY_EXPR:
2400 rval_ops = false;
2401 /* FALLTHRU */
2402 case POINTER_PLUS_EXPR:
2403 case PLUS_EXPR:
2404 case POINTER_DIFF_EXPR:
2405 case MINUS_EXPR:
2406 case MULT_EXPR:
2407 case TRUNC_DIV_EXPR:
2408 case CEIL_DIV_EXPR:
2409 case FLOOR_DIV_EXPR:
2410 case ROUND_DIV_EXPR:
2411 case TRUNC_MOD_EXPR:
2412 case CEIL_MOD_EXPR:
2413 case ROUND_MOD_EXPR:
2414 case RDIV_EXPR:
2415 case EXACT_DIV_EXPR:
2416 case MIN_EXPR:
2417 case MAX_EXPR:
2418 case LSHIFT_EXPR:
2419 case RSHIFT_EXPR:
2420 case LROTATE_EXPR:
2421 case RROTATE_EXPR:
2422 case BIT_AND_EXPR:
2423 case BIT_IOR_EXPR:
2424 case BIT_XOR_EXPR:
2425 case TRUTH_AND_EXPR:
2426 case TRUTH_ANDIF_EXPR:
2427 case TRUTH_OR_EXPR:
2428 case TRUTH_ORIF_EXPR:
2429 case TRUTH_XOR_EXPR:
2430 case LT_EXPR: case LE_EXPR:
2431 case GT_EXPR: case GE_EXPR:
2432 case EQ_EXPR: case NE_EXPR:
2433 case UNORDERED_EXPR: case ORDERED_EXPR:
2434 case UNLT_EXPR: case UNLE_EXPR:
2435 case UNGT_EXPR: case UNGE_EXPR:
2436 case UNEQ_EXPR: case LTGT_EXPR:
2437 case RANGE_EXPR: case COMPLEX_EXPR:
2438
2439 loc = EXPR_LOCATION (x);
2440 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2441 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2442
2443 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2444 {
2445 if (op0 == error_mark_node || op1 == error_mark_node)
2446 x = error_mark_node;
2447 else
2448 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2449 }
2450 else
2451 x = fold (x);
2452
2453 /* This is only needed for -Wnonnull-compare and only if
2454 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2455 generation, we do it always. */
2456 if (COMPARISON_CLASS_P (org_x))
2457 {
2458 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2459 ;
2460 else if (COMPARISON_CLASS_P (x))
2461 {
2462 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2463 TREE_NO_WARNING (x) = 1;
2464 }
2465 /* Otherwise give up on optimizing these, let GIMPLE folders
2466 optimize those later on. */
2467 else if (op0 != TREE_OPERAND (org_x, 0)
2468 || op1 != TREE_OPERAND (org_x, 1))
2469 {
2470 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2471 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2472 TREE_NO_WARNING (x) = 1;
2473 }
2474 else
2475 x = org_x;
2476 }
2477
2478 break;
2479
2480 case VEC_COND_EXPR:
2481 case COND_EXPR:
2482 loc = EXPR_LOCATION (x);
2483 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2484 op1 = cp_fold (TREE_OPERAND (x, 1));
2485 op2 = cp_fold (TREE_OPERAND (x, 2));
2486
2487 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2488 {
2489 warning_sentinel s (warn_int_in_bool_context);
2490 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2491 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2492 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2493 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2494 }
2495 else if (VOID_TYPE_P (TREE_TYPE (x)))
2496 {
2497 if (TREE_CODE (op0) == INTEGER_CST)
2498 {
2499 /* If the condition is constant, fold can fold away the COND_EXPR.
2500 Some statement-level uses of COND_EXPR leave a branch NULL;
2501 supply empty statements so that folding doesn't crash. */
2502 if (!op1)
2503 op1 = build_empty_stmt (loc);
2504 if (!op2)
2505 op2 = build_empty_stmt (loc);
2506 }
2507 else
2508 {
2509 /* Otherwise, don't bother folding a void condition, since
2510 it can't produce a constant value. */
2511 if (op0 != TREE_OPERAND (x, 0)
2512 || op1 != TREE_OPERAND (x, 1)
2513 || op2 != TREE_OPERAND (x, 2))
2514 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2515 break;
2516 }
2517 }
2518
2519 if (op0 != TREE_OPERAND (x, 0)
2520 || op1 != TREE_OPERAND (x, 1)
2521 || op2 != TREE_OPERAND (x, 2))
2522 {
2523 if (op0 == error_mark_node
2524 || op1 == error_mark_node
2525 || op2 == error_mark_node)
2526 x = error_mark_node;
2527 else
2528 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2529 }
2530 else
2531 x = fold (x);
2532
2533 /* A COND_EXPR might have incompatible types in branches if one or both
2534 arms are bitfields. If folding exposed such a branch, fix it up. */
2535 if (TREE_CODE (x) != code
2536 && x != error_mark_node
2537 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2538 x = fold_convert (TREE_TYPE (org_x), x);
2539
2540 break;
2541
2542 case CALL_EXPR:
2543 {
2544 int sv = optimize, nw = sv;
2545 tree callee = get_callee_fndecl (x);
2546
2547 /* Some built-in function calls will be evaluated at compile-time in
2548 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2549 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2550 if (callee && fndecl_built_in_p (callee) && !optimize
2551 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2552 && current_function_decl
2553 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2554 nw = 1;
2555
2556 /* Defer folding __builtin_is_constant_evaluated. */
2557 if (callee
2558 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2559 BUILT_IN_FRONTEND))
2560 break;
2561
2562 if (callee
2563 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2564 BUILT_IN_FRONTEND))
2565 {
2566 x = fold_builtin_source_location (EXPR_LOCATION (x));
2567 break;
2568 }
2569
2570 bool changed = false;
2571 int m = call_expr_nargs (x);
2572 for (int i = 0; i < m; i++)
2573 {
2574 r = cp_fold (CALL_EXPR_ARG (x, i));
2575 if (r != CALL_EXPR_ARG (x, i))
2576 {
2577 if (r == error_mark_node)
2578 {
2579 x = error_mark_node;
2580 break;
2581 }
2582 if (!changed)
2583 x = copy_node (x);
2584 CALL_EXPR_ARG (x, i) = r;
2585 changed = true;
2586 }
2587 }
2588 if (x == error_mark_node)
2589 break;
2590
2591 optimize = nw;
2592 r = fold (x);
2593 optimize = sv;
2594
2595 if (TREE_CODE (r) != CALL_EXPR)
2596 {
2597 x = cp_fold (r);
2598 break;
2599 }
2600
2601 optimize = nw;
2602
2603 /* Invoke maybe_constant_value for functions declared
2604 constexpr and not called with AGGR_INIT_EXPRs.
2605 TODO:
2606 Do constexpr expansion of expressions where the call itself is not
2607 constant, but the call followed by an INDIRECT_REF is. */
2608 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2609 && !flag_no_inline)
2610 r = maybe_constant_value (x);
2611 optimize = sv;
2612
2613 if (TREE_CODE (r) != CALL_EXPR)
2614 {
2615 if (DECL_CONSTRUCTOR_P (callee))
2616 {
2617 loc = EXPR_LOCATION (x);
2618 tree s = build_fold_indirect_ref_loc (loc,
2619 CALL_EXPR_ARG (x, 0));
2620 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2621 }
2622 x = r;
2623 break;
2624 }
2625
2626 break;
2627 }
2628
2629 case CONSTRUCTOR:
2630 {
2631 unsigned i;
2632 constructor_elt *p;
2633 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2634 vec<constructor_elt, va_gc> *nelts = NULL;
2635 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2636 {
2637 tree op = cp_fold (p->value);
2638 if (op != p->value)
2639 {
2640 if (op == error_mark_node)
2641 {
2642 x = error_mark_node;
2643 vec_free (nelts);
2644 break;
2645 }
2646 if (nelts == NULL)
2647 nelts = elts->copy ();
2648 (*nelts)[i].value = op;
2649 }
2650 }
2651 if (nelts)
2652 {
2653 x = build_constructor (TREE_TYPE (x), nelts);
2654 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2655 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2656 }
2657 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2658 x = fold (x);
2659 break;
2660 }
2661 case TREE_VEC:
2662 {
2663 bool changed = false;
2664 int n = TREE_VEC_LENGTH (x);
2665
2666 for (int i = 0; i < n; i++)
2667 {
2668 tree op = cp_fold (TREE_VEC_ELT (x, i));
2669 if (op != TREE_VEC_ELT (x, i))
2670 {
2671 if (!changed)
2672 x = copy_node (x);
2673 TREE_VEC_ELT (x, i) = op;
2674 changed = true;
2675 }
2676 }
2677 }
2678
2679 break;
2680
2681 case ARRAY_REF:
2682 case ARRAY_RANGE_REF:
2683
2684 loc = EXPR_LOCATION (x);
2685 op0 = cp_fold (TREE_OPERAND (x, 0));
2686 op1 = cp_fold (TREE_OPERAND (x, 1));
2687 op2 = cp_fold (TREE_OPERAND (x, 2));
2688 op3 = cp_fold (TREE_OPERAND (x, 3));
2689
2690 if (op0 != TREE_OPERAND (x, 0)
2691 || op1 != TREE_OPERAND (x, 1)
2692 || op2 != TREE_OPERAND (x, 2)
2693 || op3 != TREE_OPERAND (x, 3))
2694 {
2695 if (op0 == error_mark_node
2696 || op1 == error_mark_node
2697 || op2 == error_mark_node
2698 || op3 == error_mark_node)
2699 x = error_mark_node;
2700 else
2701 {
2702 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2703 TREE_READONLY (x) = TREE_READONLY (org_x);
2704 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2705 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2706 }
2707 }
2708
2709 x = fold (x);
2710 break;
2711
2712 case SAVE_EXPR:
2713 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2714 folding, evaluates to an invariant. In that case there is no need
2715 to wrap the folded tree in a SAVE_EXPR. */
2716 r = cp_fold (TREE_OPERAND (x, 0));
2717 if (tree_invariant_p (r))
2718 x = r;
2719 break;
2720
2721 default:
2722 return org_x;
2723 }
2724
2725 if (EXPR_P (x) && TREE_CODE (x) == code)
2726 {
2727 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2728 TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
2729 }
2730
2731 if (!c.evaluation_restricted_p ())
2732 {
2733 fold_cache->put (org_x, x);
2734 /* Avoid trying to fold an already folded result again. */
2735 if (x != org_x)
2736 fold_cache->put (x, x);
2737 }
2738
2739 return x;
2740 }
2741
2742 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2743
2744 tree
2745 lookup_hotness_attribute (tree list)
2746 {
2747 for (; list; list = TREE_CHAIN (list))
2748 {
2749 tree name = get_attribute_name (list);
2750 if (is_attribute_p ("hot", name)
2751 || is_attribute_p ("cold", name)
2752 || is_attribute_p ("likely", name)
2753 || is_attribute_p ("unlikely", name))
2754 break;
2755 }
2756 return list;
2757 }
2758
2759 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2760
2761 static tree
2762 remove_hotness_attribute (tree list)
2763 {
2764 list = remove_attribute ("hot", list);
2765 list = remove_attribute ("cold", list);
2766 list = remove_attribute ("likely", list);
2767 list = remove_attribute ("unlikely", list);
2768 return list;
2769 }
2770
2771 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2772 PREDICT_EXPR. */
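
/* For instance, for

       if (x)
         [[likely]] return 0;

   a PREDICT_EXPR (PRED_HOT_LABEL, TAKEN) is added at that point in the
   statement list and the attribute is removed from STD_ATTRS;
   [[unlikely]], "hot" and "cold" are handled analogously.  */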
2773
2774 tree
2775 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2776 {
2777 if (std_attrs == error_mark_node)
2778 return std_attrs;
2779 if (tree attr = lookup_hotness_attribute (std_attrs))
2780 {
2781 tree name = get_attribute_name (attr);
2782 bool hot = (is_attribute_p ("hot", name)
2783 || is_attribute_p ("likely", name));
2784 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2785 hot ? TAKEN : NOT_TAKEN);
2786 SET_EXPR_LOCATION (pred, attrs_loc);
2787 add_stmt (pred);
2788 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2789 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2790 get_attribute_name (other), name);
2791 std_attrs = remove_hotness_attribute (std_attrs);
2792 }
2793 return std_attrs;
2794 }
2795
2796 /* Helper of fold_builtin_source_location, return the
2797 std::source_location::__impl type after performing verification
2798 on it. LOC is used for reporting any errors. */
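
/* The expected shape of the class is essentially

       struct __impl {
         const char *_M_file_name;
         const char *_M_function_name;
         <integer type> _M_line;
         <integer type> _M_column;
       };

   anything else (extra members, wrong member types) is diagnosed
   below.  */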
2799
2800 static tree
2801 get_source_location_impl_type (location_t loc)
2802 {
2803 tree name = get_identifier ("source_location");
2804 tree decl = lookup_qualified_name (std_node, name);
2805 if (TREE_CODE (decl) != TYPE_DECL)
2806 {
2807 auto_diagnostic_group d;
2808 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2809 qualified_name_lookup_error (std_node, name, decl, loc);
2810 else
2811 error_at (loc, "%qD is not a type", decl);
2812 return error_mark_node;
2813 }
2814 name = get_identifier ("__impl");
2815 tree type = TREE_TYPE (decl);
2816 decl = lookup_qualified_name (type, name);
2817 if (TREE_CODE (decl) != TYPE_DECL)
2818 {
2819 auto_diagnostic_group d;
2820 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2821 qualified_name_lookup_error (type, name, decl, loc);
2822 else
2823 error_at (loc, "%qD is not a type", decl);
2824 return error_mark_node;
2825 }
2826 type = TREE_TYPE (decl);
2827 if (TREE_CODE (type) != RECORD_TYPE)
2828 {
2829 error_at (loc, "%qD is not a class type", decl);
2830 return error_mark_node;
2831 }
2832
2833 int cnt = 0;
2834 for (tree field = TYPE_FIELDS (type);
2835 (field = next_initializable_field (field)) != NULL_TREE;
2836 field = DECL_CHAIN (field))
2837 {
2838 if (DECL_NAME (field) != NULL_TREE)
2839 {
2840 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
2841 if (strcmp (n, "_M_file_name") == 0
2842 || strcmp (n, "_M_function_name") == 0)
2843 {
2844 if (TREE_TYPE (field) != const_string_type_node)
2845 {
2846 error_at (loc, "%qD does not have %<const char *%> type",
2847 field);
2848 return error_mark_node;
2849 }
2850 cnt++;
2851 continue;
2852 }
2853 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
2854 {
2855 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
2856 {
2857 error_at (loc, "%qD does not have integral type", field);
2858 return error_mark_node;
2859 }
2860 cnt++;
2861 continue;
2862 }
2863 }
2864 cnt = 0;
2865 break;
2866 }
2867 if (cnt != 4)
2868 {
2869 error_at (loc, "%<std::source_location::__impl%> does not contain only "
2870 "non-static data members %<_M_file_name%>, "
2871 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
2872 return error_mark_node;
2873 }
2874 return build_qualified_type (type, TYPE_QUAL_CONST);
2875 }
2876
2877 /* Entry type for the source_location_table hash table. */
2878 struct GTY((for_user)) source_location_table_entry {
2879 location_t loc;
2880 unsigned uid;
2881 tree var;
2882 };
2883
2884 /* Hash traits for the source_location_table hash table below. */
2885
2886 struct source_location_table_entry_hash
2887 : ggc_remove <source_location_table_entry>
2888 {
2889 typedef source_location_table_entry value_type;
2890 typedef source_location_table_entry compare_type;
2891
2892 static hashval_t
2893 hash (const source_location_table_entry &ref)
2894 {
2895 inchash::hash hstate (0);
2896 hstate.add_int (ref.loc);
2897 hstate.add_int (ref.uid);
2898 return hstate.end ();
2899 }
2900
2901 static bool
2902 equal (const source_location_table_entry &ref1,
2903 const source_location_table_entry &ref2)
2904 {
2905 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
2906 }
2907
2908 static void
2909 mark_deleted (source_location_table_entry &ref)
2910 {
2911 ref.loc = UNKNOWN_LOCATION;
2912 ref.uid = -1U;
2913 ref.var = NULL_TREE;
2914 }
2915
2916 static const bool empty_zero_p = true;
2917
2918 static void
2919 mark_empty (source_location_table_entry &ref)
2920 {
2921 ref.loc = UNKNOWN_LOCATION;
2922 ref.uid = 0;
2923 ref.var = NULL_TREE;
2924 }
2925
2926 static bool
2927 is_deleted (const source_location_table_entry &ref)
2928 {
2929 return (ref.loc == UNKNOWN_LOCATION
2930 && ref.uid == -1U
2931 && ref.var == NULL_TREE);
2932 }
2933
2934 static bool
2935 is_empty (const source_location_table_entry &ref)
2936 {
2937 return (ref.loc == UNKNOWN_LOCATION
2938 && ref.uid == 0
2939 && ref.var == NULL_TREE);
2940 }
2941 };
2942
2943 static GTY(()) hash_table <source_location_table_entry_hash>
2944 *source_location_table;
2945 static GTY(()) unsigned int source_location_id;
2946
2947 /* Fold __builtin_source_location () call. LOC is the location
2948 of the call. */
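
/* The call is folded to the address of a static-storage object,
   conceptually something like

       static const std::source_location::__impl _Lsrc_loc0
         = { "file.cc", "int f()", 42, 7 };
       ... &_Lsrc_loc0 ...

   with one object shared by all calls made from the same location in
   the same function (keyed through source_location_table).  */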
2949
2950 tree
2951 fold_builtin_source_location (location_t loc)
2952 {
2953 if (source_location_impl == NULL_TREE)
2954 {
2955 auto_diagnostic_group d;
2956 source_location_impl = get_source_location_impl_type (loc);
2957 if (source_location_impl == error_mark_node)
2958 inform (loc, "evaluating %qs", "__builtin_source_location");
2959 }
2960 if (source_location_impl == error_mark_node)
2961 return build_zero_cst (const_ptr_type_node);
2962 if (source_location_table == NULL)
2963 source_location_table
2964 = hash_table <source_location_table_entry_hash>::create_ggc (64);
2965 const line_map_ordinary *map;
2966 source_location_table_entry entry;
2967 entry.loc
2968 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
2969 &map);
2970 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
2971 entry.var = error_mark_node;
2972 source_location_table_entry *entryp
2973 = source_location_table->find_slot (entry, INSERT);
2974 tree var;
2975 if (entryp->var)
2976 var = entryp->var;
2977 else
2978 {
2979 char tmp_name[32];
2980 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
2981 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
2982 source_location_impl);
2983 TREE_STATIC (var) = 1;
2984 TREE_PUBLIC (var) = 0;
2985 DECL_ARTIFICIAL (var) = 1;
2986 DECL_IGNORED_P (var) = 1;
2987 DECL_EXTERNAL (var) = 0;
2988 DECL_DECLARED_CONSTEXPR_P (var) = 1;
2989 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
2990 layout_decl (var, 0);
2991
2992 vec<constructor_elt, va_gc> *v = NULL;
2993 vec_alloc (v, 4);
2994 for (tree field = TYPE_FIELDS (source_location_impl);
2995 (field = next_initializable_field (field)) != NULL_TREE;
2996 field = DECL_CHAIN (field))
2997 {
2998 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
2999 tree val = NULL_TREE;
3000 if (strcmp (n, "_M_file_name") == 0)
3001 {
3002 if (const char *fname = LOCATION_FILE (loc))
3003 {
3004 fname = remap_macro_filename (fname);
3005 val = build_string_literal (strlen (fname) + 1, fname);
3006 }
3007 else
3008 val = build_string_literal (1, "");
3009 }
3010 else if (strcmp (n, "_M_function_name") == 0)
3011 {
3012 const char *name = "";
3013
3014 if (current_function_decl)
3015 name = cxx_printable_name (current_function_decl, 0);
3016
3017 val = build_string_literal (strlen (name) + 1, name);
3018 }
3019 else if (strcmp (n, "_M_line") == 0)
3020 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3021 else if (strcmp (n, "_M_column") == 0)
3022 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3023 else
3024 gcc_unreachable ();
3025 CONSTRUCTOR_APPEND_ELT (v, field, val);
3026 }
3027
3028 tree ctor = build_constructor (source_location_impl, v);
3029 TREE_CONSTANT (ctor) = 1;
3030 TREE_STATIC (ctor) = 1;
3031 DECL_INITIAL (var) = ctor;
3032 varpool_node::finalize_decl (var);
3033 *entryp = entry;
3034 entryp->var = var;
3035 }
3036
3037 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3038 }
3039
3040 #include "gt-cp-cp-gimplify.h"