/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Data passed to cp_fold_r; defined here so that the type is complete
   before its first use in cp_gimplify_expr below.  */

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
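
/* Informally, the two lowerings above turn a front-end tree for

     try { f (); } catch (int e) { g (); }

   into roughly

     TRY_CATCH_EXPR <f-body, CATCH_EXPR <int, handler-body>>

   where the CATCH_EXPR records the caught type for the EH runtime to
   match against.  (Illustrative sketch; f, g and the exact operand
   layout are placeholders.)  */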

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
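
/* For example, with a (pre-C++17) dynamic exception specification

     void f () throw (int) { ... }

   the body ends up wrapped roughly as

     TRY_CATCH_EXPR <body,
                     EH_FILTER_EXPR <allowed = {int},
                                     failure = call the unexpected handler>>

   where the unexpected handler is __cxa_call_unexpected in the Itanium
   C++ ABI.  (Illustrative sketch.)  */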

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as the then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
        stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
                       void_node, else_);
      else
        stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
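
/* For example (illustrative code, using placeholder names T, f, g and
   imm): for

     if constexpr (sizeof (T) == 4) f (); else g ();

   the condition was folded to a constant earlier, so only the selected
   branch survives here; for

     if consteval { imm (); } else { g (); }

   IF_COND is boolean_false_node and the then_ block, which may still
   contain unfolded calls to consteval functions such as imm, must be
   dropped even though a plain COND_EXPR would keep it.  */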

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !warning_suppressed_p (stmt, OPT_Wunused_value))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
        if (target_expr_needs_replace (from))
          {
            /* If this was changed by cp_genericize_target_expr, we need to
               walk into it to replace uses of the slot.  */
            replace_decl (&init, TARGET_EXPR_SLOT (from), to);
            *expr_p = init;
            return;
          }
        else
          from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
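
/* For instance, the body of a destructor, or of any function whose
   noexcept specification must not be violated, is wrapped in a
   MUST_NOT_THROW_EXPR; the lowering above produces roughly

     try { <body> } catch (...) { std::terminate (); }

   at the GIMPLE level, via GIMPLE_EH_MUST_NOT_THROW and terminate_fn.
   (Illustrative sketch.)  */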

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
          || lambda_static_thunk_p (fn))
        /* In a thunk, we pass through invisible reference parms, so this isn't
           actually a copy.  */
        return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
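
/* Illustrative sketch: given

     struct E { };
     E a, b;
     a = b;	// no bytes actually need to move

   the copy is recognized here and can be reduced to evaluating the
   operands for side effects, instead of loading and storing the single
   padding byte of the empty class.  */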

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
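
/* Illustrative cases (v, a, i and f are placeholder names): a volatile
   int v by itself yields false (nothing happens until it is read or
   written); a[i++] yields true because evaluating the lvalue increments
   i; *f () yields true because computing the address calls f.  */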

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
                 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
         that can mean we don't copy the argument and some following
         argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
        return GS_ERROR;
      else if (ordered
               && is_gimple_reg_type (TREE_TYPE (*arg_p))
               && is_gimple_variable (*arg_p)
               && TREE_CODE (*arg_p) != SSA_NAME
               /* No need to force references into register, references
                  can't be modified.  */
               && !TYPE_REF_P (TREE_TYPE (*arg_p))
               /* And this can't be modified either.  */
               && *arg_p != current_class_ptr)
        *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
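
/* For example, for an overloaded operator whose operands have a defined
   evaluation order in C++17 (CALL_EXPR_ORDERED_ARGS), as in

     a << f ()	// overloaded operator<<

   the left operand is sequenced before the call to f, so if f has side
   effects the earlier argument is forced into a temporary (or keeps its
   TARGET_EXPR) so those side effects cannot clobber it.  (Illustrative
   sketch; a and f are placeholders.)  */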

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
                                        tf_warning_or_error);

        /* Pass a cp_fold_data object rather than a bare hash_set:
           cp_fold_r casts its callback data to cp_fold_data * and reads
           the genericize flag, so handing it only a hash_set<tree>
           would make it read past the end of the object.  */
        cp_fold_data data (/*genericize*/true);
        cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
                               TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          int last_side_effects_arg = -1;
          for (int i = nargs; i > 0; --i)
            if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
              {
                last_side_effects_arg = i;
                break;
              }
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
                                   i < last_side_effects_arg);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              int nargs = call_expr_nargs (*expr_p);
              bool side_effects = false;
              for (int i = 1; i < nargs; ++i)
                if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
                  {
                    side_effects = true;
                    break;
                  }
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
                                   side_effects);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
            switch (DECL_FE_FUNCTION_CODE (decl))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                *expr_p = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                *expr_p
                  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                *expr_p
                  = fold_builtin_is_corresponding_member
                      (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                       &CALL_EXPR_ARG (*expr_p, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                *expr_p
                  = fold_builtin_is_pointer_inverconvertible_with_class
                      (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                       &CALL_EXPR_ARG (*expr_p, 0));
                break;
              default:
                break;
              }
        }
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
        ret = GS_ERROR;
      else
        ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
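
/* A concrete case of the P0145 handling in the MODIFY_EXPR path above
   (illustrative names a, f, g):

     a[f ()] = g ();

   C++17 sequences the RHS before the LHS, so g () must be evaluated
   before the subscript f (); when the LHS has side effects, the RHS is
   therefore preevaluated into a temporary instead of being left as a
   trailing CALL_EXPR.  */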

/* Return true if T is a parameter or return value that is passed by
   invisible reference, i.e. a PARM_DECL or RESULT_DECL with
   DECL_BY_REFERENCE set.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
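
/* For example (illustrative), given

     struct S { S (); S (const S &); ~S (); };

   a reference inside #pragma omp task to an S object from an enclosing
   function makes it implicitly firstprivate, so its copy constructor
   and destructor are instantiated here, while instantiation is still
   possible.  */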

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    {
      tree init = expand_vec_init_expr (to, from, tf_warning_or_error);

      /* Make cp_gimplify_init_expr call replace_decl.  */
      *replace = fold_convert (void_type_node, init);
    }
  else if (flag_exceptions
           && TREE_CODE (from) == CONSTRUCTOR
           && TREE_SIDE_EFFECTS (from)
           && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      *replace = split_nonconstant_init (to, from);
    }
}
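
/* For instance, with illustrative class types X and Y whose
   constructors can throw,

     struct A { X x; Y y; };
     A a = { X (), Y () };

   the CONSTRUCTOR is broken down into member-wise initializations so
   that if initializing y throws, the already-constructed x member is
   properly destroyed.  */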

/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
                      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data *) data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
        {
          if (!data->pset.add (stmt))
            error_at (PTRMEM_CST_LOCATION (stmt),
                      "taking address of an immediate function %qD",
                      PTRMEM_CST_MEMBER (stmt));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
        {
          error_at (EXPR_LOCATION (stmt),
                    "taking address of an immediate function %qD",
                    TREE_OPERAND (stmt, 0));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    case CALL_EXPR:
      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
        if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
            && source_location_current_p (fndecl))
          *stmt_p = stmt = cxx_constant_value (stmt);
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR
                        || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
        {
          /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
             boolean_false_node.  */
          cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
          cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          return NULL;
        }
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
         here rather than in cp_genericize to avoid problems with the invisible
         reference transition.  */
    case INIT_EXPR:
      if (data->genericize)
        cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
        cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
         that case, use it in place of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
        {
          cp_walk_tree (&init, cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          if (TREE_CODE (init) == TARGET_EXPR)
            *stmt_p = init;
        }
      break;

    default:
      break;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
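
/* For example (GNU VLA extension; the situation from c++/88256):

     void f (void *p, int n)
     {
       int (*a)[n] = (int (*)[n]) p;
     }

   the anonymous pointed-to VLA type gets an artificial TYPE_DECL whose
   DECL_EXPR precedes the use, so gimplify_type_sizes evaluates the
   bound n.  (Illustrative sketch.)  */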

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
        if (alias != error_mark_node)
          {
            *stmt_p = alias;
            TREE_USED (alias) |= TREE_USED (stmt);
          }
        *walk_subtrees = 0;
        return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (UNLIKELY (wtd->omp_ctx != NULL)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT
                            | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;
                DECL_CONTEXT (using_directive) = current_function_decl;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                     cp_genericize_r, cp_walk_subtrees);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (warning_suppressed_p (stmt /* What warning?  */))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
         normal functions.  */
      if (concept_check_p (stmt))
        {
          *stmt_p = evaluate_concept_check (stmt);
          *walk_subtrees = 0;
          break;
        }

      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
         composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
        {
          tree *data[4] = { NULL, NULL, NULL, NULL };
          tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
                                  find_combined_omp_for, data, NULL);
          if (inner != NULL_TREE
              && TREE_CODE (inner) == OMP_FOR)
            {
              for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
                if (OMP_FOR_ORIG_DECLS (inner)
                    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                i)) == TREE_LIST
                    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                   i)))
                  {
                    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
                    /* Class iterators aren't allowed on OMP_SIMD, so the only
                       case we need to solve is distribute parallel for.  */
                    gcc_assert (TREE_CODE (inner) == OMP_FOR
                                && data[1]);
                    tree orig_decl = TREE_PURPOSE (orig);
                    tree c, cl = NULL_TREE;
                    for (c = OMP_FOR_CLAUSES (inner);
                         c; c = OMP_CLAUSE_CHAIN (c))
                      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                           || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
                          && OMP_CLAUSE_DECL (c) == orig_decl)
                        {
                          cl = c;
                          break;
                        }
                    if (cl == NULL_TREE)
                      {
                        for (c = OMP_PARALLEL_CLAUSES (*data[1]);
                             c; c = OMP_CLAUSE_CHAIN (c))
                          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                              && OMP_CLAUSE_DECL (c) == orig_decl)
                            {
                              cl = c;
                              break;
                            }
                      }
                    if (cl)
                      {
                        orig_decl = require_complete_type (orig_decl);
                        tree inner_type = TREE_TYPE (orig_decl);
                        if (orig_decl == error_mark_node)
                          continue;
                        if (TYPE_REF_P (TREE_TYPE (orig_decl)))
                          inner_type = TREE_TYPE (inner_type);

                        while (TREE_CODE (inner_type) == ARRAY_TYPE)
                          inner_type = TREE_TYPE (inner_type);
                        get_copy_ctor (inner_type, tf_warning_or_error);
                      }
                  }
            }
        }
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
                            TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
         improve any optimizations in that case, just break UB code.
         Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
         UBSan covers this with ubsan_instrument_return above where sufficient
         information is provided, while the __builtin_unreachable () below
         if return sanitization is disabled will just result in hard to
         understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            while (!tsi_end_p (i))
              {
                tree p = tsi_stmt (i);
                if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
                  break;
                tsi_prev (&i);
              }
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
1881
1882 void
1883 cp_genericize (tree fndecl)
1884 {
1885 tree t;
1886
1887 /* Fix up the types of parms passed by invisible reference. */
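  /* For example (illustrative only), given

       struct S { S (const S &); };
       void f (S s);

     S is TREE_ADDRESSABLE because of the nontrivial copy constructor,
     so the parameter below is rewritten to be passed by invisible
     reference.  */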
1888 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1889 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1890 {
1891 /* If a function's arguments are copied to create a thunk,
1892 then DECL_BY_REFERENCE will be set -- but the type of the
1893 argument will be a pointer type, so we will never get
1894 here. */
1895 gcc_assert (!DECL_BY_REFERENCE (t));
1896 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1897 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1898 DECL_BY_REFERENCE (t) = 1;
1899 TREE_ADDRESSABLE (t) = 0;
1900 relayout_decl (t);
1901 }
1902
1903 /* Do the same for the return value. */
1904 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1905 {
1906 t = DECL_RESULT (fndecl);
1907 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1908 DECL_BY_REFERENCE (t) = 1;
1909 TREE_ADDRESSABLE (t) = 0;
1910 relayout_decl (t);
1911 if (DECL_NAME (t))
1912 {
1913 /* Adjust DECL_VALUE_EXPR of the original var. */
1914 tree outer = outer_curly_brace_block (current_function_decl);
1915 tree var;
1916
1917 if (outer)
1918 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1919 if (VAR_P (var)
1920 && DECL_NAME (t) == DECL_NAME (var)
1921 && DECL_HAS_VALUE_EXPR_P (var)
1922 && DECL_VALUE_EXPR (var) == t)
1923 {
1924 tree val = convert_from_reference (t);
1925 SET_DECL_VALUE_EXPR (var, val);
1926 break;
1927 }
1928 }
1929 }
1930
1931 /* If we're a clone, the body is already GIMPLE. */
1932 if (DECL_CLONED_FUNCTION_P (fndecl))
1933 return;
1934
1935 /* Allow cp_genericize calls to be nested. */
1936 bc_state_t save_state;
1937 save_bc_state (&save_state);
1938
1939 /* We do want to see every occurrence of the parms, so we can't just use
1940 walk_tree's hash functionality. */
1941 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1942
1943 cp_maybe_instrument_return (fndecl);
1944
1945 /* Do everything else. */
1946 c_genericize (fndecl);
1947 restore_bc_state (&save_state);
1948 }
1949 \f
1950 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1951 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1952 actually only takes one argument. */
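/* A rough sketch (for illustration only) of what is built below for
   an array DST of element type T, with copy function FN:

     T *p1 = &dst[0];
     T *end1 = p1 + sizeof (dst) / sizeof (T);
     T *p2 = &src[0];
   lab:
     FN (p1, p2);
     p1++;
     p2++;
     if (p1 != end1)
       goto lab;
 */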
1953
1954 static tree
1955 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1956 {
1957 tree defparm, parm, t;
1958 int i = 0;
1959 int nargs;
1960 tree *argarray;
1961
1962 if (fn == NULL)
1963 return NULL;
1964
1965 nargs = list_length (DECL_ARGUMENTS (fn));
1966 argarray = XALLOCAVEC (tree, nargs);
1967
1968 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1969 if (arg2)
1970 defparm = TREE_CHAIN (defparm);
1971
1972 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1973 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1974 {
1975 tree inner_type = TREE_TYPE (arg1);
1976 tree start1, end1, p1;
1977 tree start2 = NULL, p2 = NULL;
1978 tree ret = NULL, lab;
1979
1980 start1 = arg1;
1981 start2 = arg2;
1982 do
1983 {
1984 inner_type = TREE_TYPE (inner_type);
1985 start1 = build4 (ARRAY_REF, inner_type, start1,
1986 size_zero_node, NULL, NULL);
1987 if (arg2)
1988 start2 = build4 (ARRAY_REF, inner_type, start2,
1989 size_zero_node, NULL, NULL);
1990 }
1991 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1992 start1 = build_fold_addr_expr_loc (input_location, start1);
1993 if (arg2)
1994 start2 = build_fold_addr_expr_loc (input_location, start2);
1995
1996 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1997 end1 = fold_build_pointer_plus (start1, end1);
1998
1999 p1 = create_tmp_var (TREE_TYPE (start1));
2000 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2001 append_to_statement_list (t, &ret);
2002
2003 if (arg2)
2004 {
2005 p2 = create_tmp_var (TREE_TYPE (start2));
2006 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2007 append_to_statement_list (t, &ret);
2008 }
2009
2010 lab = create_artificial_label (input_location);
2011 t = build1 (LABEL_EXPR, void_type_node, lab);
2012 append_to_statement_list (t, &ret);
2013
2014 argarray[i++] = p1;
2015 if (arg2)
2016 argarray[i++] = p2;
2017 /* Handle default arguments. */
2018 for (parm = defparm; parm && parm != void_list_node;
2019 parm = TREE_CHAIN (parm), i++)
2020 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2021 TREE_PURPOSE (parm), fn,
2022 i - is_method, tf_warning_or_error);
2023 t = build_call_a (fn, i, argarray);
2024 t = fold_convert (void_type_node, t);
2025 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2026 append_to_statement_list (t, &ret);
2027
2028 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2029 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2030 append_to_statement_list (t, &ret);
2031
2032 if (arg2)
2033 {
2034 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2035 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2036 append_to_statement_list (t, &ret);
2037 }
2038
2039 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2040 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2041 append_to_statement_list (t, &ret);
2042
2043 return ret;
2044 }
2045 else
2046 {
2047 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2048 if (arg2)
2049 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2050 /* Handle default arguments. */
2051 for (parm = defparm; parm && parm != void_list_node;
2052 parm = TREE_CHAIN (parm), i++)
2053 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2054 TREE_PURPOSE (parm), fn,
2055 i - is_method, tf_warning_or_error);
2056 t = build_call_a (fn, i, argarray);
2057 t = fold_convert (void_type_node, t);
2058 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2059 }
2060 }
2061
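/* Note for the hooks below: as used here, CP_OMP_CLAUSE_INFO is a
   3-element TREE_VEC whose element 0 holds the (default or copy)
   constructor, element 1 the destructor, and element 2 the
   assignment operator.  */
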
2062 /* Return code to initialize DECL with its default constructor, or
2063 NULL if there's nothing to do. */
2064
2065 tree
2066 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2067 {
2068 tree info = CP_OMP_CLAUSE_INFO (clause);
2069 tree ret = NULL;
2070
2071 if (info)
2072 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2073
2074 return ret;
2075 }
2076
2077 /* Return code to initialize DST with a copy constructor from SRC. */
2078
2079 tree
2080 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2081 {
2082 tree info = CP_OMP_CLAUSE_INFO (clause);
2083 tree ret = NULL;
2084
2085 if (info)
2086 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2087 if (ret == NULL)
2088 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2089
2090 return ret;
2091 }
2092
2093 /* Similarly, except use an assignment operator instead. */
2094
2095 tree
2096 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2097 {
2098 tree info = CP_OMP_CLAUSE_INFO (clause);
2099 tree ret = NULL;
2100
2101 if (info)
2102 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2103 if (ret == NULL)
2104 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2105
2106 return ret;
2107 }
2108
2109 /* Return code to destroy DECL. */
2110
2111 tree
2112 cxx_omp_clause_dtor (tree clause, tree decl)
2113 {
2114 tree info = CP_OMP_CLAUSE_INFO (clause);
2115 tree ret = NULL;
2116
2117 if (info)
2118 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2119
2120 return ret;
2121 }
2122
2123 /* True if OpenMP should privatize what this DECL points to rather
2124 than the DECL itself. */
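/* E.g. (illustrative) for "int &r" appearing in a private clause,
   the int that R refers to is privatized, rather than the reference
   itself.  */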
2125
2126 bool
2127 cxx_omp_privatize_by_reference (const_tree decl)
2128 {
2129 return (TYPE_REF_P (TREE_TYPE (decl))
2130 || is_invisiref_parm (decl));
2131 }
2132
2133 /* Return true if DECL is a const-qualified variable whose type has no mutable member. */
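/* For example (illustrative only):

     struct A { int i; };             // const A qualifies
     struct B { mutable int i; };     // const B does not

   B::i stays writable even through a const lvalue, so a const B is
   not treated as predetermined shared.  */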
2134 bool
2135 cxx_omp_const_qual_no_mutable (tree decl)
2136 {
2137 tree type = TREE_TYPE (decl);
2138 if (TYPE_REF_P (type))
2139 {
2140 if (!is_invisiref_parm (decl))
2141 return false;
2142 type = TREE_TYPE (type);
2143
2144 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2145 {
2146 /* NRV (the named return value optimization) doesn't preserve
2147 const qualification of the variable's type. */
2148 tree outer = outer_curly_brace_block (current_function_decl);
2149 tree var;
2150
2151 if (outer)
2152 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2153 if (VAR_P (var)
2154 && DECL_NAME (decl) == DECL_NAME (var)
2155 && (TYPE_MAIN_VARIANT (type)
2156 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2157 {
2158 if (TYPE_READONLY (TREE_TYPE (var)))
2159 type = TREE_TYPE (var);
2160 break;
2161 }
2162 }
2163 }
2164
2165 if (type == error_mark_node)
2166 return false;
2167
2168 /* Variables with const-qualified type having no mutable member
2169 are predetermined shared. */
2170 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2171 return true;
2172
2173 return false;
2174 }
2175
2176 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2177 of DECL is predetermined. */
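/* Illustrative example (not from the sources): inside a member
   function of

     struct S { static int s; void f (); };

   S::s is predetermined shared, and the implicit 'this' pointer is
   predetermined firstprivate and may not be named in a data-sharing
   clause.  */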
2178
2179 enum omp_clause_default_kind
2180 cxx_omp_predetermined_sharing_1 (tree decl)
2181 {
2182 /* Static data members are predetermined shared. */
2183 if (TREE_STATIC (decl))
2184 {
2185 tree ctx = CP_DECL_CONTEXT (decl);
2186 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2187 return OMP_CLAUSE_DEFAULT_SHARED;
2188
2189 if (c_omp_predefined_variable (decl))
2190 return OMP_CLAUSE_DEFAULT_SHARED;
2191 }
2192
2193 /* 'this' may not be specified in data-sharing clauses; still, we need
2194 to predetermine it firstprivate. */
2195 if (decl == current_class_ptr)
2196 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2197
2198 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2199 }
2200
2201 /* Likewise, but also include the artificial vars. We don't want to
2202 disallow artificial vars from being mentioned in explicit clauses,
2203 as we use artificial vars e.g. for loop constructs with random
2204 access iterators other than pointers, but during gimplification
2205 we want to treat them as predetermined. */
2206
2207 enum omp_clause_default_kind
2208 cxx_omp_predetermined_sharing (tree decl)
2209 {
2210 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2211 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2212 return ret;
2213
2214 /* Predetermine artificial variables holding integral values; those
2215 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2216 gimplification. */
2217 if (VAR_P (decl)
2218 && DECL_ARTIFICIAL (decl)
2219 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2220 && !(DECL_LANG_SPECIFIC (decl)
2221 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2222 return OMP_CLAUSE_DEFAULT_SHARED;
2223
2224 /* Similarly for typeinfo symbols. */
2225 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2226 return OMP_CLAUSE_DEFAULT_SHARED;
2227
2228 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2229 }
2230
2231 enum omp_clause_defaultmap_kind
2232 cxx_omp_predetermined_mapping (tree decl)
2233 {
2234 /* Predetermine artificial variables holding integral values; those
2235 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2236 gimplification. */
2237 if (VAR_P (decl)
2238 && DECL_ARTIFICIAL (decl)
2239 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2240 && !(DECL_LANG_SPECIFIC (decl)
2241 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2242 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2243
2244 if (c_omp_predefined_variable (decl))
2245 return OMP_CLAUSE_DEFAULTMAP_TO;
2246
2247 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2248 }
2249
2250 /* Finalize an implicitly determined clause. */
2251
2252 void
2253 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2254 {
2255 tree decl, inner_type;
2256 bool make_shared = false;
2257
2258 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2259 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2260 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2261 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2262 return;
2263
2264 decl = OMP_CLAUSE_DECL (c);
2265 decl = require_complete_type (decl);
2266 inner_type = TREE_TYPE (decl);
2267 if (decl == error_mark_node)
2268 make_shared = true;
2269 else if (TYPE_REF_P (TREE_TYPE (decl)))
2270 inner_type = TREE_TYPE (inner_type);
2271
2272 /* We're interested in the base element, not arrays. */
2273 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2274 inner_type = TREE_TYPE (inner_type);
2275
2276 /* Check for special function availability by building a call to one.
2277 Save the results, because later we won't be in the right context
2278 for making these queries. */
2279 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2280 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2281 if (!make_shared
2282 && CLASS_TYPE_P (inner_type)
2283 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2284 true))
2285 make_shared = true;
2286
2287 if (make_shared)
2288 {
2289 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2290 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2291 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2292 }
2293 }
2294
2295 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2296 disregarded in an OpenMP construct, because it is going to be
2297 remapped during OpenMP lowering. SHARED is true if DECL
2298 is going to be shared, false if it is going to be privatized. */
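/* For instance (illustrative), a lambda capture proxy as in

     int x = 0;
     [&x] { /* ... uses of x ... */ };

   has a DECL_VALUE_EXPR referring to the closure's field; when such
   a decl is privatized in an OpenMP region, that value expression
   must be ignored so the private copy is used instead.  */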
2299
2300 bool
2301 cxx_omp_disregard_value_expr (tree decl, bool shared)
2302 {
2303 if (shared)
2304 return false;
2305 if (VAR_P (decl)
2306 && DECL_HAS_VALUE_EXPR_P (decl)
2307 && DECL_ARTIFICIAL (decl)
2308 && DECL_LANG_SPECIFIC (decl)
2309 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2310 return true;
2311 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2312 return true;
2313 return false;
2314 }
2315
2316 /* Fold expression X which is used as an rvalue if RVAL is true. */
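/* For example (illustration only), given

     const int n = 42;

   an rvalue use of N is replaced by the constant 42 via
   decl_constant_value below, while an lvalue use keeps the decl.  */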
2317
2318 tree
2319 cp_fold_maybe_rvalue (tree x, bool rval)
2320 {
2321 while (true)
2322 {
2323 x = cp_fold (x);
2324 if (rval)
2325 x = mark_rvalue_use (x);
2326 if (rval && DECL_P (x)
2327 && !TYPE_REF_P (TREE_TYPE (x)))
2328 {
2329 tree v = decl_constant_value (x);
2330 if (v != x && v != error_mark_node)
2331 {
2332 x = v;
2333 continue;
2334 }
2335 }
2336 break;
2337 }
2338 return x;
2339 }
2340
2341 /* Fold expression X which is used as an rvalue. */
2342
2343 tree
2344 cp_fold_rvalue (tree x)
2345 {
2346 return cp_fold_maybe_rvalue (x, true);
2347 }
2348
2349 /* Perform folding on expression X. */
2350
2351 tree
2352 cp_fully_fold (tree x)
2353 {
2354 if (processing_template_decl)
2355 return x;
2356 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2357 have to call both. */
2358 if (cxx_dialect >= cxx11)
2359 {
2360 x = maybe_constant_value (x);
2361 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2362 a TARGET_EXPR; undo that here. */
2363 if (TREE_CODE (x) == TARGET_EXPR)
2364 x = TARGET_EXPR_INITIAL (x);
2365 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2366 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2367 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2368 x = TREE_OPERAND (x, 0);
2369 }
2370 return cp_fold_rvalue (x);
2371 }
2372
2373 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2374 in some cases. */
2375
2376 tree
2377 cp_fully_fold_init (tree x)
2378 {
2379 if (processing_template_decl)
2380 return x;
2381 x = cp_fully_fold (x);
2382 cp_fold_data data (/*genericize*/false);
2383 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2384 return x;
2385 }
2386
2387 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2388 and certain changes are made to the folding done. Or should be (FIXME). We
2389 never touch maybe_const, as it is only used for the C front-end
2390 C_MAYBE_CONST_EXPR. */
2391
2392 tree
2393 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2394 {
2395 return cp_fold_maybe_rvalue (x, !lval);
2396 }
2397
2398 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2399
2400 /* Empty out the whole FOLD_CACHE. */
2401
2402 void
2403 clear_fold_cache (void)
2404 {
2405 if (fold_cache != NULL)
2406 fold_cache->empty ();
2407 }
2408
2409 /* This function tries to fold an expression X.
2410 To avoid combinatorial explosion, folding results are kept in fold_cache.
2411 If X is invalid, we don't fold at all.
2412 For performance reasons we don't cache expressions representing a
2413 declaration or constant.
2414 Returns X or its folded variant. */
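/* A minimal sketch of the memoization scheme used below (hypothetical
   helper names, for illustration only):

     tree
     fold_memo (tree x)
     {
       if (tree *hit = cache->get (x))
         return *hit;           // reuse an earlier result
       tree r = do_fold (x);
       cache->put (x, r);
       if (r != x)
         cache->put (r, r);     // don't refold a folded result
       return r;
     }
 */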
2415
2416 static tree
2417 cp_fold (tree x)
2418 {
2419 tree op0, op1, op2, op3;
2420 tree org_x = x, r = NULL_TREE;
2421 enum tree_code code;
2422 location_t loc;
2423 bool rval_ops = true;
2424
2425 if (!x || x == error_mark_node)
2426 return x;
2427
2428 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2429 return x;
2430
2431 /* Don't bother to cache DECLs or constants. */
2432 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2433 return x;
2434
2435 if (fold_cache == NULL)
2436 fold_cache = hash_map<tree, tree>::create_ggc (101);
2437
2438 if (tree *cached = fold_cache->get (x))
2439 return *cached;
2440
2441 uid_sensitive_constexpr_evaluation_checker c;
2442
2443 code = TREE_CODE (x);
2444 switch (code)
2445 {
2446 case CLEANUP_POINT_EXPR:
2447 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2448 effects. */
2449 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2450 if (!TREE_SIDE_EFFECTS (r))
2451 x = r;
2452 break;
2453
2454 case SIZEOF_EXPR:
2455 x = fold_sizeof_expr (x);
2456 break;
2457
2458 case VIEW_CONVERT_EXPR:
2459 rval_ops = false;
2460 /* FALLTHRU */
2461 case NON_LVALUE_EXPR:
2462 CASE_CONVERT:
2463
2464 if (VOID_TYPE_P (TREE_TYPE (x)))
2465 {
2466 /* This is just to make sure we don't end up with casts to
2467 void from error_mark_node. If we just returned x, then
2468 cp_fold_r might fold the operand into error_mark_node and
2469 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2470 during gimplification doesn't like such casts.
2471 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2472 folded operand should already be in the caches, and if called
2473 from cp_fold_r the operand will be modified in place. */
2474 op0 = cp_fold (TREE_OPERAND (x, 0));
2475 if (op0 == error_mark_node)
2476 x = error_mark_node;
2477 break;
2478 }
2479
2480 loc = EXPR_LOCATION (x);
2481 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2482
2483 if (code == CONVERT_EXPR
2484 && SCALAR_TYPE_P (TREE_TYPE (x))
2485 && op0 != void_node)
2486 /* During parsing we used convert_to_*_nofold; re-convert now using the
2487 folding variants, since fold() doesn't do those transformations. */
2488 x = fold (convert (TREE_TYPE (x), op0));
2489 else if (op0 != TREE_OPERAND (x, 0))
2490 {
2491 if (op0 == error_mark_node)
2492 x = error_mark_node;
2493 else
2494 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2495 }
2496 else
2497 x = fold (x);
2498
2499 /* Conversion of an out-of-range value has implementation-defined
2500 behavior; the language considers it different from arithmetic
2501 overflow, which is undefined. */
2502 if (TREE_CODE (op0) == INTEGER_CST
2503 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2504 TREE_OVERFLOW (x) = false;
2505
2506 break;
2507
2508 case INDIRECT_REF:
2509 /* We don't need the decltype(auto) obfuscation anymore. */
2510 if (REF_PARENTHESIZED_P (x))
2511 {
2512 tree p = maybe_undo_parenthesized_ref (x);
2513 if (p != x)
2514 return cp_fold (p);
2515 }
2516 goto unary;
2517
2518 case ADDR_EXPR:
2519 loc = EXPR_LOCATION (x);
2520 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2521
2522 /* Cope with user tricks that amount to offsetof. */
2523 if (op0 != error_mark_node
2524 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2525 {
2526 tree val = get_base_address (op0);
2527 if (val
2528 && INDIRECT_REF_P (val)
2529 && COMPLETE_TYPE_P (TREE_TYPE (val))
2530 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2531 {
2532 val = TREE_OPERAND (val, 0);
2533 STRIP_NOPS (val);
2534 val = maybe_constant_value (val);
2535 if (TREE_CODE (val) == INTEGER_CST)
2536 return fold_offsetof (op0, TREE_TYPE (x));
2537 }
2538 }
2539 goto finish_unary;
2540
2541 case REALPART_EXPR:
2542 case IMAGPART_EXPR:
2543 rval_ops = false;
2544 /* FALLTHRU */
2545 case CONJ_EXPR:
2546 case FIX_TRUNC_EXPR:
2547 case FLOAT_EXPR:
2548 case NEGATE_EXPR:
2549 case ABS_EXPR:
2550 case ABSU_EXPR:
2551 case BIT_NOT_EXPR:
2552 case TRUTH_NOT_EXPR:
2553 case FIXED_CONVERT_EXPR:
2554 unary:
2555
2556 loc = EXPR_LOCATION (x);
2557 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2558
2559 finish_unary:
2560 if (op0 != TREE_OPERAND (x, 0))
2561 {
2562 if (op0 == error_mark_node)
2563 x = error_mark_node;
2564 else
2565 {
2566 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2567 if (code == INDIRECT_REF
2568 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2569 {
2570 TREE_READONLY (x) = TREE_READONLY (org_x);
2571 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2572 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2573 }
2574 }
2575 }
2576 else
2577 x = fold (x);
2578
2579 gcc_assert (TREE_CODE (x) != COND_EXPR
2580 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2581 break;
2582
2583 case UNARY_PLUS_EXPR:
2584 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2585 if (op0 == error_mark_node)
2586 x = error_mark_node;
2587 else
2588 x = fold_convert (TREE_TYPE (x), op0);
2589 break;
2590
2591 case POSTDECREMENT_EXPR:
2592 case POSTINCREMENT_EXPR:
2593 case INIT_EXPR:
2594 case PREDECREMENT_EXPR:
2595 case PREINCREMENT_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 rval_ops = false;
2599 /* FALLTHRU */
2600 case POINTER_PLUS_EXPR:
2601 case PLUS_EXPR:
2602 case POINTER_DIFF_EXPR:
2603 case MINUS_EXPR:
2604 case MULT_EXPR:
2605 case TRUNC_DIV_EXPR:
2606 case CEIL_DIV_EXPR:
2607 case FLOOR_DIV_EXPR:
2608 case ROUND_DIV_EXPR:
2609 case TRUNC_MOD_EXPR:
2610 case CEIL_MOD_EXPR:
2611 case ROUND_MOD_EXPR:
2612 case RDIV_EXPR:
2613 case EXACT_DIV_EXPR:
2614 case MIN_EXPR:
2615 case MAX_EXPR:
2616 case LSHIFT_EXPR:
2617 case RSHIFT_EXPR:
2618 case LROTATE_EXPR:
2619 case RROTATE_EXPR:
2620 case BIT_AND_EXPR:
2621 case BIT_IOR_EXPR:
2622 case BIT_XOR_EXPR:
2623 case TRUTH_AND_EXPR:
2624 case TRUTH_ANDIF_EXPR:
2625 case TRUTH_OR_EXPR:
2626 case TRUTH_ORIF_EXPR:
2627 case TRUTH_XOR_EXPR:
2628 case LT_EXPR: case LE_EXPR:
2629 case GT_EXPR: case GE_EXPR:
2630 case EQ_EXPR: case NE_EXPR:
2631 case UNORDERED_EXPR: case ORDERED_EXPR:
2632 case UNLT_EXPR: case UNLE_EXPR:
2633 case UNGT_EXPR: case UNGE_EXPR:
2634 case UNEQ_EXPR: case LTGT_EXPR:
2635 case RANGE_EXPR: case COMPLEX_EXPR:
2636
2637 loc = EXPR_LOCATION (x);
2638 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2639 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2640
2641 /* decltype(nullptr) has only one value, so optimize away all comparisons
2642 with that type right away; keeping them in the IL causes trouble for
2643 various optimizations. */
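/* For instance (illustration only), an expression like

     (nullptr == nullptr)

   folds directly to true here; any side effects of the operands are
   retained by omit_two_operands_loc below.  */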
2644 if (COMPARISON_CLASS_P (org_x)
2645 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2646 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2647 {
2648 switch (code)
2649 {
2650 case EQ_EXPR:
2651 x = constant_boolean_node (true, TREE_TYPE (x));
2652 break;
2653 case NE_EXPR:
2654 x = constant_boolean_node (false, TREE_TYPE (x));
2655 break;
2656 default:
2657 gcc_unreachable ();
2658 }
2659 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2660 op0, op1);
2661 }
2662
2663 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2664 {
2665 if (op0 == error_mark_node || op1 == error_mark_node)
2666 x = error_mark_node;
2667 else
2668 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2669 }
2670 else
2671 x = fold (x);
2672
2673 /* This is only needed for -Wnonnull-compare and only if warnings
2674 are suppressed on org_x, but to avoid that option affecting code
2675 generation, we always do it. */
2676 if (COMPARISON_CLASS_P (org_x))
2677 {
2678 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2679 ;
2680 else if (COMPARISON_CLASS_P (x))
2681 {
2682 if (warn_nonnull_compare
2683 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2684 suppress_warning (x, OPT_Wnonnull_compare);
2685 }
2686 /* Otherwise give up on optimizing these; let the GIMPLE folders
2687 optimize them later on. */
2688 else if (op0 != TREE_OPERAND (org_x, 0)
2689 || op1 != TREE_OPERAND (org_x, 1))
2690 {
2691 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2692 if (warn_nonnull_compare
2693 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2694 suppress_warning (x, OPT_Wnonnull_compare);
2695 }
2696 else
2697 x = org_x;
2698 }
2699
2700 break;
2701
2702 case VEC_COND_EXPR:
2703 case COND_EXPR:
2704 loc = EXPR_LOCATION (x);
2705 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2706 op1 = cp_fold (TREE_OPERAND (x, 1));
2707 op2 = cp_fold (TREE_OPERAND (x, 2));
2708
2709 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2710 {
2711 warning_sentinel s (warn_int_in_bool_context);
2712 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2713 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2714 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2715 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2716 }
2717 else if (VOID_TYPE_P (TREE_TYPE (x)))
2718 {
2719 if (TREE_CODE (op0) == INTEGER_CST)
2720 {
2721 /* If the condition is constant, fold can fold away
2722 the COND_EXPR. Some statement-level uses of COND_EXPR
2723 have a NULL branch; build empty statements to avoid a crash in fold. */
2724 if (!op1)
2725 op1 = build_empty_stmt (loc);
2726 if (!op2)
2727 op2 = build_empty_stmt (loc);
2728 }
2729 else
2730 {
2731 /* Otherwise, don't bother folding a void condition, since
2732 it can't produce a constant value. */
2733 if (op0 != TREE_OPERAND (x, 0)
2734 || op1 != TREE_OPERAND (x, 1)
2735 || op2 != TREE_OPERAND (x, 2))
2736 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2737 break;
2738 }
2739 }
2740
2741 if (op0 != TREE_OPERAND (x, 0)
2742 || op1 != TREE_OPERAND (x, 1)
2743 || op2 != TREE_OPERAND (x, 2))
2744 {
2745 if (op0 == error_mark_node
2746 || op1 == error_mark_node
2747 || op2 == error_mark_node)
2748 x = error_mark_node;
2749 else
2750 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2751 }
2752 else
2753 x = fold (x);
2754
2755 /* A COND_EXPR might have incompatible types in branches if one or both
2756 arms are bitfields. If folding exposed such a branch, fix it up. */
2757 if (TREE_CODE (x) != code
2758 && x != error_mark_node
2759 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2760 x = fold_convert (TREE_TYPE (org_x), x);
2761
2762 break;
2763
2764 case CALL_EXPR:
2765 {
2766 tree callee = get_callee_fndecl (x);
2767
2768 /* "Inline" calls to std::move/forward and other cast-like functions
2769 by simply folding them into a corresponding cast to their return
2770 type. This is cheaper than relying on the middle end to do so, and
2771 also means we avoid generating useless debug info for them at all.
2772
2773 At this point the argument has already been converted into a
2774 reference, so it suffices to use a NOP_EXPR to express the
2775 cast. */
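/* E.g. (for illustration) a call

     std::move (obj)

   whose argument has already been converted to T&, is replaced here
   by the equivalent of

     static_cast<T &&> (obj)

   i.e. a bare NOP_EXPR to the function's return type.  */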
2776 if ((OPTION_SET_P (flag_fold_simple_inlines)
2777 ? flag_fold_simple_inlines
2778 : !flag_no_inline)
2779 && call_expr_nargs (x) == 1
2780 && decl_in_std_namespace_p (callee)
2781 && DECL_NAME (callee) != NULL_TREE
2782 && (id_equal (DECL_NAME (callee), "move")
2783 || id_equal (DECL_NAME (callee), "forward")
2784 || id_equal (DECL_NAME (callee), "addressof")
2785 /* This addressof equivalent is used heavily in libstdc++. */
2786 || id_equal (DECL_NAME (callee), "__addressof")
2787 || id_equal (DECL_NAME (callee), "as_const")))
2788 {
2789 r = CALL_EXPR_ARG (x, 0);
2790 /* Check that the return and argument types are sane before
2791 folding. */
2792 if (INDIRECT_TYPE_P (TREE_TYPE (x))
2793 && INDIRECT_TYPE_P (TREE_TYPE (r)))
2794 {
2795 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2796 r = build_nop (TREE_TYPE (x), r);
2797 x = cp_fold (r);
2798 break;
2799 }
2800 }
2801
2802 int sv = optimize, nw = sv;
2803
2804 /* Some built-in function calls will be evaluated at compile-time in
2805 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2806 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2807 if (callee && fndecl_built_in_p (callee) && !optimize
2808 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2809 && current_function_decl
2810 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2811 nw = 1;
2812
2813 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2814 {
2815 switch (DECL_FE_FUNCTION_CODE (callee))
2816 {
2817 /* Defer folding __builtin_is_constant_evaluated. */
2818 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2819 break;
2820 case CP_BUILT_IN_SOURCE_LOCATION:
2821 x = fold_builtin_source_location (EXPR_LOCATION (x));
2822 break;
2823 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2824 x = fold_builtin_is_corresponding_member
2825 (EXPR_LOCATION (x), call_expr_nargs (x),
2826 &CALL_EXPR_ARG (x, 0));
2827 break;
2828 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2829 x = fold_builtin_is_pointer_inverconvertible_with_class
2830 (EXPR_LOCATION (x), call_expr_nargs (x),
2831 &CALL_EXPR_ARG (x, 0));
2832 break;
2833 default:
2834 break;
2835 }
2836 break;
2837 }
2838
2839 if (callee
2840 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2841 BUILT_IN_FRONTEND))
2842 {
2843 x = fold_builtin_source_location (EXPR_LOCATION (x));
2844 break;
2845 }
2846
2847 bool changed = false;
2848 int m = call_expr_nargs (x);
2849 for (int i = 0; i < m; i++)
2850 {
2851 r = cp_fold (CALL_EXPR_ARG (x, i));
2852 if (r != CALL_EXPR_ARG (x, i))
2853 {
2854 if (r == error_mark_node)
2855 {
2856 x = error_mark_node;
2857 break;
2858 }
2859 if (!changed)
2860 x = copy_node (x);
2861 CALL_EXPR_ARG (x, i) = r;
2862 changed = true;
2863 }
2864 }
2865 if (x == error_mark_node)
2866 break;
2867
2868 optimize = nw;
2869 r = fold (x);
2870 optimize = sv;
2871
2872 if (TREE_CODE (r) != CALL_EXPR)
2873 {
2874 x = cp_fold (r);
2875 break;
2876 }
2877
2878 optimize = nw;
2879
2880 /* Invoke maybe_constant_value for functions declared
2881 constexpr and not called with AGGR_INIT_EXPRs.
2882 TODO:
2883 Do constexpr expansion of expressions where the call itself is not
2884 constant, but the call followed by an INDIRECT_REF is. */
2885 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2886 && !flag_no_inline)
2887 r = maybe_constant_value (x);
2888 optimize = sv;
2889
2890 if (TREE_CODE (r) != CALL_EXPR)
2891 {
2892 if (DECL_CONSTRUCTOR_P (callee))
2893 {
2894 loc = EXPR_LOCATION (x);
2895 tree s = build_fold_indirect_ref_loc (loc,
2896 CALL_EXPR_ARG (x, 0));
2897 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2898 }
2899 x = r;
2900 break;
2901 }
2902
2903 break;
2904 }
2905
2906 case CONSTRUCTOR:
2907 {
2908 unsigned i;
2909 constructor_elt *p;
2910 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2911 vec<constructor_elt, va_gc> *nelts = NULL;
2912 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2913 {
2914 tree op = cp_fold (p->value);
2915 if (op != p->value)
2916 {
2917 if (op == error_mark_node)
2918 {
2919 x = error_mark_node;
2920 vec_free (nelts);
2921 break;
2922 }
2923 if (nelts == NULL)
2924 nelts = elts->copy ();
2925 (*nelts)[i].value = op;
2926 }
2927 }
2928 if (nelts)
2929 {
2930 x = build_constructor (TREE_TYPE (x), nelts);
2931 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2932 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2933 }
2934 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2935 x = fold (x);
2936 break;
2937 }
2938 case TREE_VEC:
2939 {
2940 bool changed = false;
2941 int n = TREE_VEC_LENGTH (x);
2942
2943 for (int i = 0; i < n; i++)
2944 {
2945 tree op = cp_fold (TREE_VEC_ELT (x, i));
2946 if (op != TREE_VEC_ELT (x, i))
2947 {
2948 if (!changed)
2949 x = copy_node (x);
2950 TREE_VEC_ELT (x, i) = op;
2951 changed = true;
2952 }
2953 }
2954 }
2955
2956 break;
2957
2958 case ARRAY_REF:
2959 case ARRAY_RANGE_REF:
2960
2961 loc = EXPR_LOCATION (x);
2962 op0 = cp_fold (TREE_OPERAND (x, 0));
2963 op1 = cp_fold (TREE_OPERAND (x, 1));
2964 op2 = cp_fold (TREE_OPERAND (x, 2));
2965 op3 = cp_fold (TREE_OPERAND (x, 3));
2966
2967 if (op0 != TREE_OPERAND (x, 0)
2968 || op1 != TREE_OPERAND (x, 1)
2969 || op2 != TREE_OPERAND (x, 2)
2970 || op3 != TREE_OPERAND (x, 3))
2971 {
2972 if (op0 == error_mark_node
2973 || op1 == error_mark_node
2974 || op2 == error_mark_node
2975 || op3 == error_mark_node)
2976 x = error_mark_node;
2977 else
2978 {
2979 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2980 TREE_READONLY (x) = TREE_READONLY (org_x);
2981 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2982 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2983 }
2984 }
2985
2986 x = fold (x);
2987 break;
2988
2989 case SAVE_EXPR:
2990 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2991 folding, evaluates to an invariant. In that case there is no need
2992 to wrap the folded tree in a SAVE_EXPR. */
2993 r = cp_fold (TREE_OPERAND (x, 0));
2994 if (tree_invariant_p (r))
2995 x = r;
2996 break;
2997
2998 case REQUIRES_EXPR:
2999 x = evaluate_requires_expr (x);
3000 break;
3001
3002 default:
3003 return org_x;
3004 }
3005
3006 if (EXPR_P (x) && TREE_CODE (x) == code)
3007 {
3008 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3009 copy_warning (x, org_x);
3010 }
3011
3012 if (!c.evaluation_restricted_p ())
3013 {
3014 fold_cache->put (org_x, x);
3015 /* Make sure we don't try to fold an already folded result again. */
3016 if (x != org_x)
3017 fold_cache->put (x, x);
3018 }
3019
3020 return x;
3021 }
3022
3023 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3024
3025 tree
3026 lookup_hotness_attribute (tree list)
3027 {
3028 for (; list; list = TREE_CHAIN (list))
3029 {
3030 tree name = get_attribute_name (list);
3031 if (is_attribute_p ("hot", name)
3032 || is_attribute_p ("cold", name)
3033 || is_attribute_p ("likely", name)
3034 || is_attribute_p ("unlikely", name))
3035 break;
3036 }
3037 return list;
3038 }
3039
3040 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3041
3042 static tree
3043 remove_hotness_attribute (tree list)
3044 {
3045 list = remove_attribute ("hot", list);
3046 list = remove_attribute ("cold", list);
3047 list = remove_attribute ("likely", list);
3048 list = remove_attribute ("unlikely", list);
3049 return list;
3050 }
3051
3052 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3053 PREDICT_EXPR. */
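/* For instance (illustrative),

     if (cond)
       [[likely]] do_work ();

   is lowered so that a PREDICT_EXPR marking the branch as hot is
   emitted before the statement itself.  */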
3054
3055 tree
3056 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3057 {
3058 if (std_attrs == error_mark_node)
3059 return std_attrs;
3060 if (tree attr = lookup_hotness_attribute (std_attrs))
3061 {
3062 tree name = get_attribute_name (attr);
3063 bool hot = (is_attribute_p ("hot", name)
3064 || is_attribute_p ("likely", name));
3065 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3066 hot ? TAKEN : NOT_TAKEN);
3067 SET_EXPR_LOCATION (pred, attrs_loc);
3068 add_stmt (pred);
3069 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3070 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3071 get_attribute_name (other), name);
3072 std_attrs = remove_hotness_attribute (std_attrs);
3073 }
3074 return std_attrs;
3075 }
3076
3077 /* Helper of fold_builtin_source_location, return the
3078 std::source_location::__impl type after performing verification
3079 on it. LOC is used for reporting any errors. */
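/* The layout verified here matches what libstdc++ provides (shown
   simplified, for illustration):

     struct source_location::__impl
     {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };
 */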
3080
3081 static tree
3082 get_source_location_impl_type (location_t loc)
3083 {
3084 tree name = get_identifier ("source_location");
3085 tree decl = lookup_qualified_name (std_node, name);
3086 if (TREE_CODE (decl) != TYPE_DECL)
3087 {
3088 auto_diagnostic_group d;
3089 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3090 qualified_name_lookup_error (std_node, name, decl, loc);
3091 else
3092 error_at (loc, "%qD is not a type", decl);
3093 return error_mark_node;
3094 }
3095 name = get_identifier ("__impl");
3096 tree type = TREE_TYPE (decl);
3097 decl = lookup_qualified_name (type, name);
3098 if (TREE_CODE (decl) != TYPE_DECL)
3099 {
3100 auto_diagnostic_group d;
3101 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3102 qualified_name_lookup_error (type, name, decl, loc);
3103 else
3104 error_at (loc, "%qD is not a type", decl);
3105 return error_mark_node;
3106 }
3107 type = TREE_TYPE (decl);
3108 if (TREE_CODE (type) != RECORD_TYPE)
3109 {
3110 error_at (loc, "%qD is not a class type", decl);
3111 return error_mark_node;
3112 }
3113
3114 int cnt = 0;
3115 for (tree field = TYPE_FIELDS (type);
3116 (field = next_aggregate_field (field)) != NULL_TREE;
3117 field = DECL_CHAIN (field))
3118 {
3119 if (DECL_NAME (field) != NULL_TREE)
3120 {
3121 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3122 if (strcmp (n, "_M_file_name") == 0
3123 || strcmp (n, "_M_function_name") == 0)
3124 {
3125 if (TREE_TYPE (field) != const_string_type_node)
3126 {
3127 error_at (loc, "%qD does not have %<const char *%> type",
3128 field);
3129 return error_mark_node;
3130 }
3131 cnt++;
3132 continue;
3133 }
3134 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3135 {
3136 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3137 {
3138 error_at (loc, "%qD does not have integral type", field);
3139 return error_mark_node;
3140 }
3141 cnt++;
3142 continue;
3143 }
3144 }
3145 cnt = 0;
3146 break;
3147 }
3148 if (cnt != 4)
3149 {
3150 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3151 "non-static data members %<_M_file_name%>, "
3152 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3153 return error_mark_node;
3154 }
3155 return build_qualified_type (type, TYPE_QUAL_CONST);
3156 }
3157
3158 /* Entry type for the source_location_table hash table. */
3159 struct GTY((for_user)) source_location_table_entry {
3160 location_t loc;
3161 unsigned uid;
3162 tree var;
3163 };
3164
3165 /* Traits class for the source_location_table hash table below. */
3166
3167 struct source_location_table_entry_hash
3168 : ggc_remove <source_location_table_entry>
3169 {
3170 typedef source_location_table_entry value_type;
3171 typedef source_location_table_entry compare_type;
3172
3173 static hashval_t
3174 hash (const source_location_table_entry &ref)
3175 {
3176 inchash::hash hstate (0);
3177 hstate.add_int (ref.loc);
3178 hstate.add_int (ref.uid);
3179 return hstate.end ();
3180 }
3181
3182 static bool
3183 equal (const source_location_table_entry &ref1,
3184 const source_location_table_entry &ref2)
3185 {
3186 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3187 }
3188
3189 static void
3190 mark_deleted (source_location_table_entry &ref)
3191 {
3192 ref.loc = UNKNOWN_LOCATION;
3193 ref.uid = -1U;
3194 ref.var = NULL_TREE;
3195 }
3196
3197 static const bool empty_zero_p = true;
3198
3199 static void
3200 mark_empty (source_location_table_entry &ref)
3201 {
3202 ref.loc = UNKNOWN_LOCATION;
3203 ref.uid = 0;
3204 ref.var = NULL_TREE;
3205 }
3206
3207 static bool
3208 is_deleted (const source_location_table_entry &ref)
3209 {
3210 return (ref.loc == UNKNOWN_LOCATION
3211 && ref.uid == -1U
3212 && ref.var == NULL_TREE);
3213 }
3214
3215 static bool
3216 is_empty (const source_location_table_entry &ref)
3217 {
3218 return (ref.loc == UNKNOWN_LOCATION
3219 && ref.uid == 0
3220 && ref.var == NULL_TREE);
3221 }
3222
3223 static void
3224 pch_nx (source_location_table_entry &p)
3225 {
3226 extern void gt_pch_nx (source_location_table_entry &);
3227 gt_pch_nx (p);
3228 }
3229
3230 static void
3231 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3232 {
3233 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3234 void *);
3235 gt_pch_nx (&p, op, cookie);
3236 }
3237 };
3238
3239 static GTY(()) hash_table <source_location_table_entry_hash>
3240 *source_location_table;
3241 static GTY(()) unsigned int source_location_id;
3242
3243 /* Fold __builtin_source_location () call. LOC is the location
3244 of the call. */
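/* For illustration, a use such as

     const void *p = __builtin_source_location ();

   folds to the address of a static constant
   std::source_location::__impl object describing the call site; one
   such object is shared per location/function pair via the table
   above.  */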
3245
3246 tree
3247 fold_builtin_source_location (location_t loc)
3248 {
3249 if (source_location_impl == NULL_TREE)
3250 {
3251 auto_diagnostic_group d;
3252 source_location_impl = get_source_location_impl_type (loc);
3253 if (source_location_impl == error_mark_node)
3254 inform (loc, "evaluating %qs", "__builtin_source_location");
3255 }
3256 if (source_location_impl == error_mark_node)
3257 return build_zero_cst (const_ptr_type_node);
3258 if (source_location_table == NULL)
3259 source_location_table
3260 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3261 const line_map_ordinary *map;
3262 source_location_table_entry entry;
3263 entry.loc
3264 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3265 &map);
3266 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3267 entry.var = error_mark_node;
3268 source_location_table_entry *entryp
3269 = source_location_table->find_slot (entry, INSERT);
3270 tree var;
3271 if (entryp->var)
3272 var = entryp->var;
3273 else
3274 {
3275 char tmp_name[32];
3276 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3277 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3278 source_location_impl);
3279 TREE_STATIC (var) = 1;
3280 TREE_PUBLIC (var) = 0;
3281 DECL_ARTIFICIAL (var) = 1;
3282 DECL_IGNORED_P (var) = 1;
3283 DECL_EXTERNAL (var) = 0;
3284 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3285 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3286 layout_decl (var, 0);
3287
3288 vec<constructor_elt, va_gc> *v = NULL;
3289 vec_alloc (v, 4);
3290 for (tree field = TYPE_FIELDS (source_location_impl);
3291 (field = next_aggregate_field (field)) != NULL_TREE;
3292 field = DECL_CHAIN (field))
3293 {
3294 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3295 tree val = NULL_TREE;
3296 if (strcmp (n, "_M_file_name") == 0)
3297 {
3298 if (const char *fname = LOCATION_FILE (loc))
3299 {
3300 fname = remap_macro_filename (fname);
3301 val = build_string_literal (strlen (fname) + 1, fname);
3302 }
3303 else
3304 val = build_string_literal (1, "");
3305 }
3306 else if (strcmp (n, "_M_function_name") == 0)
3307 {
3308 const char *name = "";
3309
3310 if (current_function_decl)
3311 name = cxx_printable_name (current_function_decl, 2);
3312
3313 val = build_string_literal (strlen (name) + 1, name);
3314 }
3315 else if (strcmp (n, "_M_line") == 0)
3316 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3317 else if (strcmp (n, "_M_column") == 0)
3318 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3319 else
3320 gcc_unreachable ();
3321 CONSTRUCTOR_APPEND_ELT (v, field, val);
3322 }
3323
3324 tree ctor = build_constructor (source_location_impl, v);
3325 TREE_CONSTANT (ctor) = 1;
3326 TREE_STATIC (ctor) = 1;
3327 DECL_INITIAL (var) = ctor;
3328 varpool_node::finalize_decl (var);
3329 *entryp = entry;
3330 entryp->var = var;
3331 }
3332
3333 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3334 }
3335
3336 #include "gt-cp-cp-gimplify.h"