/* gcc/gimple-low.c */
/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
6de9cd9a 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5 27#include "tree-pass.h"
40e23961 28#include "fold-const.h"
d8a2d370
DN
29#include "tree-nested.h"
30#include "calls.h"
5be5c238 31#include "gimple-iterator.h"
4484a35a 32#include "gimple-low.h"
e59a1c22
ML
33#include "predict.h"
34#include "gimple-predict.h"
6de9cd9a 35
726a989a
RB
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Artificial label emitted at the end of the function; duplicate
     returns are rewritten as gotos to this label.  */
  tree label;
  /* The representative GIMPLE_RETURN that LABEL stands for.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
57
726a989a 58
6de9cd9a
DN
/* State threaded through the lowering recursion.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  Used to drop
     unreachable returns and to size up try/catch fallthruness.  */
  bool cannot_fallthru;
};
71
726a989a
RB
/* Forward declarations for the mutually recursive lowering routines.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
6de9cd9a 78
726a989a
RB
79
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  Returns 0 (no TODO flags for the pass manager).  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  /* Mark the outermost block so lower_gimple_bind can detect duplicates.  */
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  /* Subblocks were chained in reverse order while lowering; restore
     source order.  */
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
159
27a4cd48
DM
namespace {

/* Pass descriptor for the High-to-Low GIMPLE lowering pass.  */
const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

/* Factory for the pass manager.  */
gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
194
726a989a 195/* Lower sequence SEQ. Unlike gimplification the statements are not relowered
6de9cd9a
DN
196 when they are changed -- if this has to be done, the lowering routine must
197 do it explicitly. DATA is passed through the recursion. */
198
1ebf7687 199static void
355a7673 200lower_sequence (gimple_seq *seq, struct lower_data *data)
6de9cd9a 201{
726a989a 202 gimple_stmt_iterator gsi;
6de9cd9a 203
355a7673 204 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
726a989a 205 lower_stmt (&gsi, data);
6de9cd9a
DN
206}
207
50674e96 208
726a989a 209/* Lower the OpenMP directive statement pointed by GSI. DATA is
50674e96
DN
210 passed through the recursion. */
211
212static void
726a989a 213lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
50674e96 214{
355fe088 215 gimple *stmt;
b8698a0f 216
726a989a 217 stmt = gsi_stmt (*gsi);
50674e96 218
355a7673
MM
219 lower_sequence (gimple_omp_body_ptr (stmt), data);
220 gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
726a989a 221 gimple_omp_set_body (stmt, NULL);
355a7673 222 gsi_next (gsi);
50674e96
DN
223}
224
225
a141816c
EB
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* Unconditional control transfers never fall through.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* This return is unreachable; drop it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      /* Lower both the normal-path and exception-path bodies.  */
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    /* Statements with no sub-sequences to lower; all may fall through.  */
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Argument expressions also need their TREE_BLOCK set.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
382
/* Lower a bind_expr TSI.  DATA is passed through the recursion.
   Links the bind's block into the block tree, records its variables,
   lowers its body, and finally splices the body into the enclosing
   sequence, discarding the GIMPLE_BIND itself.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      /* Subblocks were chained in reverse; restore source order and
	 pop back to the enclosing block.  */
      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
456
f778c049
EB
/* Same as above, but for a GIMPLE_TRY_CATCH.  Lowers the try body and
   all handler bodies, computing whether the construct as a whole may
   fall through.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
519
6737ba67 520
cf2d1b38
AM
521/* Try to determine whether a TRY_CATCH expression can fall through.
522 This is a subroutine of gimple_stmt_may_fallthru. */
726a989a
RB
523
524static bool
538dd0b7 525gimple_try_catch_may_fallthru (gtry *stmt)
726a989a
RB
526{
527 gimple_stmt_iterator i;
528
529 /* We don't handle GIMPLE_TRY_FINALLY. */
530 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
531
532 /* If the TRY block can fall through, the whole TRY_CATCH can
533 fall through. */
534 if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
535 return true;
536
355a7673 537 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
726a989a
RB
538 switch (gimple_code (gsi_stmt (i)))
539 {
540 case GIMPLE_CATCH:
541 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
542 catch expression and a body. The whole try/catch may fall
543 through iff any of the catch bodies falls through. */
544 for (; !gsi_end_p (i); gsi_next (&i))
545 {
538dd0b7
DM
546 if (gimple_seq_may_fallthru (gimple_catch_handler (
547 as_a <gcatch *> (gsi_stmt (i)))))
726a989a
RB
548 return true;
549 }
550 return false;
551
552 case GIMPLE_EH_FILTER:
553 /* The exception filter expression only matters if there is an
554 exception. If the exception does not match EH_FILTER_TYPES,
555 we will execute EH_FILTER_FAILURE, and we will fall through
556 if that falls through. If the exception does match
557 EH_FILTER_TYPES, the stack unwinder will continue up the
558 stack, so we will not fall through. We don't know whether we
559 will throw an exception which matches EH_FILTER_TYPES or not,
560 so we just ignore EH_FILTER_TYPES and assume that we might
561 throw an exception which doesn't match. */
562 return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
563
564 default:
565 /* This case represents statements to be executed when an
566 exception occurs. Those statements are implicitly followed
567 by a GIMPLE_RESX to resume execution after the exception. So
568 in this case the try/catch never falls through. */
569 return false;
570 }
571}
572
573
726a989a
RB
574/* Try to determine if we can continue executing the statement
575 immediately following STMT. This guess need not be 100% accurate;
576 simply be conservative and return true if we don't know. This is
577 used only to avoid stupidly generating extra code. If we're wrong,
578 we'll just delete the extra code later. */
579
580bool
355fe088 581gimple_stmt_may_fallthru (gimple *stmt)
6de9cd9a 582{
726a989a
RB
583 if (!stmt)
584 return true;
6de9cd9a 585
726a989a
RB
586 switch (gimple_code (stmt))
587 {
588 case GIMPLE_GOTO:
589 case GIMPLE_RETURN:
590 case GIMPLE_RESX:
b8698a0f 591 /* Easy cases. If the last statement of the seq implies
726a989a
RB
592 control transfer, then we can't fall through. */
593 return false;
6de9cd9a 594
726a989a 595 case GIMPLE_SWITCH:
a141816c
EB
596 /* Switch has already been lowered and represents a branch
597 to a selected label and hence can't fall through. */
598 return false;
6de9cd9a 599
726a989a
RB
600 case GIMPLE_COND:
601 /* GIMPLE_COND's are already lowered into a two-way branch. They
602 can't fall through. */
603 return false;
6de9cd9a 604
726a989a 605 case GIMPLE_BIND:
538dd0b7
DM
606 return gimple_seq_may_fallthru (
607 gimple_bind_body (as_a <gbind *> (stmt)));
6de9cd9a 608
726a989a
RB
609 case GIMPLE_TRY:
610 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
538dd0b7 611 return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
6de9cd9a 612
726a989a 613 /* It must be a GIMPLE_TRY_FINALLY. */
6de9cd9a 614
726a989a
RB
615 /* The finally clause is always executed after the try clause,
616 so if it does not fall through, then the try-finally will not
617 fall through. Otherwise, if the try clause does not fall
618 through, then when the finally clause falls through it will
619 resume execution wherever the try clause was going. So the
620 whole try-finally will only fall through if both the try
621 clause and the finally clause fall through. */
622 return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
623 && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
624
0a35513e 625 case GIMPLE_EH_ELSE:
538dd0b7
DM
626 {
627 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
628 return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
629 || gimple_seq_may_fallthru (gimple_eh_else_e_body (
630 eh_else_stmt)));
631 }
0a35513e 632
726a989a
RB
633 case GIMPLE_CALL:
634 /* Functions that do not return do not fall through. */
865f7046 635 return !gimple_call_noreturn_p (stmt);
a141816c 636
726a989a
RB
637 default:
638 return true;
6de9cd9a 639 }
726a989a
RB
640}
641
6de9cd9a 642
726a989a 643/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
6de9cd9a 644
726a989a
RB
645bool
646gimple_seq_may_fallthru (gimple_seq seq)
647{
648 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
6de9cd9a 649}
f5a76aea 650
726a989a
RB
651
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.
   Replaces the return with a goto to a shared per-return-value label;
   the representative returns themselves are emitted at the end of the
   function by lower_function_body.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* The goto inherits the return's location and block so debug info
     still points at the user's return.  */
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
695
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* If the setjmp result is an SSA name, compute into a temporary
     register instead and copy to the SSA name at the end.  */
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
831806cb
RB
826
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  /* Temporary register holding the allocated pointer value.  */
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* First argument is &ptr: allocate into an addressable temporary
	 and copy it into PTR afterwards.  */
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Otherwise load the stored pointer back through the argument.  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      /* The result was unused; give the call an lhs so we can test it.  */
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  /* Only assert the alignment on the success path (res == 0).  */
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* Store the (now alignment-annotated) pointer back.  */
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
6de9cd9a
DN
880\f
881
50674e96 882/* Record the variables in VARS into function FN. */
6de9cd9a
DN
883
884void
50674e96 885record_vars_into (tree vars, tree fn)
6de9cd9a 886{
910ad8de 887 for (; vars; vars = DECL_CHAIN (vars))
6de9cd9a
DN
888 {
889 tree var = vars;
890
acb8f212
JH
891 /* BIND_EXPRs contains also function/type/constant declarations
892 we don't need to care about. */
8813a647 893 if (!VAR_P (var))
acb8f212 894 continue;
50674e96 895
6de9cd9a
DN
896 /* Nothing to do in this case. */
897 if (DECL_EXTERNAL (var))
898 continue;
6de9cd9a
DN
899
900 /* Record the variable. */
45b62594 901 add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
6de9cd9a 902 }
50674e96
DN
903}
904
905
/* Record the variables in VARS into current_function_decl.
   Convenience wrapper around record_vars_into for the function
   currently being compiled.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}