/* Implements exception handling.
   Copyright (C) 1989-2023 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.cc) these are lowered to the
   GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
   nodes.  The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
   converted into GIMPLE_TRY_FINALLY nodes; the others are a more
   direct 1-1 conversion.

   During pass_lower_eh (tree-eh.cc) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.cc), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.cc), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.cc), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.cc), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.cc) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.cc) emits the required unwind data.  */
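
/* As a concrete (purely illustrative) example of the pipeline above: a
   source construct such as

       try { f (); } catch (SomeType &e) { g (e); }

   starts out as TRY_CATCH_EXPR/CATCH_EXPR trees in GENERIC, becomes
   GIMPLE_TRY/GIMPLE_CATCH at gimplification, is recorded as an ERT_TRY
   region with a landing pad during pass_lower_eh, and finally shows up
   as call-site ranges, action records and type filters in the exception
   table emitted by output_function_exception_table.  */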


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"
#include "flags.h"

static GTY(()) int call_site_base;

static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

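/* One entry in the call-site table: the landing pad (if any) that
   covers a range of instructions and the index of its action chain.  */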
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

     0:  null action record, non-null landing pad; implies cleanups
    -1:  null action record, null landing pad; implies no action
    -2:  no call-site entry; implies must_not_throw
    -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
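/* One-time initialization of the exception handling machinery: create
   the map from language types to runtime type objects and, when using
   setjmp/longjmp based exceptions, lay out the SjLj_Function_Context
   record used by the runtime.  */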
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

#ifdef DONT_USE_BUILTIN_SETJMP
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
				      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
			      get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}

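/* Per-function initialization: allocate cfun->eh and reserve the zero'th
   entries of the region and landing pad arrays.  */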
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.cc when processing exception related
   nodes during tree optimization.  */

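/* Allocate a new, empty EH region of TYPE and link it into the region
   tree as a child of OUTER (or as a new root when OUTER is NULL).  */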
static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

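/* Add a CATCH handler for TYPE_OR_LIST (a single type, a TREE_LIST of
   types, or NULL for a catch-all) to the ERT_TRY region T and return it.  */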
eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

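/* Create a new landing pad for REGION and record it in cfun->eh->lp_array.  */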
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

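/* Map a landing pad number I of IFUN to its EH region: positive values
   index the landing pad array, negative values index the region array
   directly, and 0 means no region.  */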
eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}
\f
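/* Record a runtime type object for TYPE in type_to_runtime_map, creating
   it via the language hook the first time TYPE is seen.  */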
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

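/* Return the runtime type object previously recorded for TYPE by
   add_type_for_runtime.  */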
tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					   INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}

/* Emit SEQ into basic block just before INSN (that is assumed to be the
   first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *next, *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  /* Make sure to put the location of INSN or a subsequent instruction on SEQ
     to avoid inheriting the location of the previous instruction.  */
  next = insn;
  while (next && !NONDEBUG_INSN_P (next))
    next = NEXT_INSN (next);
  if (next)
    last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
  else
    last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

static void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}

\f
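/* Call-site index assigned to each landing pad by
   sjlj_assign_call_site_values, indexed by landing pad number.  */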
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label,
			       profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
	{
	  basic_block bb
	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  if (JUMP_P (BB_END (bb)))
	    emit_insn_before (seq, BB_END (bb));
	  else
	    emit_insn_after (seq, BB_END (bb));
	}
      else
	emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

MS
1268
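/* Emit the call that unregisters the SjLj function context, after the
   instruction recorded by sjlj_emit_function_exit_after.  */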
static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	if (current_loops)
	  {
	    class loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

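/* Main entry point for SjLj landing pad generation: assign call-site
   values, then emit the function context setup, the dispatch table and
   the matching teardown code.  */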
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch, true);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  break_superblocks ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();
}
\f
/* This section handles removing dead code for flow.  */

1564void
1d65f45c 1565remove_eh_landing_pad (eh_landing_pad lp)
a8da523f 1566{
1d65f45c 1567 eh_landing_pad *pp;
a8da523f 1568
1d65f45c
RH
1569 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1570 continue;
1571 *pp = lp->next_lp;
b8698a0f 1572
1d65f45c
RH
1573 if (lp->post_landing_pad)
1574 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
9771b263 1575 (*cfun->eh->lp_array)[lp->index] = NULL;
a8da523f
JH
1576}
1577
d273b176 1578/* Splice the EH region at PP from the region tree. */
496a4ef5 1579
d273b176
SB
1580static void
1581remove_eh_handler_splicer (eh_region *pp)
496a4ef5 1582{
d273b176 1583 eh_region region = *pp;
1d65f45c 1584 eh_landing_pad lp;
496a4ef5 1585
1d65f45c 1586 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
6d07ad98 1587 {
1d65f45c
RH
1588 if (lp->post_landing_pad)
1589 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
9771b263 1590 (*cfun->eh->lp_array)[lp->index] = NULL;
6d07ad98 1591 }
f698d217 1592
1d65f45c 1593 if (region->inner)
f698d217 1594 {
d273b176
SB
1595 eh_region p, outer;
1596 outer = region->outer;
1597
1d65f45c
RH
1598 *pp = p = region->inner;
1599 do
1600 {
1601 p->outer = outer;
1602 pp = &p->next_peer;
1603 p = *pp;
1604 }
1605 while (p);
f698d217 1606 }
1d65f45c 1607 *pp = region->next_peer;
87ff9c8e 1608
9771b263 1609 (*cfun->eh->region_array)[region->index] = NULL;
1d65f45c 1610}
87ff9c8e 1611
d273b176
SB
1612/* Splice a single EH region REGION from the region tree.
1613
1614 To unlink REGION, we need to find the pointer to it with a relatively
1615 expensive search in REGION's outer region. If you are going to
1616 remove a number of handlers, using remove_unreachable_eh_regions may
1617 be a better option. */
1618
1619void
1620remove_eh_handler (eh_region region)
1621{
1622 eh_region *pp, *pp_start, p, outer;
1623
1624 outer = region->outer;
1625 if (outer)
1626 pp_start = &outer->inner;
1627 else
1628 pp_start = &cfun->eh->region_tree;
1629 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1630 continue;
1631
1632 remove_eh_handler_splicer (pp);
1633}
1634
1635/* Worker for remove_unreachable_eh_regions.
1636 PP is a pointer to the region to start a region tree depth-first
1637 search from. R_REACHABLE is the set of regions that have to be
1638 preserved. */
1639
1640static void
1641remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1642{
1643 while (*pp)
1644 {
1645 eh_region region = *pp;
1646 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1647 if (!bitmap_bit_p (r_reachable, region->index))
1648 remove_eh_handler_splicer (pp);
1649 else
1650 pp = &region->next_peer;
1651 }
1652}
1653
1654/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1655 Do this by traversing the EH tree top-down and splicing out regions that
1656 are not marked. By removing regions from the leaves, we avoid costly
1657 searches in the region tree. */
1658
1659void
1660remove_unreachable_eh_regions (sbitmap r_reachable)
1661{
1662 remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1663}
1664
1d65f45c
RH
1665/* Invokes CALLBACK for every exception handler landing pad label.
1666 Only used by reload hackery; should not be used by new code. */
87ff9c8e 1667
1d65f45c
RH
1668void
1669for_each_eh_label (void (*callback) (rtx))
87ff9c8e 1670{
1d65f45c
RH
1671 eh_landing_pad lp;
1672 int i;
52a11cbf 1673
9771b263 1674 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
f54a7f6f 1675 {
1d65f45c 1676 if (lp)
4537ec0c 1677 {
e67d1102 1678 rtx_code_label *lab = lp->landing_pad;
1d65f45c
RH
1679 if (lab && LABEL_P (lab))
1680 (*callback) (lab);
4537ec0c 1681 }
52a11cbf 1682 }
87ff9c8e 1683}
1d65f45c
RH
1684\f
1685/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
b8698a0f 1686 call insn.
1d65f45c
RH
1687
1688 At the gimple level, we use LP_NR
1689 > 0 : The statement transfers to landing pad LP_NR
1690 = 0 : The statement is outside any EH region
1691 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1692
1693 At the rtl level, we use LP_NR
1694 > 0 : The insn transfers to landing pad LP_NR
1695 = 0 : The insn cannot throw
1696 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1697 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1698 missing note: The insn is outside any EH region.
1699
1700 ??? This difference probably ought to be avoided. We could stand
1701 to record nothrow for arbitrary gimple statements, and so avoid
1702 some moderately complex lookups in stmt_could_throw_p. Perhaps
1703 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1704 no-nonlocal-goto property should be recorded elsewhere as a bit
1705 on the call_insn directly. Perhaps we should make more use of
1706 attaching the trees to call_insns (reachable via symbol_ref in
1707 direct call cases) and just pull the data out of the trees. */
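/* Added illustration (not part of the original source): how the encoding
   above looks for a concrete call, using the helpers defined below.

     make_reg_eh_region_note (insn, 0, 2);
       -- attaches REG_EH_REGION with value 2: INSN may throw and transfers
          control to landing pad 2 of the current function.

     make_reg_eh_region_note (insn, ECF_NOTHROW, 0);
       -- attaches REG_EH_REGION with value 0: INSN cannot throw.

     make_reg_eh_region_note_nothrow_nononlocal (insn);
       -- forces the note to INT_MIN: INSN can neither throw nor perform a
          non-local goto.

   A call insn with no note at all is simply outside any EH region.  */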
87ff9c8e 1708
1d65f45c 1709void
84f16edb 1710make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
87ff9c8e 1711{
1d65f45c
RH
1712 rtx value;
1713 if (ecf_flags & ECF_NOTHROW)
1714 value = const0_rtx;
1715 else if (lp_nr != 0)
1716 value = GEN_INT (lp_nr);
52a11cbf 1717 else
1d65f45c
RH
1718 return;
1719 add_reg_note (insn, REG_EH_REGION, value);
87ff9c8e
RH
1720}
1721
1d65f45c
RH
1722/* Create a REG_EH_REGION note for a CALL_INSN that can neither throw
1723 nor perform a non-local goto. Replace the region note if it
1724 already exists. */
87ff9c8e 1725
1d65f45c 1726void
84f16edb 1727make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
87ff9c8e 1728{
1d65f45c
RH
1729 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1730 rtx intmin = GEN_INT (INT_MIN);
52a11cbf 1731
1d65f45c
RH
1732 if (note != 0)
1733 XEXP (note, 0) = intmin;
1734 else
1735 add_reg_note (insn, REG_EH_REGION, intmin);
1736}
21cd906e 1737
1d65f45c
RH
1738/* Return true if INSN could throw, assuming no REG_EH_REGION note
1739 to the contrary. */
21cd906e 1740
1d65f45c
RH
1741bool
1742insn_could_throw_p (const_rtx insn)
1743{
642d55de
EB
1744 if (!flag_exceptions)
1745 return false;
1d65f45c
RH
1746 if (CALL_P (insn))
1747 return true;
8f4f502f 1748 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1d65f45c
RH
1749 return may_trap_p (PATTERN (insn));
1750 return false;
1751}
98f464e0 1752
1d65f45c
RH
1753/* Copy a REG_EH_REGION note to each insn that might throw, beginning
1754 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1755 to look for a note, or the note itself. */
98f464e0 1756
1d65f45c 1757void
dc01c3d1 1758copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1d65f45c 1759{
dc01c3d1
DM
1760 rtx_insn *insn;
1761 rtx note = note_or_insn;
21cd906e 1762
1d65f45c
RH
1763 if (INSN_P (note_or_insn))
1764 {
1765 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1766 if (note == NULL)
1767 return;
21cd906e 1768 }
89cfdb7e
JJ
1769 else if (is_a <rtx_insn *> (note_or_insn))
1770 return;
1d65f45c
RH
1771 note = XEXP (note, 0);
1772
1773 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1774 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1775 && insn_could_throw_p (insn))
1776 add_reg_note (insn, REG_EH_REGION, note);
fa51b01b 1777}
4956d07c 1778
1d65f45c 1779/* Likewise, but iterate backward. */
4956d07c 1780
6de9cd9a 1781void
dc01c3d1 1782copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
4956d07c 1783{
dc01c3d1
DM
1784 rtx_insn *insn;
1785 rtx note = note_or_insn;
fb13d4d0 1786
1d65f45c 1787 if (INSN_P (note_or_insn))
7f206d8f 1788 {
1d65f45c
RH
1789 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1790 if (note == NULL)
6de9cd9a 1791 return;
52a11cbf 1792 }
89cfdb7e
JJ
1793 else if (is_a <rtx_insn *> (note_or_insn))
1794 return;
1d65f45c 1795 note = XEXP (note, 0);
fac62ecf 1796
1d65f45c
RH
1797 for (insn = last; insn != first; insn = PREV_INSN (insn))
1798 if (insn_could_throw_p (insn))
1799 add_reg_note (insn, REG_EH_REGION, note);
6de9cd9a
DN
1800}
1801
6de9cd9a 1802
1d65f45c
RH
1803/* Extract all EH information from INSN. Return true if the insn
1804 was marked NOTHROW. */
6de9cd9a 1805
1d65f45c
RH
1806static bool
1807get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1808 eh_landing_pad *plp)
6de9cd9a 1809{
1d65f45c
RH
1810 eh_landing_pad lp = NULL;
1811 eh_region r = NULL;
1812 bool ret = false;
1813 rtx note;
1814 int lp_nr;
6de9cd9a 1815
1d65f45c
RH
1816 if (! INSN_P (insn))
1817 goto egress;
1818
1819 if (NONJUMP_INSN_P (insn)
1820 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1821 insn = XVECEXP (PATTERN (insn), 0, 0);
6de9cd9a 1822
1d65f45c
RH
1823 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1824 if (!note)
6de9cd9a 1825 {
1d65f45c
RH
1826 ret = !insn_could_throw_p (insn);
1827 goto egress;
6de9cd9a 1828 }
1d65f45c
RH
1829
1830 lp_nr = INTVAL (XEXP (note, 0));
1831 if (lp_nr == 0 || lp_nr == INT_MIN)
6de9cd9a 1832 {
1d65f45c
RH
1833 ret = true;
1834 goto egress;
6de9cd9a 1835 }
502b8322 1836
1d65f45c 1837 if (lp_nr < 0)
9771b263 1838 r = (*cfun->eh->region_array)[-lp_nr];
1d65f45c
RH
1839 else
1840 {
9771b263 1841 lp = (*cfun->eh->lp_array)[lp_nr];
1d65f45c
RH
1842 r = lp->region;
1843 }
6de9cd9a 1844
1d65f45c
RH
1845 egress:
1846 *plp = lp;
1847 *pr = r;
1848 return ret;
fb13d4d0
JM
1849}
1850
1d65f45c
RH
1851/* Return the landing pad to which INSN may go, or NULL if it does not
1852 have a reachable landing pad within this function. */
4956d07c 1853
1d65f45c
RH
1854eh_landing_pad
1855get_eh_landing_pad_from_rtx (const_rtx insn)
4956d07c 1856{
1d65f45c
RH
1857 eh_landing_pad lp;
1858 eh_region r;
6de9cd9a 1859
1d65f45c
RH
1860 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1861 return lp;
1862}
6de9cd9a 1863
1d65f45c
RH
1864/* Return the region to which INSN may go, or NULL if it does not
1865 have a reachable region within this function. */
6de9cd9a 1866
1d65f45c
RH
1867eh_region
1868get_eh_region_from_rtx (const_rtx insn)
1869{
1870 eh_landing_pad lp;
1871 eh_region r;
6de9cd9a 1872
1d65f45c
RH
1873 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1874 return r;
6de9cd9a
DN
1875}
1876
1d65f45c
RH
1877/* Return true if INSN throws and is caught by something in this function. */
1878
6de9cd9a 1879bool
ed7a4b4b 1880can_throw_internal (const_rtx insn)
6de9cd9a 1881{
1d65f45c
RH
1882 return get_eh_landing_pad_from_rtx (insn) != NULL;
1883}
1884
1885/* Return true if INSN throws and escapes from the current function. */
1886
1887bool
1888can_throw_external (const_rtx insn)
1889{
1890 eh_landing_pad lp;
1891 eh_region r;
1892 bool nothrow;
e6cfb550 1893
52a11cbf
RH
1894 if (! INSN_P (insn))
1895 return false;
12670d88 1896
4b4bf941 1897 if (NONJUMP_INSN_P (insn)
52a11cbf 1898 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1d65f45c 1899 {
2a62e439
DM
1900 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1901 int i, n = seq->len ();
4956d07c 1902
1d65f45c 1903 for (i = 0; i < n; i++)
2a62e439 1904 if (can_throw_external (seq->element (i)))
1d65f45c 1905 return true;
6de9cd9a 1906
1d65f45c
RH
1907 return false;
1908 }
6de9cd9a 1909
1d65f45c 1910 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
6de9cd9a 1911
1d65f45c
RH
1912 /* If we can't throw, we obviously can't throw external. */
1913 if (nothrow)
1914 return false;
4956d07c 1915
1d65f45c
RH
1916 /* If we have an internal landing pad, then we're not external. */
1917 if (lp != NULL)
1918 return false;
4956d07c 1919
1d65f45c
RH
1920 /* If we're not within an EH region, then we are external. */
1921 if (r == NULL)
1922 return true;
4956d07c 1923
1d65f45c
RH
1924 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1925 which don't always have landing pads. */
1926 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1927 return false;
52a11cbf 1928}
4956d07c 1929
1d65f45c
RH
1930/* Return true if INSN cannot throw at all. */
1931
52a11cbf 1932bool
1d65f45c 1933insn_nothrow_p (const_rtx insn)
4956d07c 1934{
1d65f45c
RH
1935 eh_landing_pad lp;
1936 eh_region r;
4956d07c 1937
52a11cbf 1938 if (! INSN_P (insn))
1d65f45c 1939 return true;
b59fa6cf 1940
4b4bf941 1941 if (NONJUMP_INSN_P (insn)
52a11cbf 1942 && GET_CODE (PATTERN (insn)) == SEQUENCE)
efaadb93 1943 {
2a62e439
DM
1944 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1945 int i, n = seq->len ();
efaadb93
AN
1946
1947 for (i = 0; i < n; i++)
2a62e439 1948 if (!insn_nothrow_p (seq->element (i)))
1d65f45c 1949 return false;
efaadb93 1950
1d65f45c 1951 return true;
efaadb93 1952 }
52a11cbf 1953
1d65f45c
RH
1954 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1955}
1956
1957/* Return true if INSN can perform a non-local goto. */
1958/* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1959
1960bool
d8c1e9b6 1961can_nonlocal_goto (const rtx_insn *insn)
1d65f45c
RH
1962{
1963 if (nonlocal_goto_handler_labels && CALL_P (insn))
52a11cbf 1964 {
1d65f45c
RH
1965 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1966 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1967 return true;
52a11cbf 1968 }
1d65f45c 1969 return false;
4956d07c 1970}
1d65f45c 1971\f
e3b5732b 1972/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
6814a8a0 1973
20cdc2be 1974static unsigned int
502b8322 1975set_nothrow_function_flags (void)
1ef1bf06 1976{
f8b23302 1977 rtx_insn *insn;
502b8322 1978
fe89fbc5 1979 crtl->nothrow = 1;
1ef1bf06 1980
e3b5732b 1981 /* Assume crtl->all_throwers_are_sibcalls until we encounter
b6128b8c
SH
1982 something that can throw an exception. We specifically exempt
1983 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1984 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1985 is optimistic. */
1ef1bf06 1986
e3b5732b 1987 crtl->all_throwers_are_sibcalls = 1;
b6128b8c 1988
fe89fbc5
JH
1989 /* If we don't know that this implementation of the function will
1990 actually be used, then we must not set TREE_NOTHROW, since
1991 callers must not assume that this function does not throw. */
1992 if (TREE_NOTHROW (current_function_decl))
1993 return 0;
1994
b6128b8c 1995 if (! flag_exceptions)
c2924966 1996 return 0;
502b8322 1997
1ef1bf06 1998 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf 1999 if (can_throw_external (insn))
b6128b8c 2000 {
fe89fbc5 2001 crtl->nothrow = 0;
b6128b8c 2002
4b4bf941 2003 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c 2004 {
e3b5732b 2005 crtl->all_throwers_are_sibcalls = 0;
c2924966 2006 return 0;
b6128b8c
SH
2007 }
2008 }
2009
fe89fbc5 2010 if (crtl->nothrow
d52f5295 2011 && (cgraph_node::get (current_function_decl)->get_availability ()
fe89fbc5 2012 >= AVAIL_AVAILABLE))
f7dd1864 2013 {
d52f5295 2014 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2505c5ed
JH
2015 struct cgraph_edge *e;
2016 for (e = node->callers; e; e = e->next_caller)
2017 e->can_throw_external = false;
d52f5295 2018 node->set_nothrow_flag (true);
f7dd1864
AN
2019
2020 if (dump_file)
2021 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2022 current_function_name ());
2023 }
c2924966 2024 return 0;
1ef1bf06 2025}
52a11cbf 2026
27a4cd48
DM
2027namespace {
2028
2029const pass_data pass_data_set_nothrow_function_flags =
2030{
2031 RTL_PASS, /* type */
2032 "nothrow", /* name */
2033 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2034 TV_NONE, /* tv_id */
2035 0, /* properties_required */
2036 0, /* properties_provided */
2037 0, /* properties_destroyed */
2038 0, /* todo_flags_start */
2039 0, /* todo_flags_finish */
ef330312
PB
2040};
2041
27a4cd48
DM
2042class pass_set_nothrow_function_flags : public rtl_opt_pass
2043{
2044public:
c3284718
RS
2045 pass_set_nothrow_function_flags (gcc::context *ctxt)
2046 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
27a4cd48
DM
2047 {}
2048
2049 /* opt_pass methods: */
725793af 2050 unsigned int execute (function *) final override
be55bfe6
TS
2051 {
2052 return set_nothrow_function_flags ();
2053 }
27a4cd48
DM
2054
2055}; // class pass_set_nothrow_function_flags
2056
2057} // anon namespace
2058
2059rtl_opt_pass *
2060make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2061{
2062 return new pass_set_nothrow_function_flags (ctxt);
2063}
2064
1d65f45c
RH
2065\f
2066/* Various hooks for unwind library. */
2067
2068/* Expand the EH support builtin functions:
2069 __builtin_eh_pointer and __builtin_eh_filter. */
2070
2071static eh_region
2072expand_builtin_eh_common (tree region_nr_t)
2073{
2074 HOST_WIDE_INT region_nr;
2075 eh_region region;
2076
9541ffee 2077 gcc_assert (tree_fits_shwi_p (region_nr_t));
9439e9a1 2078 region_nr = tree_to_shwi (region_nr_t);
1d65f45c 2079
9771b263 2080 region = (*cfun->eh->region_array)[region_nr];
1d65f45c
RH
2081
2082 /* ??? We shouldn't have been able to delete an EH region without
2083 deleting all the code that depended on it. */
2084 gcc_assert (region != NULL);
2085
2086 return region;
2087}
2088
2089/* Expand to the exc_ptr value from the given eh region. */
2090
2091rtx
2092expand_builtin_eh_pointer (tree exp)
2093{
2094 eh_region region
2095 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2096 if (region->exc_ptr_reg == NULL)
2097 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2098 return region->exc_ptr_reg;
2099}
2100
2101/* Expand to the filter value from the given eh region. */
2102
2103rtx
2104expand_builtin_eh_filter (tree exp)
2105{
2106 eh_region region
2107 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2108 if (region->filter_reg == NULL)
2109 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2110 return region->filter_reg;
2111}
2112
2113/* Copy the exc_ptr and filter values from one landing pad's registers
2114 to another. This is used to inline the resx statement. */
2115
2116rtx
2117expand_builtin_eh_copy_values (tree exp)
2118{
2119 eh_region dst
2120 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2121 eh_region src
2122 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
095a2d76 2123 scalar_int_mode fmode = targetm.eh_return_filter_mode ();
1d65f45c
RH
2124
2125 if (dst->exc_ptr_reg == NULL)
2126 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2127 if (src->exc_ptr_reg == NULL)
2128 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2129
2130 if (dst->filter_reg == NULL)
2131 dst->filter_reg = gen_reg_rtx (fmode);
2132 if (src->filter_reg == NULL)
2133 src->filter_reg = gen_reg_rtx (fmode);
2134
2135 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2136 emit_move_insn (dst->filter_reg, src->filter_reg);
2137
2138 return const0_rtx;
2139}
ca55abae
JM
2140
2141/* Do any necessary initialization to access arbitrary stack frames.
2142 On the SPARC, this means flushing the register windows. */
2143
2144void
502b8322 2145expand_builtin_unwind_init (void)
ca55abae
JM
2146{
2147 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 2148 able to copy the saved values for any registers from frames we unwind. */
e3b5732b 2149 crtl->saves_all_registers = 1;
ca55abae 2150
ca55abae 2151 SETUP_FRAME_ADDRESSES ();
ca55abae
JM
2152}
2153
1d65f45c
RH
2154/* Map a non-negative number to an eh return data register number; expands
2155 to -1 if no return data register is associated with the input number.
2156 At least the inputs 0 and 1 must be mapped; the target may provide more. */
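/* Added usage sketch (not part of the original source): unwinder support
   code, e.g. a language personality routine in the runtime library,
   typically queries this builtin as

     int reg0 = __builtin_eh_return_data_regno (0);
     int reg1 = __builtin_eh_return_data_regno (1);

   and expects a compile-time constant register number back, or -1 when the
   target provides no such register.  A non-constant argument is diagnosed
   with an error below.  */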
2157
52a11cbf 2158rtx
5039610b 2159expand_builtin_eh_return_data_regno (tree exp)
52a11cbf 2160{
5039610b 2161 tree which = CALL_EXPR_ARG (exp, 0);
52a11cbf
RH
2162 unsigned HOST_WIDE_INT iwhich;
2163
2164 if (TREE_CODE (which) != INTEGER_CST)
2165 {
971801ff 2166 error ("argument of %<__builtin_eh_return_regno%> must be constant");
52a11cbf
RH
2167 return constm1_rtx;
2168 }
2169
ae7e9ddd 2170 iwhich = tree_to_uhwi (which);
52a11cbf
RH
2171 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2172 if (iwhich == INVALID_REGNUM)
2173 return constm1_rtx;
2174
2175#ifdef DWARF_FRAME_REGNUM
2176 iwhich = DWARF_FRAME_REGNUM (iwhich);
2177#else
ca60bd93 2178 iwhich = DEBUGGER_REGNO (iwhich);
52a11cbf
RH
2179#endif
2180
3f2c5d1a 2181 return GEN_INT (iwhich);
52a11cbf
RH
2182}
2183
ca55abae
JM
2184/* Given a value extracted from the return address register or stack slot,
2185 return the actual address encoded in that value. */
2186
2187rtx
502b8322 2188expand_builtin_extract_return_addr (tree addr_tree)
ca55abae 2189{
49452c07 2190 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
52a11cbf 2191
0ab38418
EC
2192 if (GET_MODE (addr) != Pmode
2193 && GET_MODE (addr) != VOIDmode)
2194 {
2195#ifdef POINTERS_EXTEND_UNSIGNED
2196 addr = convert_memory_address (Pmode, addr);
2197#else
2198 addr = convert_to_mode (Pmode, addr, 0);
2199#endif
2200 }
2201
52a11cbf 2202 /* First mask out any unwanted bits. */
cbc7d031
TS
2203 rtx mask = MASK_RETURN_ADDR;
2204 if (mask)
2205 expand_and (Pmode, addr, mask, addr);
52a11cbf
RH
2206
2207 /* Then adjust to find the real return address. */
a8a6b3df
TS
2208 if (RETURN_ADDR_OFFSET)
2209 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
52a11cbf
RH
2210
2211 return addr;
ca55abae
JM
2212}
2213
2214/* Given an actual address in addr_tree, do any necessary encoding
2215 and return the value to be stored in the return address register or
2216 stack slot so the epilogue will return to that address. */
2217
2218rtx
502b8322 2219expand_builtin_frob_return_addr (tree addr_tree)
ca55abae 2220{
49452c07 2221 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
52a11cbf 2222
5ae6cd0d 2223 addr = convert_memory_address (Pmode, addr);
be128cd9 2224
a8a6b3df
TS
2225 if (RETURN_ADDR_OFFSET)
2226 {
2227 addr = force_reg (Pmode, addr);
2228 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2229 }
52a11cbf 2230
ca55abae
JM
2231 return addr;
2232}
2233
52a11cbf
RH
2234/* Set up the epilogue with the magic bits we'll need to return to the
2235 exception handler. */
ca55abae 2236
52a11cbf 2237void
502b8322
AJ
2238expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2239 tree handler_tree)
ca55abae 2240{
34dc173c 2241 rtx tmp;
ca55abae 2242
34dc173c 2243#ifdef EH_RETURN_STACKADJ_RTX
69c32ec8 2244 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
49452c07 2245 VOIDmode, EXPAND_NORMAL);
5ae6cd0d 2246 tmp = convert_memory_address (Pmode, tmp);
69c32ec8 2247 if (!crtl->eh.ehr_stackadj)
dcd7a7df 2248 crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
69c32ec8
JH
2249 else if (tmp != crtl->eh.ehr_stackadj)
2250 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
be128cd9
RK
2251#endif
2252
69c32ec8 2253 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
49452c07 2254 VOIDmode, EXPAND_NORMAL);
5ae6cd0d 2255 tmp = convert_memory_address (Pmode, tmp);
69c32ec8 2256 if (!crtl->eh.ehr_handler)
dcd7a7df 2257 crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
69c32ec8
JH
2258 else if (tmp != crtl->eh.ehr_handler)
2259 emit_move_insn (crtl->eh.ehr_handler, tmp);
ca55abae 2260
69c32ec8
JH
2261 if (!crtl->eh.ehr_label)
2262 crtl->eh.ehr_label = gen_label_rtx ();
2263 emit_jump (crtl->eh.ehr_label);
a1622f83
AM
2264}
2265
1d65f45c
RH
2266/* Expand __builtin_eh_return. This exit path from the function loads up
2267 the eh return data registers, adjusts the stack, and branches to a
2268 given PC other than the normal return address. */
2269
71038426 2270void
502b8322 2271expand_eh_return (void)
ca55abae 2272{
f8b23302 2273 rtx_code_label *around_label;
ca55abae 2274
69c32ec8 2275 if (! crtl->eh.ehr_label)
71038426 2276 return;
ca55abae 2277
e3b5732b 2278 crtl->calls_eh_return = 1;
ca55abae 2279
34dc173c
UW
2280#ifdef EH_RETURN_STACKADJ_RTX
2281 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2282#endif
2283
52a11cbf 2284 around_label = gen_label_rtx ();
52a11cbf 2285 emit_jump (around_label);
ca55abae 2286
69c32ec8 2287 emit_label (crtl->eh.ehr_label);
52a11cbf 2288 clobber_return_register ();
ca55abae 2289
34dc173c 2290#ifdef EH_RETURN_STACKADJ_RTX
69c32ec8 2291 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
34dc173c
UW
2292#endif
2293
3b0b0013
RS
2294 if (targetm.have_eh_return ())
2295 emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
52a11cbf 2296 else
52a11cbf 2297 {
dd4fb609
TS
2298 if (rtx handler = EH_RETURN_HANDLER_RTX)
2299 emit_move_insn (handler, crtl->eh.ehr_handler);
2300 else
a3f9f006 2301 error ("%<__builtin_eh_return%> not supported on this target");
52a11cbf 2302 }
71038426 2303
52a11cbf 2304 emit_label (around_label);
71038426 2305}
c76362b4
JW
2306
2307/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2308 POINTERS_EXTEND_UNSIGNED and return it. */
2309
2310rtx
2311expand_builtin_extend_pointer (tree addr_tree)
2312{
49452c07 2313 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
c76362b4
JW
2314 int extend;
2315
2316#ifdef POINTERS_EXTEND_UNSIGNED
2317 extend = POINTERS_EXTEND_UNSIGNED;
2318#else
2319 /* The previous EH code did an unsigned extend by default, so we do this also
2320 for consistency. */
2321 extend = 1;
2322#endif
2323
7b0518e3 2324 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
c76362b4 2325}
77d33a84 2326\f
52a11cbf 2327static int
c203e8a7 2328add_action_record (action_hash_type *ar_hash, int filter, int next)
77d33a84 2329{
d858f359 2330 struct action_record **slot, *new_ar, tmp;
52a11cbf
RH
2331
2332 tmp.filter = filter;
2333 tmp.next = next;
c203e8a7 2334 slot = ar_hash->find_slot (&tmp, INSERT);
77d33a84 2335
d858f359 2336 if ((new_ar = *slot) == NULL)
77d33a84 2337 {
d858f359 2338 new_ar = XNEW (struct action_record);
9771b263 2339 new_ar->offset = crtl->eh.action_record_data->length () + 1;
d858f359
KG
2340 new_ar->filter = filter;
2341 new_ar->next = next;
2342 *slot = new_ar;
52a11cbf
RH
2343
2344 /* The filter value goes in untouched. The link to the next
2345 record is a "self-relative" byte offset, or zero to indicate
2346 that there is no next record. So convert the absolute 1 based
eaec9b3d 2347 indices we've been carrying around into a displacement. */
52a11cbf 2348
69c32ec8 2349 push_sleb128 (&crtl->eh.action_record_data, filter);
52a11cbf 2350 if (next)
9771b263 2351 next -= crtl->eh.action_record_data->length () + 1;
69c32ec8 2352 push_sleb128 (&crtl->eh.action_record_data, next);
77d33a84 2353 }
77d33a84 2354
d858f359 2355 return new_ar->offset;
52a11cbf 2356}
77d33a84 2357
52a11cbf 2358static int
c203e8a7 2359collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
77d33a84 2360{
52a11cbf 2361 int next;
77d33a84 2362
52a11cbf
RH
2363 /* If we've reached the top of the region chain, then we have
2364 no actions, and require no landing pad. */
2365 if (region == NULL)
2366 return -1;
2367
2368 switch (region->type)
77d33a84 2369 {
52a11cbf 2370 case ERT_CLEANUP:
1d65f45c
RH
2371 {
2372 eh_region r;
2373 /* A cleanup adds a zero filter to the beginning of the chain, but
2374 there are special cases to look out for. If there are *only*
2375 cleanups along a path, then it compresses to a zero action.
2376 Further, if there are multiple cleanups along a path, we only
2377 need to represent one of them, as that is enough to trigger
2378 entry to the landing pad at runtime. */
2379 next = collect_one_action_chain (ar_hash, region->outer);
2380 if (next <= 0)
2381 return 0;
2382 for (r = region->outer; r ; r = r->outer)
2383 if (r->type == ERT_CLEANUP)
2384 return next;
2385 return add_action_record (ar_hash, 0, next);
2386 }
52a11cbf
RH
2387
2388 case ERT_TRY:
1d65f45c
RH
2389 {
2390 eh_catch c;
2391
2392 /* Process the associated catch regions in reverse order.
2393 If there's a catch-all handler, then we don't need to
2394 search outer regions. Use a magic -3 value to record
2395 that we haven't done the outer search. */
2396 next = -3;
2397 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2398 {
2399 if (c->type_list == NULL)
2400 {
2401 /* Retrieve the filter from the head of the filter list
2402 where we have stored it (see assign_filter_values). */
2403 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2404 next = add_action_record (ar_hash, filter, 0);
2405 }
2406 else
2407 {
2408 /* Once the outer search is done, trigger an action record for
2409 each filter we have. */
2410 tree flt_node;
6d41a92f 2411
1d65f45c
RH
2412 if (next == -3)
2413 {
2414 next = collect_one_action_chain (ar_hash, region->outer);
2415
2416 /* If there is no next action, terminate the chain. */
2417 if (next == -1)
2418 next = 0;
2419 /* If all outer actions are cleanups or must_not_throw,
2420 we'll have no action record for it, since we had wanted
2421 to encode these states in the call-site record directly.
2422 Add a cleanup action to the chain to catch these. */
2423 else if (next <= 0)
2424 next = add_action_record (ar_hash, 0, 0);
2425 }
3f2c5d1a 2426
1d65f45c
RH
2427 flt_node = c->filter_list;
2428 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2429 {
2430 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2431 next = add_action_record (ar_hash, filter, next);
2432 }
2433 }
2434 }
2435 return next;
2436 }
52a11cbf
RH
2437
2438 case ERT_ALLOWED_EXCEPTIONS:
2439 /* An exception specification adds its filter to the
2440 beginning of the chain. */
2441 next = collect_one_action_chain (ar_hash, region->outer);
0977ab3a
RH
2442
2443 /* If there is no next action, terminate the chain. */
2444 if (next == -1)
2445 next = 0;
2446 /* If all outer actions are cleanups or must_not_throw,
2447 we'll have no action record for it, since we had wanted
2448 to encode these states in the call-site record directly.
2449 Add a cleanup action to the chain to catch these. */
2450 else if (next <= 0)
2451 next = add_action_record (ar_hash, 0, 0);
083cad55 2452
0977ab3a 2453 return add_action_record (ar_hash, region->u.allowed.filter, next);
52a11cbf
RH
2454
2455 case ERT_MUST_NOT_THROW:
2456 /* A must-not-throw region with no inner handlers or cleanups
2457 requires no call-site entry. Note that this differs from
2458 the no handler or cleanup case in that we do require an lsda
2459 to be generated. Return a magic -2 value to record this. */
2460 return -2;
77d33a84 2461 }
1d65f45c
RH
2462
2463 gcc_unreachable ();
77d33a84
AM
2464}
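/* Added worked example (not part of the original source): consider a call
   site whose innermost region is a cleanup nested inside a try that has a
   single catch using filter value 1.  The catch is recorded first as
   (filter 1, next 0) at offset 1, then the cleanup as (filter 0) linking
   back to it, so with the self-relative encoding used by add_action_record
   the action table contains

     01 00    offset 1: filter 1, no next record
     00 7d    offset 3: filter 0, next record at displacement -3 (offset 1)

   and the call-site entry for that call stores action offset 3.  */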
2465
52a11cbf 2466static int
17f6e37d 2467add_call_site (rtx landing_pad, int action, int section)
77d33a84 2468{
69c32ec8 2469 call_site_record record;
1d65f45c 2470
766090c2 2471 record = ggc_alloc<call_site_record_d> ();
69c32ec8
JH
2472 record->landing_pad = landing_pad;
2473 record->action = action;
77d33a84 2474
9771b263 2475 vec_safe_push (crtl->eh.call_site_record_v[section], record);
77d33a84 2476
9771b263 2477 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
77d33a84
AM
2478}
2479
66e8df53 2480static rtx_note *
b32d5189 2481emit_note_eh_region_end (rtx_insn *insn)
a4a51a52 2482{
a4a51a52
UB
2483 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2484}
2485
3fa410c0
JH
2486/* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
2487 with a landing pad.
2488 With the landing pad at offset 0 from the start label of the section,
2489 we would miss EH delivery because 0 is special and means no landing pad. */
2490
2491static bool
2492maybe_add_nop_after_section_switch (void)
2493{
2494 if (!crtl->uses_eh_lsda
2495 || !crtl->eh.call_site_record_v[1])
2496 return false;
2497 int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
2498 hash_set<rtx_insn *> visited;
2499
2500 for (int i = 0; i < n; ++i)
2501 {
2502 struct call_site_record_d *cs
2503 = (*crtl->eh.call_site_record_v[1])[i];
2504 if (cs->landing_pad)
2505 {
2506 rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2507 while (true)
2508 {
2509 /* Landing pads have the LABEL_PRESERVE_P flag set. This check makes
2510 sure that we do not walk past a landing pad visited earlier,
2511 which would result in possible quadratic behaviour. */
2512 if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2513 && visited.add (insn))
2514 break;
2515
2516 /* Conservatively assume that an ASM insn may be empty. We have
2517 no way to tell what it contains. */
2518 if (active_insn_p (insn)
2519 && GET_CODE (PATTERN (insn)) != ASM_INPUT
2520 && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2521 break;
2522
2523 /* If we reached the start of hot section, then NOP will be
2524 needed. */
2525 if (GET_CODE (insn) == NOTE
2526 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2527 {
2528 emit_insn_after (gen_nop (), insn);
2529 break;
2530 }
2531
2532 /* We visit only labels from the cold section. We should never hit
2533 the beginning of the insn stream here. */
2534 insn = PREV_INSN (insn);
2535 }
2536 }
2537 }
2538 return false;
2539}
2540
52a11cbf
RH
2541/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2542 The new note numbers will not refer to region numbers, but
2543 instead to call site entries. */
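/* Added sketch of the result (not part of the original source): a maximal
   run of consecutive throwing insns that share one landing pad and one
   action chain ends up bracketed by a single pair of notes,

     NOTE_INSN_EH_REGION_BEG   NOTE_EH_HANDLER = call-site index N
       ... insns ...
     NOTE_INSN_EH_REGION_END   NOTE_EH_HANDLER = N

   while entry N of crtl->eh.call_site_record_v remembers the landing pad
   label and the action-table offset for that range.  */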
77d33a84 2544
1d65f45c 2545static unsigned int
502b8322 2546convert_to_eh_region_ranges (void)
77d33a84 2547{
f8b23302
DM
2548 rtx insn;
2549 rtx_insn *iter;
66e8df53 2550 rtx_note *note;
c203e8a7 2551 action_hash_type ar_hash (31);
52a11cbf 2552 int last_action = -3;
f8b23302 2553 rtx_insn *last_action_insn = NULL;
52a11cbf 2554 rtx last_landing_pad = NULL_RTX;
f8b23302 2555 rtx_insn *first_no_action_insn = NULL;
ae0ed63a 2556 int call_site = 0;
17f6e37d 2557 int cur_sec = 0;
e67d1102 2558 rtx_insn *section_switch_note = NULL;
f8b23302
DM
2559 rtx_insn *first_no_action_insn_before_switch = NULL;
2560 rtx_insn *last_no_action_insn_before_switch = NULL;
17f6e37d 2561 int saved_call_site_base = call_site_base;
77d33a84 2562
9771b263 2563 vec_alloc (crtl->eh.action_record_data, 64);
77d33a84 2564
52a11cbf
RH
2565 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2566 if (INSN_P (iter))
2567 {
1d65f45c
RH
2568 eh_landing_pad lp;
2569 eh_region region;
2570 bool nothrow;
52a11cbf 2571 int this_action;
e67d1102 2572 rtx_code_label *this_landing_pad;
77d33a84 2573
52a11cbf 2574 insn = iter;
4b4bf941 2575 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2576 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2577 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 2578
1d65f45c
RH
2579 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2580 if (nothrow)
2581 continue;
2582 if (region)
c203e8a7 2583 this_action = collect_one_action_chain (&ar_hash, region);
52a11cbf 2584 else
1d65f45c 2585 this_action = -1;
52a11cbf
RH
2586
2587 /* Existence of catch handlers, or must-not-throw regions
2588 implies that an lsda is needed (even if empty). */
2589 if (this_action != -1)
e3b5732b 2590 crtl->uses_eh_lsda = 1;
52a11cbf
RH
2591
2592 /* Delay creation of region notes for no-action regions
2593 until we're sure that an lsda will be required. */
2594 else if (last_action == -3)
2595 {
2596 first_no_action_insn = iter;
2597 last_action = -1;
2598 }
1ef1bf06 2599
52a11cbf 2600 if (this_action >= 0)
1d65f45c 2601 this_landing_pad = lp->landing_pad;
52a11cbf 2602 else
e67d1102 2603 this_landing_pad = NULL;
1ef1bf06 2604
52a11cbf
RH
2605 /* Differing actions or landing pads implies a change in call-site
2606 info, which implies some EH_REGION note should be emitted. */
2607 if (last_action != this_action
2608 || last_landing_pad != this_landing_pad)
2609 {
aaa52a96
JJ
2610 /* If there is a queued no-action region in the other section
2611 with hot/cold partitioning, emit it now. */
2612 if (first_no_action_insn_before_switch)
2613 {
2614 gcc_assert (this_action != -1
2615 && last_action == (first_no_action_insn
2616 ? -1 : -3));
2617 call_site = add_call_site (NULL_RTX, 0, 0);
2618 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2619 first_no_action_insn_before_switch);
2620 NOTE_EH_HANDLER (note) = call_site;
a4a51a52
UB
2621 note
2622 = emit_note_eh_region_end (last_no_action_insn_before_switch);
aaa52a96
JJ
2623 NOTE_EH_HANDLER (note) = call_site;
2624 gcc_assert (last_action != -3
2625 || (last_action_insn
2626 == last_no_action_insn_before_switch));
f8b23302
DM
2627 first_no_action_insn_before_switch = NULL;
2628 last_no_action_insn_before_switch = NULL;
aaa52a96
JJ
2629 call_site_base++;
2630 }
52a11cbf
RH
2631 /* If we'd not seen a previous action (-3) or the previous
2632 action was must-not-throw (-2), then we do not need an
2633 end note. */
2634 if (last_action >= -1)
2635 {
2636 /* If we delayed the creation of the begin, do it now. */
2637 if (first_no_action_insn)
2638 {
17f6e37d 2639 call_site = add_call_site (NULL_RTX, 0, cur_sec);
52a11cbf
RH
2640 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2641 first_no_action_insn);
2642 NOTE_EH_HANDLER (note) = call_site;
f8b23302 2643 first_no_action_insn = NULL;
52a11cbf
RH
2644 }
2645
a4a51a52 2646 note = emit_note_eh_region_end (last_action_insn);
52a11cbf
RH
2647 NOTE_EH_HANDLER (note) = call_site;
2648 }
2649
2650 /* If the new action is must-not-throw, then no region notes
2651 are created. */
2652 if (this_action >= -1)
2653 {
3f2c5d1a 2654 call_site = add_call_site (this_landing_pad,
17f6e37d
JJ
2655 this_action < 0 ? 0 : this_action,
2656 cur_sec);
52a11cbf
RH
2657 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2658 NOTE_EH_HANDLER (note) = call_site;
2659 }
2660
2661 last_action = this_action;
2662 last_landing_pad = this_landing_pad;
2663 }
2664 last_action_insn = iter;
2665 }
17f6e37d
JJ
2666 else if (NOTE_P (iter)
2667 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2668 {
2669 gcc_assert (section_switch_note == NULL_RTX);
2670 gcc_assert (flag_reorder_blocks_and_partition);
2671 section_switch_note = iter;
2672 if (first_no_action_insn)
2673 {
2674 first_no_action_insn_before_switch = first_no_action_insn;
2675 last_no_action_insn_before_switch = last_action_insn;
f8b23302 2676 first_no_action_insn = NULL;
17f6e37d
JJ
2677 gcc_assert (last_action == -1);
2678 last_action = -3;
2679 }
2680 /* Force closing of current EH region before section switch and
2681 opening a new one afterwards. */
2682 else if (last_action != -3)
2683 last_landing_pad = pc_rtx;
9771b263
DN
2684 if (crtl->eh.call_site_record_v[cur_sec])
2685 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
17f6e37d 2686 cur_sec++;
0823efed 2687 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
9771b263 2688 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
17f6e37d 2689 }
1ef1bf06 2690
52a11cbf 2691 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 2692 {
a4a51a52 2693 note = emit_note_eh_region_end (last_action_insn);
52a11cbf 2694 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
2695 }
2696
17f6e37d
JJ
2697 call_site_base = saved_call_site_base;
2698
c2924966 2699 return 0;
52a11cbf 2700}
1ef1bf06 2701
27a4cd48
DM
2702namespace {
2703
2704const pass_data pass_data_convert_to_eh_region_ranges =
2705{
2706 RTL_PASS, /* type */
2707 "eh_ranges", /* name */
2708 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2709 TV_NONE, /* tv_id */
2710 0, /* properties_required */
2711 0, /* properties_provided */
2712 0, /* properties_destroyed */
2713 0, /* todo_flags_start */
2714 0, /* todo_flags_finish */
ef330312 2715};
27a4cd48
DM
2716
2717class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2718{
2719public:
c3284718
RS
2720 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2721 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
27a4cd48
DM
2722 {}
2723
2724 /* opt_pass methods: */
725793af
DM
2725 bool gate (function *) final override;
2726 unsigned int execute (function *) final override
be55bfe6 2727 {
205b6026 2728 int ret = convert_to_eh_region_ranges ();
3fa410c0 2729 maybe_add_nop_after_section_switch ();
205b6026 2730 return ret;
be55bfe6 2731 }
27a4cd48
DM
2732
2733}; // class pass_convert_to_eh_region_ranges
2734
1a3d085c
TS
2735bool
2736pass_convert_to_eh_region_ranges::gate (function *)
2737{
2738 /* Nothing to do for SJLJ exceptions or if no regions created. */
2739 if (cfun->eh->region_tree == NULL)
2740 return false;
2741 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2742 return false;
2743 return true;
2744}
2745
27a4cd48
DM
2746} // anon namespace
2747
2748rtl_opt_pass *
2749make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2750{
2751 return new pass_convert_to_eh_region_ranges (ctxt);
2752}
52a11cbf
RH
2753\f
2754static void
9771b263 2755push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
52a11cbf
RH
2756{
2757 do
2758 {
2759 unsigned char byte = value & 0x7f;
2760 value >>= 7;
2761 if (value)
2762 byte |= 0x80;
9771b263 2763 vec_safe_push (*data_area, byte);
52a11cbf
RH
2764 }
2765 while (value);
2766}
1ef1bf06 2767
52a11cbf 2768static void
9771b263 2769push_sleb128 (vec<uchar, va_gc> **data_area, int value)
52a11cbf
RH
2770{
2771 unsigned char byte;
2772 int more;
1ef1bf06 2773
52a11cbf 2774 do
1ef1bf06 2775 {
52a11cbf
RH
2776 byte = value & 0x7f;
2777 value >>= 7;
2778 more = ! ((value == 0 && (byte & 0x40) == 0)
2779 || (value == -1 && (byte & 0x40) != 0));
2780 if (more)
2781 byte |= 0x80;
9771b263 2782 vec_safe_push (*data_area, byte);
1ef1bf06 2783 }
52a11cbf
RH
2784 while (more);
2785}
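/* Added encoding examples (not part of the original source):

     push_uleb128 (&v, 300);   300 = 0x2C | (0x02 << 7)  ==>  0xAC 0x02
     push_sleb128 (&v, -3);    sign bit 0x40 already set  ==>  0x7D
     push_sleb128 (&v, 64);    extra byte carries sign    ==>  0xC0 0x00

   Each byte holds seven payload bits; the high bit marks continuation.  */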
1ef1bf06 2786
52a11cbf 2787\f
52a11cbf 2788static int
17f6e37d 2789dw2_size_of_call_site_table (int section)
1ef1bf06 2790{
9771b263 2791 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
52a11cbf
RH
2792 int size = n * (4 + 4 + 4);
2793 int i;
1ef1bf06 2794
52a11cbf
RH
2795 for (i = 0; i < n; ++i)
2796 {
7e5487a2 2797 struct call_site_record_d *cs =
9771b263 2798 (*crtl->eh.call_site_record_v[section])[i];
52a11cbf
RH
2799 size += size_of_uleb128 (cs->action);
2800 }
fac62ecf 2801
52a11cbf
RH
2802 return size;
2803}
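/* Added note (summarizing the code above and dw2_output_call_site_table
   below): with the DW_EH_PE_udata4 call-site format each record is

     4 bytes   region start  (LEHB label minus the section begin label)
     4 bytes   region length (LEHE label minus LEHB label)
     4 bytes   landing pad   (landing pad label minus begin label, 0 = none)
     uleb128   action        (1-based offset into the action table, 0 = none)

   which is what the n * (4 + 4 + 4) term plus the per-record uleb128
   accounts for.  */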
2804
2805static int
502b8322 2806sjlj_size_of_call_site_table (void)
52a11cbf 2807{
9771b263 2808 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
52a11cbf
RH
2809 int size = 0;
2810 int i;
77d33a84 2811
52a11cbf 2812 for (i = 0; i < n; ++i)
1ef1bf06 2813 {
7e5487a2 2814 struct call_site_record_d *cs =
9771b263 2815 (*crtl->eh.call_site_record_v[0])[i];
52a11cbf
RH
2816 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2817 size += size_of_uleb128 (cs->action);
1ef1bf06 2818 }
52a11cbf
RH
2819
2820 return size;
2821}
52a11cbf
RH
2822
2823static void
17f6e37d 2824dw2_output_call_site_table (int cs_format, int section)
52a11cbf 2825{
9771b263 2826 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
52a11cbf 2827 int i;
17f6e37d
JJ
2828 const char *begin;
2829
2830 if (section == 0)
2831 begin = current_function_func_begin_label;
2832 else if (first_function_block_is_cold)
2833 begin = crtl->subsections.hot_section_label;
2834 else
2835 begin = crtl->subsections.cold_section_label;
52a11cbf
RH
2836
2837 for (i = 0; i < n; ++i)
1ef1bf06 2838 {
9771b263 2839 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
52a11cbf
RH
2840 char reg_start_lab[32];
2841 char reg_end_lab[32];
2842 char landing_pad_lab[32];
2843
2844 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2845 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2846
2847 if (cs->landing_pad)
2848 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2849 CODE_LABEL_NUMBER (cs->landing_pad));
2850
2851 /* ??? Perhaps use insn length scaling if the assembler supports
2852 generic arithmetic. */
2853 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2854 data4 if the function is small enough. */
17f6e37d
JJ
2855 if (cs_format == DW_EH_PE_uleb128)
2856 {
2857 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2858 "region %d start", i);
2859 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2860 "length");
2861 if (cs->landing_pad)
2862 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2863 "landing pad");
2864 else
2865 dw2_asm_output_data_uleb128 (0, "landing pad");
2866 }
52a11cbf 2867 else
17f6e37d
JJ
2868 {
2869 dw2_asm_output_delta (4, reg_start_lab, begin,
2870 "region %d start", i);
2871 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2872 if (cs->landing_pad)
2873 dw2_asm_output_delta (4, landing_pad_lab, begin,
2874 "landing pad");
2875 else
2876 dw2_asm_output_data (4, 0, "landing pad");
2877 }
52a11cbf 2878 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
2879 }
2880
52a11cbf
RH
2881 call_site_base += n;
2882}
2883
2884static void
502b8322 2885sjlj_output_call_site_table (void)
52a11cbf 2886{
9771b263 2887 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
52a11cbf 2888 int i;
1ef1bf06 2889
52a11cbf 2890 for (i = 0; i < n; ++i)
1ef1bf06 2891 {
9771b263 2892 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
4da896b2 2893
52a11cbf
RH
2894 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2895 "region %d landing pad", i);
2896 dw2_asm_output_data_uleb128 (cs->action, "action");
2897 }
4da896b2 2898
52a11cbf 2899 call_site_base += n;
1ef1bf06
AM
2900}
2901
0f67af1c 2902/* Switch to the section that should be used for exception tables. */
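/* Added example (not part of the original source): with -ffunction-sections,
   or for a COMDAT function when the assembler supports COMDAT groups, a
   function `foo' gets a matching ".gcc_except_table.foo" section; otherwise
   the shared ".gcc_except_table" section is used (and cached in
   exception_section), falling back to the data or read-only data section on
   targets without named sections.  */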
96d0f4dc 2903
0f67af1c 2904static void
22ba88ef 2905switch_to_exception_section (const char * ARG_UNUSED (fnname))
96d0f4dc 2906{
55fc9e87
EB
2907 section *s;
2908
2909 if (exception_section)
2910 s = exception_section;
2911 else
96d0f4dc 2912 {
04218b35
AD
2913 int flags;
2914
2915 if (EH_TABLES_CAN_BE_READ_ONLY)
2916 {
2917 int tt_format =
2918 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2919 flags = ((! flag_pic
2920 || ((tt_format & 0x70) != DW_EH_PE_absptr
2921 && (tt_format & 0x70) != DW_EH_PE_aligned))
2922 ? 0 : SECTION_WRITE);
2923 }
2924 else
2925 flags = SECTION_WRITE;
2926
55fc9e87
EB
2927 /* Compute the section and cache it into exception_section,
2928 unless it depends on the function name. */
677f3fa8 2929 if (targetm_common.have_named_sections)
1a35e62d 2930 {
22ba88ef 2931#ifdef HAVE_LD_EH_GC_SECTIONS
3e6011cf 2932 if (flag_function_sections
cf288ed3 2933 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
22ba88ef 2934 {
1b4572a8 2935 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3e6011cf
PB
2936 /* The EH table must match the code section, so only mark
2937 it linkonce if we have COMDAT groups to tie them together. */
cf288ed3 2938 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
3e6011cf 2939 flags |= SECTION_LINKONCE;
22ba88ef 2940 sprintf (section_name, ".gcc_except_table.%s", fnname);
3e6011cf 2941 s = get_section (section_name, flags, current_function_decl);
22ba88ef
EB
2942 free (section_name);
2943 }
2944 else
2945#endif
55fc9e87
EB
2946 exception_section
2947 = s = get_section (".gcc_except_table", flags, NULL);
1a35e62d
MM
2948 }
2949 else
55fc9e87 2950 exception_section
04218b35 2951 = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
96d0f4dc 2952 }
55fc9e87
EB
2953
2954 switch_to_section (s);
96d0f4dc
JJ
2955}
2956
617a1b71 2957/* Output a reference from an exception table to the type_info object TYPE.
6fc0bb99 2958 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
617a1b71
PB
2959 the value. */
2960
2961static void
2962output_ttype (tree type, int tt_format, int tt_format_size)
2963{
2964 rtx value;
d858f359 2965 bool is_public = true;
617a1b71
PB
2966
2967 if (type == NULL_TREE)
2968 value = const0_rtx;
2969 else
2970 {
d7f09764
DN
2971 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2972 runtime types so TYPE should already be a runtime type
2973 reference. When pass_ipa_free_lang data is made a default
2974 pass, we can then remove the call to lookup_type_for_runtime
2975 below. */
2976 if (TYPE_P (type))
2977 type = lookup_type_for_runtime (type);
2978
cda5bf39 2979 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
617a1b71
PB
2980
2981 /* Let cgraph know that the rtti decl is used. Not all of the
2982 paths below go through assemble_integer, which would take
2983 care of this for us. */
2984 STRIP_NOPS (type);
2985 if (TREE_CODE (type) == ADDR_EXPR)
2986 {
2987 type = TREE_OPERAND (type, 0);
8813a647 2988 if (VAR_P (type))
66058468 2989 is_public = TREE_PUBLIC (type);
617a1b71 2990 }
dd2c9f74
VR
2991 else
2992 gcc_assert (TREE_CODE (type) == INTEGER_CST);
617a1b71
PB
2993 }
2994
2995 /* Allow the target to override the type table entry format. */
2996 if (targetm.asm_out.ttype (value))
2997 return;
2998
2999 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3000 assemble_integer (value, tt_format_size,
3001 tt_format_size * BITS_PER_UNIT, 1);
3002 else
d858f359 3003 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
617a1b71
PB
3004}
3005
b78b513e
EB
3006/* Output an exception table for the current function according to SECTION.
3007
3008 If the function has been partitioned into hot and cold parts, value 0 for
3009 SECTION refers to the table associated with the hot part while value 1
3010 refers to the table associated with the cold part. If the function has
3011 not been partitioned, value 0 refers to the single exception table. */
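/* Added overview (not part of the original source) of the LSDA pieces the
   function below emits, in order:

     1 byte    @LPStart encoding  (always DW_EH_PE_omit here)
     1 byte    @TType encoding    (DW_EH_PE_omit when there is no type data)
     uleb128   @TType base offset (only when type data is present)
     1 byte    call-site encoding (uleb128 or udata4)
     uleb128   call-site table length
               call-site table
               action record table
               @TType (type_info) entries, emitted in reverse order
               exception specification table  */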
3012
17f6e37d 3013static void
a68b5e52 3014output_one_function_exception_table (int section)
52a11cbf 3015{
1d65f45c 3016 int tt_format, cs_format, lp_format, i;
52a11cbf
RH
3017 char ttype_label[32];
3018 char cs_after_size_label[32];
3019 char cs_end_label[32];
52a11cbf 3020 int call_site_len;
52a11cbf 3021 int have_tt_data;
ae0ed63a 3022 int tt_format_size = 0;
1ef1bf06 3023
9771b263 3024 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
1d65f45c 3025 || (targetm.arm_eabi_unwinder
9771b263
DN
3026 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
3027 : vec_safe_length (cfun->eh->ehspec_data.other)));
52a11cbf 3028
b627d6fe
RH
3029 /* Indicate the format of the @TType entries. */
3030 if (! have_tt_data)
3031 tt_format = DW_EH_PE_omit;
3032 else
3033 {
3034 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
61214be1
TS
3035 if (HAVE_AS_LEB128)
3036 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
3037 section ? "LLSDATTC" : "LLSDATT",
3038 current_function_funcdef_no);
3039
b627d6fe
RH
3040 tt_format_size = size_of_encoded_value (tt_format);
3041
7a900ebc 3042 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3043 }
52a11cbf 3044
17f6e37d
JJ
3045 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3046 current_function_funcdef_no);
52a11cbf
RH
3047
3048 /* The LSDA header. */
3049
3050 /* Indicate the format of the landing pad start pointer. An omitted
3051 field implies @LPStart == @Start. */
3052 /* Currently we always put @LPStart == @Start. This field would
3053 be most useful in moving the landing pads completely out of
3054 line to another section, but it could also be used to minimize
3055 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3056 lp_format = DW_EH_PE_omit;
3057 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3058 eh_data_format_name (lp_format));
52a11cbf
RH
3059
3060 /* @LPStart pointer would go here. */
3061
2a1ee410
RH
3062 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3063 eh_data_format_name (tt_format));
52a11cbf 3064
61214be1
TS
3065 if (!HAVE_AS_LEB128)
3066 {
3067 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3068 call_site_len = sjlj_size_of_call_site_table ();
3069 else
3070 call_site_len = dw2_size_of_call_site_table (section);
3071 }
52a11cbf
RH
3072
3073 /* A pc-relative 4-byte displacement to the @TType data. */
3074 if (have_tt_data)
3075 {
61214be1 3076 if (HAVE_AS_LEB128)
1ef1bf06 3077 {
61214be1
TS
3078 char ttype_after_disp_label[32];
3079 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3080 section ? "LLSDATTDC" : "LLSDATTD",
3081 current_function_funcdef_no);
3082 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3083 "@TType base offset");
3084 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
1ef1bf06 3085 }
61214be1
TS
3086 else
3087 {
3088 /* Ugh. Alignment complicates things: the displacement's own encoded size feeds back into the padding, so iterate to a fixed point. */
3089 unsigned int before_disp, after_disp, last_disp, disp;
3090
3091 before_disp = 1 + 1;
3092 after_disp = (1 + size_of_uleb128 (call_site_len)
3093 + call_site_len
3094 + vec_safe_length (crtl->eh.action_record_data)
3095 + (vec_safe_length (cfun->eh->ttype_data)
3096 * tt_format_size));
3097
3098 disp = after_disp;
3099 do
3100 {
3101 unsigned int disp_size, pad;
52a11cbf 3102
61214be1
TS
3103 last_disp = disp;
3104 disp_size = size_of_uleb128 (disp);
3105 pad = before_disp + disp_size + after_disp;
3106 if (pad % tt_format_size)
3107 pad = tt_format_size - (pad % tt_format_size);
3108 else
3109 pad = 0;
3110 disp = after_disp + pad;
3111 }
3112 while (disp != last_disp);
3113
3114 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3115 }
3116 }
1ef1bf06 3117
52a11cbf 3118 /* Indicate the format of the call-site offsets. */
61214be1
TS
3119 if (HAVE_AS_LEB128)
3120 cs_format = DW_EH_PE_uleb128;
3121 else
3122 cs_format = DW_EH_PE_udata4;
3123
2a1ee410
RH
3124 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3125 eh_data_format_name (cs_format));
52a11cbf 3126
61214be1
TS
3127 if (HAVE_AS_LEB128)
3128 {
3129 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3130 section ? "LLSDACSBC" : "LLSDACSB",
3131 current_function_funcdef_no);
3132 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3133 section ? "LLSDACSEC" : "LLSDACSE",
3134 current_function_funcdef_no);
3135 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3136 "Call-site table length");
3137 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3138 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3139 sjlj_output_call_site_table ();
3140 else
3141 dw2_output_call_site_table (cs_format, section);
3142 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3143 }
52a11cbf 3144 else
61214be1
TS
3145 {
3146 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3147 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3148 sjlj_output_call_site_table ();
3149 else
3150 dw2_output_call_site_table (cs_format, section);
3151 }
52a11cbf
RH
3152
3153 /* ??? Decode and interpret the data for flag_debug_asm. */
1d65f45c
RH
3154 {
3155 uchar uc;
9771b263 3156 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
1d65f45c
RH
3157 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3158 }
1ef1bf06 3159
52a11cbf 3160 if (have_tt_data)
7a900ebc 3161 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3162
9771b263 3163 i = vec_safe_length (cfun->eh->ttype_data);
52a11cbf 3164 while (i-- > 0)
1ef1bf06 3165 {
9771b263 3166 tree type = (*cfun->eh->ttype_data)[i];
617a1b71 3167 output_ttype (type, tt_format, tt_format_size);
1ef1bf06 3168 }
52a11cbf 3169
61214be1
TS
3170 if (HAVE_AS_LEB128 && have_tt_data)
3171 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
52a11cbf
RH
3172
3173 /* ??? Decode and interpret the data for flag_debug_asm. */
1d65f45c 3174 if (targetm.arm_eabi_unwinder)
617a1b71 3175 {
1d65f45c
RH
3176 tree type;
3177 for (i = 0;
9771b263 3178 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
1d65f45c
RH
3179 output_ttype (type, tt_format, tt_format_size);
3180 }
3181 else
3182 {
3183 uchar uc;
3184 for (i = 0;
9771b263 3185 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
1d65f45c
RH
3186 dw2_asm_output_data (1, uc,
3187 i ? NULL : "Exception specification table");
617a1b71 3188 }
17f6e37d
JJ
3189}
3190
/* Output an exception table for the current function according to SECTION,
   switching back and forth from the function section appropriately.

   If the function has been partitioned into hot and cold parts, value 0 for
   SECTION refers to the table associated with the hot part while value 1
   refers to the table associated with the cold part.  If the function has
   not been partitioned, value 0 refers to the single exception table.  */

void
output_function_exception_table (int section)
{
  const char *fnname = get_fnname_from_decl (current_function_decl);
  rtx personality = get_personality_function (current_function_decl);

  /* Not all functions need anything.  */
  if (!crtl->uses_eh_lsda
      || targetm_common.except_unwind_info (&global_options) == UI_NONE)
    return;

  /* No need to emit any boilerplate stuff for the cold part.  */
  if (section == 1 && !crtl->eh.call_site_record_v[1])
    return;

  if (personality)
    {
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
        targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  /* Do the real work.  */
  output_one_function_exception_table (section);

  switch_to_section (current_function_section ());
}

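/* A minimal usage sketch, not a quote of the actual callers elsewhere in
   the compiler: emit the hot-part table first, and the cold-part table
   only when the function was partitioned.

     output_function_exception_table (0);
     if (crtl->has_bb_partition)
       output_function_exception_table (1);
*/
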
/* Set the throw statement table of function FUN to TABLE.  */

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}

/* Return the throw statement table of function FUN.  */

hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
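
/* These two are thin accessors around FUN->eh->throw_stmt_table.  As a
   hedged sketch (STMT, LP_NR and handle_lp_number are hypothetical), a
   consumer could use the map directly like so:

     hash_map<gimple *, int> *tbl = get_eh_throw_stmt_table (cfun);
     if (tbl)
       {
         tbl->put (stmt, lp_nr);
         if (int *nr = tbl->get (stmt))
           handle_lp_number (*nr);   // handle_lp_number is hypothetical

       }

   In practice the table is maintained through dedicated helpers rather
   than manipulated directly.  */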
\f
/* Determine if the function needs an EH personality function.  */

enum eh_personality_kind
function_needs_eh_personality (struct function *fn)
{
  enum eh_personality_kind kind = eh_personality_none;
  eh_region i;

  FOR_ALL_EH_REGION_FN (i, fn)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
          /* Can do with any personality including the generic C one.  */
          kind = eh_personality_any;
          break;

        case ERT_TRY:
        case ERT_ALLOWED_EXCEPTIONS:
          /* Always needs an EH personality function.  The generic C
             personality doesn't handle these even for empty type lists.  */
          return eh_personality_lang;

        case ERT_MUST_NOT_THROW:
          /* Always needs an EH personality function.  The language may
             specify which abort routine must be used, e.g. std::terminate.  */
          return eh_personality_lang;
        }
    }

  return kind;
}
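
/* A hedged sketch of how the result is meant to be consumed (the concrete
   consumers live elsewhere):

     switch (function_needs_eh_personality (cfun))
       {
       case eh_personality_none:
         break;   // no EH regions: no personality reference needed
       case eh_personality_any:
         // only cleanups: any personality works, e.g. the generic C one
         break;
       case eh_personality_lang:
         // try/filter/must-not-throw: the language personality is required
         break;
       }
*/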
\f
/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->index, type_name[(int) i->type]);

      if (i->landing_pads)
        {
          eh_landing_pad lp;

          fprintf (out, " land:");
          if (current_ir_type () == IR_GIMPLE)
            {
              for (lp = i->landing_pads; lp ; lp = lp->next_lp)
                {
                  fprintf (out, "{%i,", lp->index);
                  print_generic_expr (out, lp->post_landing_pad);
                  fputc ('}', out);
                  if (lp->next_lp)
                    fputc (',', out);
                }
            }
          else
            {
              for (lp = i->landing_pads; lp ; lp = lp->next_lp)
                {
                  fprintf (out, "{%i,", lp->index);
                  if (lp->landing_pad)
                    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
                             NOTE_P (lp->landing_pad) ? "(del)" : "");
                  else
                    fprintf (out, "(nil),");
                  if (lp->post_landing_pad)
                    {
                      rtx_insn *lab = label_rtx (lp->post_landing_pad);
                      fprintf (out, "%i%s}", INSN_UID (lab),
                               NOTE_P (lab) ? "(del)" : "");
                    }
                  else
                    fprintf (out, "(nil)}");
                  if (lp->next_lp)
                    fputc (',', out);
                }
            }
        }

      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            fprintf (out, " catch:");
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              {
                fputc ('{', out);
                if (c->label)
                  {
                    fprintf (out, "lab:");
                    print_generic_expr (out, c->label);
                    fputc (';', out);
                  }
                print_generic_expr (out, c->type_list);
                fputc ('}', out);
                if (c->next_catch)
                  fputc (',', out);
              }
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          fprintf (out, " filter :%i types:", i->u.allowed.filter);
          print_generic_expr (out, i->u.allowed.type_list);
          break;
        }
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

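/* The dump is intentionally terse.  On GIMPLE it looks roughly like the
   following (region and landing-pad numbers are invented for the sake of
   the example; indentation encodes nesting depth):

     Eh tree:
        1 try land:{1,<L1>} catch:{lab:<L2>;E}
          2 cleanup land:{2,<L3>}

   In RTL mode the landing pads are shown as INSN_UIDs of the landing-pad
   and post-landing-pad labels instead of GIMPLE label names.  */
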
/* Dump the EH tree for FN on stderr.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}

/* Verify invariants on EH data structures.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
        if (r->index == i)
          count_r++;
        else
          {
            error ("%<region_array%> is corrupted for region %i", r->index);
            err = true;
          }
      }

  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
        if (lp->index == i)
          count_lp++;
        else
          {
            error ("%<lp_array%> is corrupted for lp %i", lp->index);
            err = true;
          }
      }

  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
        {
          error ("%<region_array%> is corrupted for region %i", r->index);
          err = true;
        }
      if (r->outer != outer)
        {
          error ("outer block of region %i is wrong", r->index);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", r->index);
          err = true;
        }
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
        {
          if ((*fun->eh->lp_array)[lp->index] != lp)
            {
              error ("%<lp_array%> is corrupted for lp %i", lp->index);
              err = true;
            }
          if (lp->region != r)
            {
              error ("region of lp %i is wrong", lp->index);
              err = true;
            }
          nvisited_lp++;
        }

      if (r->inner)
        outer = r, r = r->inner, depth++;
      else if (r->next_peer)
        r = r->next_peer;
      else
        {
          do
            {
              r = r->outer;
              if (r == NULL)
                goto region_done;
              depth--;
              outer = r->outer;
            }
          while (r->next_peer == NULL);
          r = r->next_peer;
        }
    }
 region_done:
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("%<region_array%> does not match %<region_tree%>");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("%<lp_array%> does not match %<region_tree%>");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("%qs failed", __func__);
    }
}
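
/* Both routines above are debugging aids.  A hedged sketch of typical use:
   a checking build can run the verifier after passes that restructure the
   region tree,

     if (flag_checking)
       verify_eh_tree (cfun);

   while debug_eh_tree is meant for interactive use, e.g.
   "call debug_eh_tree (cfun)" from within gdb.  */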
\f
#include "gt-except.h"