1 /* Perform optimizations on tree structure.
3 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
4 Written by Mark Mitchell (mark@codesourcery.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
28 #include "insn-config.h"
29 #include "integrate.h"
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
41 o Provide heuristics to clamp inlining of recursive template
44 o It looks like the return label is not being placed in the optimal
45 place. Shouldn't it come before the returned value? */
47 /* Data required for function inlining. */
49 typedef struct inline_data
51 /* A stack of the functions we are inlining. For example, if we are
52 compiling `f', which calls `g', which calls `h', and we are
53 inlining the body of `h', the stack will contain, `h', followed
54 by `g', followed by `f'. */
56 /* The label to jump to when a return statement is encountered. */
58 /* The map from local declarations in the inlined function to
59 equivalents in the function into which it is being inlined. */
61 /* Nonzero if we are currently within the cleanup for a
63 int in_target_cleanup_p
;
68 static tree initialize_inlined_parameters
PARAMS ((inline_data
*, tree
, tree
));
69 static tree declare_return_variable
PARAMS ((inline_data
*, tree
*));
70 static tree copy_body_r
PARAMS ((tree
*, int *, void *));
71 static tree copy_body
PARAMS ((inline_data
*));
72 static tree expand_call_inline
PARAMS ((tree
*, int *, void *));
73 static void expand_calls_inline
PARAMS ((tree
*, inline_data
*));
74 static int inlinable_function_p
PARAMS ((tree
, inline_data
*));
75 static tree remap_decl
PARAMS ((tree
, inline_data
*));
76 static void remap_block
PARAMS ((tree
, tree
, inline_data
*));
77 static void copy_scope_stmt
PARAMS ((tree
*, int *, inline_data
*));
78 static tree calls_setjmp_r
PARAMS ((tree
*, int *, void *));
80 /* Remap DECL during the copying of the BLOCK tree for the function.
81 DATA is really an `inline_data *'. */
91 /* We only remap local variables in the current function. */
92 fn
= VARRAY_TOP_TREE (id
->fns
);
93 if (!nonstatic_local_decl_p (decl
) || DECL_CONTEXT (decl
) != fn
)
96 /* See if we have remapped this declaration. */
97 n
= splay_tree_lookup (id
->decl_map
, (splay_tree_key
) decl
);
98 /* If we didn't already have an equivalent for this declaration,
104 /* Make a copy of the variable or label. */
105 t
= copy_decl_for_inlining (decl
, fn
,
106 VARRAY_TREE (id
->fns
, 0));
107 /* Remember it, so that if we encounter this local entity
108 again we can reuse this copy. */
109 n
= splay_tree_insert (id
->decl_map
,
110 (splay_tree_key
) decl
,
111 (splay_tree_value
) t
);
114 return (tree
) n
->value
;
117 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
118 remapped versions of the variables therein. And hook the new block
119 into the block-tree. If non-NULL, the DECLS are declarations to
120 add to use instead of the BLOCK_VARS in the old block. */
123 remap_block (scope_stmt
, decls
, id
)
128 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
129 not know whether or not expand_expr will actually write out the
130 code we put there. If it does not, then we'll have more BLOCKs
131 than block-notes, and things will go awry. At some point, we
132 should make the back-end handle BLOCK notes in a tidier way,
133 without requiring a strict correspondence to the block-tree; then
134 this check can go. */
135 if (id
->in_target_cleanup_p
)
137 SCOPE_STMT_BLOCK (scope_stmt
) = NULL_TREE
;
141 /* If this is the beginning of a scope, remap the associated BLOCK. */
142 if (SCOPE_BEGIN_P (scope_stmt
) && SCOPE_STMT_BLOCK (scope_stmt
))
149 /* Make the new block. */
150 old_block
= SCOPE_STMT_BLOCK (scope_stmt
);
151 new_block
= make_node (BLOCK
);
152 TREE_USED (new_block
) = TREE_USED (old_block
);
153 BLOCK_ABSTRACT_ORIGIN (new_block
) = old_block
;
154 SCOPE_STMT_BLOCK (scope_stmt
) = new_block
;
156 /* Remap its variables. */
157 for (old_var
= decls
? decls
: BLOCK_VARS (old_block
);
159 old_var
= TREE_CHAIN (old_var
))
163 /* Remap the variable. */
164 new_var
= remap_decl (old_var
, id
);
166 /* We didn't remap this variable, so we can't mess with
171 TREE_CHAIN (new_var
) = BLOCK_VARS (new_block
);
172 BLOCK_VARS (new_block
) = new_var
;
175 /* We put the BLOCK_VARS in reverse order; fix that now. */
176 BLOCK_VARS (new_block
) = nreverse (BLOCK_VARS (new_block
));
177 /* Attach this new block after the DECL_INITIAL block for the
178 function into which this block is being inlined. In
179 rest_of_compilation we will straighten out the BLOCK tree. */
180 fn
= VARRAY_TREE (id
->fns
, 0);
181 BLOCK_CHAIN (new_block
) = BLOCK_CHAIN (DECL_INITIAL (fn
));
182 BLOCK_CHAIN (DECL_INITIAL (fn
)) = new_block
;
183 /* Remember the remapped block. */
184 splay_tree_insert (id
->decl_map
,
185 (splay_tree_key
) old_block
,
186 (splay_tree_value
) new_block
);
188 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
190 else if (SCOPE_END_P (scope_stmt
) && SCOPE_STMT_BLOCK (scope_stmt
))
194 /* Find this block in the table of remapped things. */
195 n
= splay_tree_lookup (id
->decl_map
,
196 (splay_tree_key
) SCOPE_STMT_BLOCK (scope_stmt
));
197 my_friendly_assert (n
!= NULL
, 19991203);
198 SCOPE_STMT_BLOCK (scope_stmt
) = (tree
) n
->value
;
202 /* Copy the SCOPE_STMT pointed to by TP. */
205 copy_scope_stmt (tp
, walk_subtrees
, id
)
212 /* Remember whether or not this statement was nullified. When
213 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
214 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
215 deal with copying BLOCKs if they do not wish to do so. */
216 block
= SCOPE_STMT_BLOCK (*tp
);
217 /* Copy (and replace) the statement. */
218 copy_tree_r (tp
, walk_subtrees
, NULL
);
219 /* Restore the SCOPE_STMT_BLOCK. */
220 SCOPE_STMT_BLOCK (*tp
) = block
;
222 /* Remap the associated block. */
223 remap_block (*tp
, NULL_TREE
, id
);
226 /* Called from copy_body via walk_tree. DATA is really an
230 copy_body_r (tp
, walk_subtrees
, data
)
239 id
= (inline_data
*) data
;
240 fn
= VARRAY_TOP_TREE (id
->fns
);
242 /* All automatic variables should have a DECL_CONTEXT indicating
243 what function they come from. */
244 if ((TREE_CODE (*tp
) == VAR_DECL
|| TREE_CODE (*tp
) == LABEL_DECL
)
245 && DECL_NAMESPACE_SCOPE_P (*tp
))
246 my_friendly_assert (DECL_EXTERNAL (*tp
) || TREE_STATIC (*tp
),
249 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
250 GOTO_STMT with the RET_LABEL as its target. */
251 if (TREE_CODE (*tp
) == RETURN_STMT
)
253 tree return_stmt
= *tp
;
256 /* Build the GOTO_STMT. */
257 goto_stmt
= build_min_nt (GOTO_STMT
, id
->ret_label
);
258 TREE_CHAIN (goto_stmt
) = TREE_CHAIN (return_stmt
);
260 /* If we're returning something, just turn that into an
261 assignment into the equivalent of the original
263 if (RETURN_EXPR (return_stmt
))
265 *tp
= build_min_nt (EXPR_STMT
,
266 RETURN_EXPR (return_stmt
));
267 /* And then jump to the end of the function. */
268 TREE_CHAIN (*tp
) = goto_stmt
;
270 /* If we're not returning anything just do the jump. */
274 /* Local variables and labels need to be replaced by equivalent
275 variables. We don't want to copy static variables; there's only
276 one of those, no matter how many times we inline the containing
278 else if (nonstatic_local_decl_p (*tp
) && DECL_CONTEXT (*tp
) == fn
)
282 /* Remap the declaration. */
283 new_decl
= remap_decl (*tp
, id
);
284 my_friendly_assert (new_decl
!= NULL_TREE
, 19991203);
285 /* Replace this variable with the copy. */
288 else if (nonstatic_local_decl_p (*tp
)
289 && DECL_CONTEXT (*tp
) != VARRAY_TREE (id
->fns
, 0))
290 my_friendly_abort (0);
291 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
292 remap_save_expr (tp
, id
->decl_map
, VARRAY_TREE (id
->fns
, 0),
294 else if (TREE_CODE (*tp
) == UNSAVE_EXPR
)
295 my_friendly_abort (19991113);
296 /* For a SCOPE_STMT, we must copy the associated block so that we
297 can write out debugging information for the inlined variables. */
298 else if (TREE_CODE (*tp
) == SCOPE_STMT
&& !id
->in_target_cleanup_p
)
299 copy_scope_stmt (tp
, walk_subtrees
, id
);
300 /* Otherwise, just copy the node. Note that copy_tree_r already
301 knows not to copy VAR_DECLs, etc., so this is safe. */
304 copy_tree_r (tp
, walk_subtrees
, NULL
);
306 /* The copied TARGET_EXPR has never been expanded, even if the
307 original node was expanded already. */
308 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
310 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
311 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
313 /* Similarly, if we're copying a CALL_EXPR, the RTL for the
314 result is no longer valid. */
315 else if (TREE_CODE (*tp
) == CALL_EXPR
)
316 CALL_EXPR_RTL (*tp
) = NULL_RTX
;
319 /* Keep iterating. */
323 /* Make a copy of the body of FN so that it can be inserted inline in
332 body
= DECL_SAVED_TREE (VARRAY_TOP_TREE (id
->fns
));
333 walk_tree (&body
, copy_body_r
, id
);
338 /* Generate code to initialize the parameters of the function at the
339 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
342 initialize_inlined_parameters (id
, args
, fn
)
352 /* Figure out what the parameters are. */
353 parms
= DECL_ARGUMENTS (fn
);
355 /* Start with no initializations whatsoever. */
356 init_stmts
= NULL_TREE
;
358 /* Loop through the parameter declarations, replacing each with an
359 equivalent VAR_DECL, appropriately initialized. */
360 for (p
= parms
, a
= args
; p
; a
= TREE_CHAIN (a
), p
= TREE_CHAIN (p
))
365 /* Make an equivalent VAR_DECL. */
366 var
= copy_decl_for_inlining (p
, fn
, VARRAY_TREE (id
->fns
, 0));
367 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
368 that way, when the PARM_DECL is encountered, it will be
369 automatically replaced by the VAR_DECL. */
370 splay_tree_insert (id
->decl_map
,
372 (splay_tree_value
) var
);
373 /* Initialize this VAR_DECL from the equivalent argument. If
374 the argument is an object, created via a constructor or copy,
375 this will not result in an extra copy: the TARGET_EXPR
376 representing the argument will be bound to VAR, and the
377 object will be constructed in VAR. */
378 init_stmt
= build_min_nt (EXPR_STMT
,
379 build (INIT_EXPR
, TREE_TYPE (p
),
380 var
, TREE_VALUE (a
)));
381 /* Declare this new variable. Note that we do this *after* the
382 initialization because we are going to reverse all the
383 initialization statements below. */
384 TREE_CHAIN (init_stmt
) = build_min_nt (DECL_STMT
, var
);
385 /* Add this initialization to the list. */
386 TREE_CHAIN (TREE_CHAIN (init_stmt
)) = init_stmts
;
387 init_stmts
= init_stmt
;
390 /* The initialization statements have been built up in reverse
391 order. Straighten them out now. */
392 return nreverse (init_stmts
);
395 /* Declare a return variable to replace the RESULT_DECL for the
396 function we are calling. An appropriate DECL_STMT is returned.
397 The USE_STMT is filled in to contain a use of the declaration to
398 indicate the return value of the function. */
401 declare_return_variable (id
, use_stmt
)
402 struct inline_data
*id
;
405 tree fn
= VARRAY_TOP_TREE (id
->fns
);
406 tree result
= DECL_RESULT (fn
);
409 /* We don't need to do anything for functions that don't return
411 if (!result
|| same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result
)),
414 *use_stmt
= NULL_TREE
;
418 /* Make an appropriate copy. */
419 var
= copy_decl_for_inlining (result
, fn
, VARRAY_TREE (id
->fns
, 0));
420 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
421 way, when the RESULT_DECL is encountered, it will be
422 automatically replaced by the VAR_DECL. */
423 splay_tree_insert (id
->decl_map
,
424 (splay_tree_key
) result
,
425 (splay_tree_value
) var
);
427 /* Build the USE_STMT. */
428 *use_stmt
= build_min_nt (EXPR_STMT
, var
);
430 /* Build the declaration statement. */
431 return build_min_nt (DECL_STMT
, var
);
434 /* Returns non-zero if FN is a function that can be inlined. */
437 inlinable_function_p (fn
, id
)
443 /* If we've already decided this function shouldn't be inlined,
444 there's no need to check again. */
445 if (DECL_UNINLINABLE (fn
))
448 /* Assume it is not inlinable. */
451 /* If we're not inlining things, then nothing is inlinable. */
452 if (!flag_inline_trees
)
454 /* If the function was not declared `inline', then we don't inline
456 else if (!DECL_INLINE (fn
))
458 /* If we don't have the function body available, we can't inline
460 else if (!DECL_SAVED_TREE (fn
))
462 /* We can't inline varargs functions. */
463 else if (varargs_function_p (fn
))
465 /* All is well. We can inline this function. Traditionally, GCC
466 has refused to inline functions using setjmp or alloca, or
467 functions whose values are returned in a PARALLEL, and a few
468 other such obscure conditions. We are not equally constrained at
473 /* Squirrel away the result so that we don't have to check again. */
474 DECL_UNINLINABLE (fn
) = !inlinable
;
476 /* Don't do recursive inlining, either. We don't record this in
477 DECL_UNLINABLE; we may be able to inline this function later. */
482 for (i
= 0; i
< id
->fns
->elements_used
; ++i
)
483 if (VARRAY_TREE (id
->fns
, i
) == fn
)
487 /* We can inline a template instantiation only if it's fully
490 && DECL_TEMPLATE_INFO (fn
)
491 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn
)))
493 fn
= instantiate_decl (fn
);
494 inlinable
= !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn
));
497 /* Return the result. */
501 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
504 expand_call_inline (tp
, walk_subtrees
, data
)
519 /* See what we've got. */
520 id
= (inline_data
*) data
;
523 /* Recurse, but letting recursive invocations know that we are
524 inside the body of a TARGET_EXPR. */
525 if (TREE_CODE (*tp
) == TARGET_EXPR
)
527 int i
, len
= first_rtl_op (TARGET_EXPR
);
529 /* We're walking our own subtrees. */
532 /* Actually walk over them. This loop is the body of
533 walk_trees, omitting the case where the TARGET_EXPR
534 itself is handled. */
535 for (i
= 0; i
< len
; ++i
)
538 ++id
->in_target_cleanup_p
;
539 walk_tree (&TREE_OPERAND (*tp
, i
), expand_call_inline
, data
);
541 --id
->in_target_cleanup_p
;
547 /* From here on, we're only interested in CALL_EXPRs. */
548 if (TREE_CODE (t
) != CALL_EXPR
)
551 /* First, see if we can figure out what function is being called.
552 If we cannot, then there is no hope of inlining the function. */
553 fn
= get_callee_fndecl (t
);
557 /* Don't try to inline functions that are not well-suited to
559 if (!inlinable_function_p (fn
, id
))
562 /* Build a statement-expression containing code to initialize the
563 arguments, the actual inline expansion of the body, and a label
564 for the return statements within the function to jump to. The
565 type of the statement expression is the return type of the
567 expr
= build_min (STMT_EXPR
, TREE_TYPE (TREE_TYPE (fn
)), NULL_TREE
);
569 /* Local declarations will be replaced by their equivalents in this
572 id
->decl_map
= splay_tree_new (splay_tree_compare_pointers
,
575 /* Initialize the parameters. */
576 arg_inits
= initialize_inlined_parameters (id
, TREE_OPERAND (t
, 1), fn
);
577 /* Expand any inlined calls in the initializers. Do this before we
578 push FN on the stack of functions we are inlining; we want to
579 inline calls to FN that appear in the initializers for the
581 expand_calls_inline (&arg_inits
, id
);
582 /* And add them to the tree. */
583 STMT_EXPR_STMT (expr
) = chainon (STMT_EXPR_STMT (expr
), arg_inits
);
585 /* Record the function we are about to inline so that we can avoid
586 recursing into it. */
587 VARRAY_PUSH_TREE (id
->fns
, fn
);
589 /* Return statements in the function body will be replaced by jumps
591 id
->ret_label
= build_decl (LABEL_DECL
, NULL_TREE
, NULL_TREE
);
592 DECL_CONTEXT (id
->ret_label
) = VARRAY_TREE (id
->fns
, 0);
594 /* Create a block to put the parameters in. We have to do this
595 after the parameters have been remapped because remapping
596 parameters is different from remapping ordinary variables. */
597 scope_stmt
= build_min_nt (SCOPE_STMT
, DECL_INITIAL (fn
));
598 SCOPE_BEGIN_P (scope_stmt
) = 1;
599 SCOPE_NO_CLEANUPS_P (scope_stmt
) = 1;
600 remap_block (scope_stmt
, DECL_ARGUMENTS (fn
), id
);
601 TREE_CHAIN (scope_stmt
) = STMT_EXPR_STMT (expr
);
602 STMT_EXPR_STMT (expr
) = scope_stmt
;
604 /* Tell the debugging backends that this block represents the
605 outermost scope of the inlined function. */
606 if (SCOPE_STMT_BLOCK (scope_stmt
))
607 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt
)) = DECL_ORIGIN (fn
);
609 /* Declare the return variable for the function. */
610 STMT_EXPR_STMT (expr
)
611 = chainon (STMT_EXPR_STMT (expr
),
612 declare_return_variable (id
, &use_stmt
));
614 /* After we've initialized the parameters, we insert the body of the
616 STMT_EXPR_STMT (expr
)
617 = chainon (STMT_EXPR_STMT (expr
), copy_body (id
));
619 /* Close the block for the parameters. */
620 scope_stmt
= build_min_nt (SCOPE_STMT
, DECL_INITIAL (fn
));
621 SCOPE_NO_CLEANUPS_P (scope_stmt
) = 1;
622 my_friendly_assert (DECL_INITIAL (fn
)
623 && TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
,
625 remap_block (scope_stmt
, NULL_TREE
, id
);
626 STMT_EXPR_STMT (expr
)
627 = chainon (STMT_EXPR_STMT (expr
), scope_stmt
);
629 /* Finally, mention the returned value so that the value of the
630 statement-expression is the returned value of the function. */
631 STMT_EXPR_STMT (expr
) = chainon (STMT_EXPR_STMT (expr
), use_stmt
);
634 splay_tree_delete (id
->decl_map
);
637 /* After the body of the function comes the RET_LABEL. */
638 STMT_EXPR_STMT (expr
)
639 = chainon (STMT_EXPR_STMT (expr
),
640 build_min_nt (LABEL_STMT
, id
->ret_label
));
642 /* The new expression has side-effects if the old one did. */
643 TREE_SIDE_EFFECTS (expr
) = TREE_SIDE_EFFECTS (t
);
645 /* Replace the call by the inlined body. Wrap it in an
646 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
647 pointing to the right place. */
648 chain
= TREE_CHAIN (*tp
);
649 *tp
= build_expr_wfl (expr
, DECL_SOURCE_FILE (fn
), DECL_SOURCE_LINE (fn
),
651 EXPR_WFL_EMIT_LINE_NOTE (*tp
) = 1;
652 TREE_CHAIN (*tp
) = chain
;
654 /* If the value of the new expression is ignored, that's OK. We
655 don't warn about this for CALL_EXPRs, so we shouldn't warn about
656 the equivalent inlined version either. */
659 /* Recurse into the body of the just inlined function. */
660 expand_calls_inline (tp
, id
);
661 VARRAY_POP (id
->fns
);
663 /* Don't walk into subtrees. We've already handled them above. */
666 /* Keep iterating. */
670 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
671 expansions as appropriate. */
674 expand_calls_inline (tp
, id
)
678 /* Search through *TP, replacing all calls to inline functions by
679 appropriate equivalents. */
680 walk_tree (tp
, expand_call_inline
, id
);
683 /* Optimize the body of FN. */
686 optimize_function (fn
)
689 /* Expand calls to inline functions. */
690 if (flag_inline_trees
)
694 struct saved_scope
*s
;
697 memset (&id
, 0, sizeof (id
));
699 /* Don't allow recursion into FN. */
700 VARRAY_TREE_INIT (id
.fns
, 32, "fns");
701 VARRAY_PUSH_TREE (id
.fns
, fn
);
702 /* Or any functions that aren't finished yet. */
704 if (current_function_decl
)
706 VARRAY_PUSH_TREE (id
.fns
, current_function_decl
);
707 prev_fn
= current_function_decl
;
709 for (s
= scope_chain
; s
; s
= s
->prev
)
710 if (s
->function_decl
&& s
->function_decl
!= prev_fn
)
712 VARRAY_PUSH_TREE (id
.fns
, s
->function_decl
);
713 prev_fn
= s
->function_decl
;
716 /* Replace all calls to inline functions with the bodies of those
718 expand_calls_inline (&DECL_SAVED_TREE (fn
), &id
);
721 VARRAY_FREE (id
.fns
);
725 /* Called from calls_setjmp_p via walk_tree. */
728 calls_setjmp_r (tp
, walk_subtrees
, data
)
730 int *walk_subtrees ATTRIBUTE_UNUSED
;
731 void *data ATTRIBUTE_UNUSED
;
738 /* We're only interested in FUNCTION_DECLS. */
739 if (TREE_CODE (*tp
) != FUNCTION_DECL
)
742 special_function_p (*tp
, &setjmp_p
, &longjmp_p
, &malloc_p
, &alloca_p
);
744 return setjmp_p
? *tp
: NULL_TREE
;
747 /* Returns non-zero if FN calls `setjmp' or some other function that
748 can return more than once. This function is conservative; it may
749 occasionally return a non-zero value even when FN does not actually
756 return (walk_tree (&DECL_SAVED_TREE (fn
), calls_setjmp_r
, NULL
)