gcc/except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 There are two major codegen options for exception handling: a PC
48 range table approach and a setjmp/longjmp based scheme. The flag
49 -fsjlj-exceptions selects the setjmp/longjmp approach, which is
50 the default, and -fno-sjlj-exceptions selects the PC range table
51 approach. Although the choice is made with a compile-time flag,
52 an entire application must be compiled with the same codegen
53 option. We will first discuss the PC range table approach;
54 after that, we will discuss the setjmp/longjmp based
55 approach.
56
57 It is appropriate to speak of the "context of a throw". This
58 context refers to the address where the exception is thrown from,
59 and is used to determine which exception region will handle the
60 exception.
61
62 Regions of code within a function can be marked such that if one
63 contains the context of a throw, control will be passed to a
64 designated "exception handler". These areas are known as "exception
65 regions". Exception regions cannot overlap, but they can be nested
66 to any arbitrary depth. Also, exception regions cannot cross
67 function boundaries.
68
69 Exception handlers can either be specified by the user (which we
70 will call a "user-defined handler") or generated by the compiler
71 (which we will designate as a "cleanup"). Cleanups are used to
72 perform tasks such as destruction of objects allocated on the
73 stack.
74
75 In the current implementation, cleanups are handled by allocating an
76 exception region for the area that the cleanup is designated for,
77 and the handler for the region performs the cleanup and then
78 rethrows the exception to the outer exception region. From the
79 standpoint of the current implementation, there is little
80 distinction made between a cleanup and a user-defined handler, and
81 the phrase "exception handler" can be used to refer to either one
82 equally well. (The section "Future Directions" below discusses how
83 this will change).
84
85 Each object file that is compiled with exception handling contains
86 a static array of exception handlers named __EXCEPTION_TABLE__.
87 Each entry contains the starting and ending addresses of the
88 exception region, and the address of the handler designated for
89 that region.
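
    As an illustration only (the field names below are invented; the real
    layout is defined by eh-common.h and the runtime code in libgcc2.c),
    one entry can be pictured as

        struct exception_table_entry
        {
          void *start_region;        -- first address covered by the region
          void *end_region;          -- first address past the region
          void *exception_handler;   -- where control is transferred
        };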
90
91 If the target does not use the DWARF 2 frame unwind information, at
92 program startup each object file invokes a function named
93 __register_exceptions with the address of its local
94 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
95 is responsible for recording all of the exception regions into one list
96 (which is kept in a static variable named exception_table_list).
97
98 On targets that support crtstuff.c, the unwind information
99 is stored in a section named .eh_frame and the information for the
100 entire shared object or program is registered with a call to
101 __register_frame_info. On other targets, the information for each
102 translation unit is registered from the file generated by collect2.
103 __register_frame_info is defined in frame.c, and is responsible for
104 recording all of the unwind regions into one list (which is kept in a
105 static variable named unwind_table_list).
106
107 The function __throw is actually responsible for doing the
108 throw. On machines that have unwind info support, __throw is generated
109 by code in libgcc2.c, otherwise __throw is generated on a
110 per-object-file basis for each source file compiled with
111 -fexceptions by the C++ frontend. Before __throw is invoked,
112 the current context of the throw needs to be placed in the global
113 variable __eh_pc.
114
115 __throw attempts to find the appropriate exception handler for the
116 PC value stored in __eh_pc by calling __find_first_exception_table_match
117 (which is defined in libgcc2.c). If __find_first_exception_table_match
118 finds a relevant handler, __throw transfers control directly to it.
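
    Conceptually the lookup is just a scan over all recorded regions,
    returning the handler of the first one whose address range contains
    the throw context. A simplified sketch only (not the actual
    libgcc2.c code; first_entry and next_entry stand in for the
    traversal of exception_table_list):

        void *
        __find_first_exception_table_match (void *pc)
        {
          struct exception_table_entry *entry;

          for (entry = first_entry (); entry != 0; entry = next_entry (entry))
            if ((char *) pc >= (char *) entry->start_region
                && (char *) pc < (char *) entry->end_region)
              return entry->exception_handler;

          return (void *) 0;
        }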
119
120 If a handler for the context being thrown from can't be found, __throw
121 walks the stack (see "Walking the stack" below) up the dynamic call
122 chain to continue searching for an appropriate exception handler,
123 based upon the caller of the function it last sought an exception
124 handler for. It stops when either an exception handler is found or
125 the top of the call chain is reached.
126
127 If no handler is found, an external library function named
128 __terminate is called. If a handler is found, then we restart
129 our search for a handler at the end of the call chain, and repeat
130 the search process, but instead of just walking up the call chain,
131 we unwind the call chain as we walk up it.
132
133 Internal implementation details:
134
135 To associate a user-defined handler with a block of statements, the
136 function expand_start_try_stmts is used to mark the start of the
137 block of statements with which the handler is to be associated
138 (which is known as a "try block"). All statements that appear
139 afterwards will be associated with the try block.
140
141 A call to expand_start_all_catch marks the end of the try block,
142 and also marks the start of the "catch block" (the user-defined
143 handler) associated with the try block.
144
145 This user-defined handler will be invoked for *every* exception
146 thrown with the context of the try block. It is up to the handler
147 to decide whether or not it wishes to handle any given exception,
148 as this implementation currently has no mechanism for dispatching
149 to a handler automatically. (There are plans for conditionally
150 processing an exception based on its "type", which will provide a
151 language-independent mechanism.)
152
153 If the handler chooses not to process the exception (perhaps by
154 looking at an "exception type" or some other additional data
155 supplied with the exception), it can fall through to the end of the
156 handler. expand_end_all_catch and expand_leftover_cleanups
157 add additional code to the end of each handler to take care of
158 rethrowing to the outer exception handler.
159
160 The handler also has the option to continue with "normal flow of
161 code", or in other words to resume executing at the statement
162 immediately after the end of the exception region. The variable
163 caught_return_label_stack contains a stack of labels, and jumping
164 to the topmost entry's label via expand_goto will resume normal
165 flow to the statement immediately after the end of the exception
166 region. If the handler falls through to the end, the exception will
167 be rethrown to the outer exception region.
168
169 The instructions for the catch block are kept as a separate
170 sequence, and will be emitted at the end of the function along with
171 the handlers specified via expand_eh_region_end. The end of the
172 catch block is marked with expand_end_all_catch.
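
    Putting these together, the calls a frontend makes for a try/catch
    construct have roughly the following shape (a sketch only, not the
    exact code in the C++ frontend):

        expand_start_try_stmts ();
          ... expand the statements of the try block ...
        expand_start_all_catch ();
          ... expand the user-defined handler(s) ...
        expand_end_all_catch ();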
173
174 Any data associated with the exception must currently be handled by
175 some external mechanism maintained in the frontend. For example,
176 the C++ exception mechanism passes an arbitrary value along with
177 the exception, and this is handled in the C++ frontend by using a
178 global variable to hold the value. (This will be changing in the
179 future.)
180
181 The mechanism in C++ for handling data associated with the
182 exception is clearly not thread-safe. For a thread-based
183 environment, another mechanism must be used (possibly using a
184 per-thread allocation mechanism if the size of the area that needs
185 to be allocated isn't known at compile time.)
186
187 Internally-generated exception regions (cleanups) are marked by
188 calling expand_eh_region_start to mark the start of the region,
189 and expand_eh_region_end (handler) is used to both designate the
190 end of the region and to associate a specified handler/cleanup with
191 the region. The rtl code in HANDLER will be invoked whenever an
192 exception occurs in the region between the calls to
193 expand_eh_region_start and expand_eh_region_end. After HANDLER is
194 executed, additional code is emitted to handle rethrowing the
195 exception to the outer exception handler. The code for HANDLER will
196 be emitted at the end of the function.
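
    In other words, a compiler-generated cleanup region has roughly this
    shape (a sketch; HANDLER is whatever cleanup tree the frontend built,
    for example a destructor call):

        expand_eh_region_start ();
          ... expand the statements being protected ...
        expand_eh_region_end (handler);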
197
198 TARGET_EXPRs can also be used to designate exception regions. A
199 TARGET_EXPR gives an unwind-protect style interface commonly used
200 in functional languages such as LISP. The associated expression is
201 evaluated, and whether or not it (or any of the functions that it
202 calls) throws an exception, the protect expression is always
203 invoked. This implementation takes care of the details of
204 associating an exception table entry with the expression and
205 generating the necessary code (it actually emits the protect
206 expression twice, once for normal flow and once for the exception
207 case). As for the other handlers, the code for the exception case
208 will be emitted at the end of the function.
209
210 Cleanups can also be specified by using add_partial_entry (handler)
211 and end_protect_partials. add_partial_entry creates the start of
212 a new exception region; HANDLER will be invoked if an exception is
213 thrown with the context of the region between the calls to
214 add_partial_entry and end_protect_partials. end_protect_partials is
215 used to mark the end of these regions. add_partial_entry can be
216 called as many times as needed before calling end_protect_partials.
217 However, end_protect_partials should only be invoked once for each
218 group of calls to add_partial_entry as the entries are queued
219 and all of the outstanding entries are processed simultaneously
220 when end_protect_partials is invoked. Similarly to the other
221 handlers, the code for HANDLER will be emitted at the end of the
222 function.
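
    For example, a group of partial entries might be emitted as follows
    (a sketch; CLEANUP_1 and CLEANUP_2 stand for whatever cleanup trees
    the frontend built):

        add_partial_entry (cleanup_1);
          ... code protected by cleanup_1 ...
        add_partial_entry (cleanup_2);
          ... code protected by cleanup_1 and cleanup_2 ...
        end_protect_partials ();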
223
224 The generated RTL for an exception region includes
225 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
226 the start and end of the exception region. A unique label is also
227 generated at the start of the exception region, which is available
228 by looking at the ehstack variable. The topmost entry corresponds
229 to the current region.
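
    Schematically, the RTL for one region therefore looks like this
    (illustration only; N is the handler number recorded in the notes):

        NOTE_INSN_EH_REGION_BEG (handler N)
          ... insns of the protected code ...
        NOTE_INSN_EH_REGION_END (handler N)

    While the region is open, the corresponding handler label can be
    found in the matching ehstack entry.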
230
231 In the current implementation, an exception can only be thrown from
232 a function call (since the mechanism used to actually throw an
233 exception involves calling __throw). If an exception region is
234 created but no function calls occur within that region, the region
235 can be safely optimized away (along with its exception handlers)
236 since no exceptions can ever be caught in that region. This
237 optimization is performed unless -fasynchronous-exceptions is
238 given. If the user wishes to throw from a signal handler, or other
239 asynchronous place, -fasynchronous-exceptions should be used when
240 compiling for maximally correct code, at the cost of additional
241 exception regions. Using -fasynchronous-exceptions only produces
242 code that is reasonably safe in such situations, but a correct
243 program cannot rely upon this working. It can be used in failsafe
244 code, where trying to continue on, and proceeding with potentially
245 incorrect results is better than halting the program.
246
247
248 Walking the stack:
249
250 The stack is walked by starting with a pointer to the current
251 frame, and finding the pointer to the caller's frame. The unwind info
252 tells __throw how to find it.
253
254 Unwinding the stack:
255
256 When we use the term unwinding the stack, we mean undoing the
257 effects of the function prologue in a controlled fashion so that we
258 still have the flow of control. Otherwise, we could just return
259 (jump to the normal end of function epilogue).
260
261 This is done in __throw in libgcc2.c when we know that a handler exists
262 in a frame higher up the call stack than its immediate caller.
263
264 To unwind, we find the unwind data associated with the frame, if any.
265 If we don't find any, we call the library routine __terminate. If we do
266 find it, we use the information to copy the saved register values from
267 that frame into the register save area in the frame for __throw, return
268 into a stub which updates the stack pointer, and jump to the handler.
269 The normal function epilogue for __throw handles restoring the saved
270 values into registers.
271
272 When unwinding, we use this method if we know it will
273 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
274 an inline unwinder will have been emitted for any function that
275 __unwind_function cannot unwind. The inline unwinder appears as a
276 normal exception handler for the entire function, for any function
277 that we know cannot be unwound by __unwind_function. We inform the
278 compiler of whether a function can be unwound with
279 __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
280 when the unwinder isn't needed. __unwind_function is used as an
281 action of last resort. If no other method can be used for
282 unwinding, __unwind_function is used. If it cannot unwind, it
283 should call __terminate.
284
285 By default, if the target-specific backend doesn't supply a definition
286 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
287 unwinders will be used instead. The main tradeoff here is in text space
288 utilization. Obviously, if inline unwinders have to be generated
289 repeatedly, this uses much more space than if a single routine is used.
290
291 However, it is simply not possible on some platforms to write a
292 generalized routine for doing stack unwinding without having some
293 form of additional data associated with each function. The current
294 implementation can encode this data in the form of additional
295 machine instructions or as static data in tabular form. The latter
296 is called the unwind data.
297
298 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
299 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
300 defined and has a non-zero value, a per-function unwinder is not emitted
301 for the current function. If the static unwind data is supported, then
302 a per-function unwinder is not emitted.
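
    For example, a target whose static unwind data can always be used
    (or whose frames __unwind_function can always unwind) might simply
    define, in its target header (illustrative only):

        #define DOESNT_NEED_UNWINDER 1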
303
304 On some platforms it is possible that neither __unwind_function
305 nor inlined unwinders are available. For these platforms it is not
306 possible to throw through a function call, and abort will be
307 invoked instead of performing the throw.
308
309 The reason the unwind data may be needed is that on some platforms
310 the order and types of data stored on the stack can vary depending
311 on the type of function, its arguments and returned values, and the
312 compilation options used (optimization versus non-optimization,
313 -fomit-frame-pointer, processor variations, etc).
314
315 Unfortunately, this also means that throwing through functions that
316 aren't compiled with exception handling support will still not be
317 possible on some platforms. This problem is currently being
318 investigated, but no solutions have been found that do not imply
319 some unacceptable performance penalties.
320
321 Future directions:
322
323 Currently __throw makes no differentiation between cleanups and
324 user-defined exception regions. While this makes the implementation
325 simple, it also implies that it is impossible to determine if a
326 user-defined exception handler exists for a given exception without
327 completely unwinding the stack in the process. This is undesirable
328 from the standpoint of debugging, as ideally it would be possible
329 to trap unhandled exceptions in the debugger before the process of
330 unwinding has even started.
331
332 This problem can be solved by marking user-defined handlers in a
333 special way (probably by adding additional bits to exception_table_list).
334 A two-pass scheme could then be used by __throw to iterate
335 through the table. The first pass would search for a relevant
336 user-defined handler for the current context of the throw, and if
337 one is found, the second pass would then invoke all needed cleanups
338 before jumping to the user-defined handler.
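
    In outline, such a two-pass __throw might behave as follows (a
    sketch of one possible design, not existing code):

        pass 1: walk up the call chain without unwinding, considering
                only entries marked as user-defined handlers; if none
                accepts the exception, call __terminate.
        pass 2: walk up the call chain again, this time unwinding each
                frame and running every intervening cleanup, and
                finally transfer control to the handler found in pass 1.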
339
340 Many languages (including C++ and Ada) make execution of a
341 user-defined handler conditional on the "type" of the exception
342 thrown. (The type of the exception is actually the type of the data
343 that is thrown with the exception.) It will thus be necessary for
344 __throw to be able to determine if a given user-defined
345 exception handler will actually be executed, given the type of
346 exception.
347
348 One scheme is to add additional information to exception_table_list
349 as to the types of exceptions accepted by each handler. __throw
350 can do the type comparisons and then determine if the handler is
351 actually going to be executed.
352
353 There is currently no significant level of debugging support
354 available, other than to place a breakpoint on __throw. While
355 this is sufficient in most cases, it would be helpful to be able to
356 know where a given exception was going to be thrown to before it is
357 actually thrown, and to be able to choose between stopping before
358 every exception region (including cleanups), or just user-defined
359 exception regions. This should be possible to do in the two-pass
360 scheme by adding additional labels to __throw for appropriate
361 breakpoints, and additional debugger commands could be added to
362 query various state variables to determine what actions are to be
363 performed next.
364
365 Another major problem that is being worked on is the issue with stack
366 unwinding on various platforms. Currently the only platforms that have
367 support for the generation of a generic unwinder are the SPARC and MIPS.
368 All other ports require per-function unwinders, which produce large
369 amounts of code bloat.
370
371 For setjmp/longjmp based exception handling, much of the above
372 still applies, but there are some additional details, which this
373 section discusses.
374
375 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
376 optimize EH regions yet. We don't have to worry about machine
377 specific issues with unwinding the stack, as we rely upon longjmp
378 for all the machine specific details. There is no variable context
379 of a throw, just the one implied by the dynamic handler stack
380 pointed to by the dynamic handler chain. There is no exception
381 table, and no calls to __register_exceptions. __sjthrow is used
382 instead of __throw, and it works by using the dynamic handler
383 chain, and longjmp. -fasynchronous-exceptions has no effect, as
384 the elimination of trivial exception regions is not yet performed.
385
386 A frontend can set protect_cleanup_actions_with_terminate when all
387 the cleanup actions should be protected with an EH region that
388 calls terminate when an unhandled exception is thrown. C++ does
389 this, Ada does not. */
390
391
392 #include "config.h"
393 #include "defaults.h"
394 #include "eh-common.h"
395 #include "system.h"
396 #include "rtl.h"
397 #include "tree.h"
398 #include "flags.h"
399 #include "except.h"
400 #include "function.h"
401 #include "insn-flags.h"
402 #include "expr.h"
403 #include "insn-codes.h"
404 #include "regs.h"
405 #include "hard-reg-set.h"
406 #include "insn-config.h"
407 #include "recog.h"
408 #include "output.h"
409 #include "toplev.h"
410 #include "intl.h"
411 #include "obstack.h"
412 #include "ggc.h"
413 #include "tm_p.h"
414
415 /* One to use setjmp/longjmp method of generating code for exception
416 handling. */
417
418 int exceptions_via_longjmp = 2;
419
420 /* One to enable asynchronous exception support. */
421
422 int asynchronous_exceptions = 0;
423
424 /* One to protect cleanup actions with a handler that calls
425 __terminate, zero otherwise. */
426
427 int protect_cleanup_actions_with_terminate;
428
429 /* A list of labels used for exception handlers. Created by
430 find_exception_handler_labels for the optimization passes. */
431
432 rtx exception_handler_labels;
433
434 /* Keeps track of the label used as the context of a throw to rethrow an
435 exception to the outer exception region. */
436
437 struct label_node *outer_context_label_stack = NULL;
438
439 /* Pseudos used to hold exception return data in the interim between
440 __builtin_eh_return and the end of the function. */
441
442 static rtx eh_return_context;
443 static rtx eh_return_stack_adjust;
444 static rtx eh_return_handler;
445
446 /* This is used for targets which can call rethrow with an offset instead
447 of an address. This is subtracted from the rethrow label we are
448 interested in. */
449
450 static rtx first_rethrow_symbol = NULL_RTX;
451 static rtx final_rethrow = NULL_RTX;
452 static rtx last_rethrow_symbol = NULL_RTX;
453
454
455 /* Prototypes for local functions. */
456
457 static void push_eh_entry PARAMS ((struct eh_stack *));
458 static struct eh_entry * pop_eh_entry PARAMS ((struct eh_stack *));
459 static void enqueue_eh_entry PARAMS ((struct eh_queue *, struct eh_entry *));
460 static struct eh_entry * dequeue_eh_entry PARAMS ((struct eh_queue *));
461 static rtx call_get_eh_context PARAMS ((void));
462 static void start_dynamic_cleanup PARAMS ((tree, tree));
463 static void start_dynamic_handler PARAMS ((void));
464 static void expand_rethrow PARAMS ((rtx));
465 static void output_exception_table_entry PARAMS ((FILE *, int));
466 static int can_throw PARAMS ((rtx));
467 static rtx scan_region PARAMS ((rtx, int, int *));
468 static void eh_regs PARAMS ((rtx *, rtx *, rtx *, int));
469 static void set_insn_eh_region PARAMS ((rtx *, int));
470 #ifdef DONT_USE_BUILTIN_SETJMP
471 static void jumpif_rtx PARAMS ((rtx, rtx));
472 #endif
473 static void mark_eh_node PARAMS ((struct eh_node *));
474 static void mark_eh_stack PARAMS ((struct eh_stack *));
475 static void mark_eh_queue PARAMS ((struct eh_queue *));
476 static void mark_tree_label_node PARAMS ((struct label_node *));
477 static void mark_func_eh_entry PARAMS ((void *));
478 static rtx create_rethrow_ref PARAMS ((int));
479 static void push_entry PARAMS ((struct eh_stack *, struct eh_entry*));
480 static void receive_exception_label PARAMS ((rtx));
481 static int new_eh_region_entry PARAMS ((int, rtx));
482 static int find_func_region PARAMS ((int));
483 static int find_func_region_from_symbol PARAMS ((rtx));
484 static void clear_function_eh_region PARAMS ((void));
485 static void process_nestinfo PARAMS ((int, eh_nesting_info *, int *));
486
487 rtx expand_builtin_return_addr PARAMS ((enum built_in_function, int, rtx));
488 static void emit_cleanup_handler PARAMS ((struct eh_entry *));
489 static int eh_region_from_symbol PARAMS ((rtx));
490
491 \f
492 /* Various support routines to manipulate the various data structures
493 used by the exception handling code. */
494
495 extern struct obstack permanent_obstack;
496
497 /* Generate a SYMBOL_REF for rethrow to use. */
498 static rtx
499 create_rethrow_ref (region_num)
500 int region_num;
501 {
502 rtx def;
503 char *ptr;
504 char buf[60];
505
506 push_obstacks_nochange ();
507 end_temporary_allocation ();
508
509 ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", region_num);
510 ptr = ggc_alloc_string (buf, -1);
511 def = gen_rtx_SYMBOL_REF (Pmode, ptr);
512 SYMBOL_REF_NEED_ADJUST (def) = 1;
513
514 pop_obstacks ();
515 return def;
516 }
517
518 /* Push a label entry onto the given STACK. */
519
520 void
521 push_label_entry (stack, rlabel, tlabel)
522 struct label_node **stack;
523 rtx rlabel;
524 tree tlabel;
525 {
526 struct label_node *newnode
527 = (struct label_node *) xmalloc (sizeof (struct label_node));
528
529 if (rlabel)
530 newnode->u.rlabel = rlabel;
531 else
532 newnode->u.tlabel = tlabel;
533 newnode->chain = *stack;
534 *stack = newnode;
535 }
536
537 /* Pop a label entry from the given STACK. */
538
539 rtx
540 pop_label_entry (stack)
541 struct label_node **stack;
542 {
543 rtx label;
544 struct label_node *tempnode;
545
546 if (! *stack)
547 return NULL_RTX;
548
549 tempnode = *stack;
550 label = tempnode->u.rlabel;
551 *stack = (*stack)->chain;
552 free (tempnode);
553
554 return label;
555 }
556
557 /* Return the top element of the given STACK. */
558
559 tree
560 top_label_entry (stack)
561 struct label_node **stack;
562 {
563 if (! *stack)
564 return NULL_TREE;
565
566 return (*stack)->u.tlabel;
567 }
568
569 /* Get an exception label. These must be on the permanent obstack. */
570
571 rtx
572 gen_exception_label ()
573 {
574 rtx lab;
575 lab = gen_label_rtx ();
576 return lab;
577 }
578
579 /* Push a new eh_node entry onto STACK. */
580
581 static void
582 push_eh_entry (stack)
583 struct eh_stack *stack;
584 {
585 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
586 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
587
588 rtx rlab = gen_exception_label ();
589 entry->finalization = NULL_TREE;
590 entry->label_used = 0;
591 entry->exception_handler_label = rlab;
592 entry->false_label = NULL_RTX;
593 if (! flag_new_exceptions)
594 entry->outer_context = gen_label_rtx ();
595 else
596 entry->outer_context = create_rethrow_ref (CODE_LABEL_NUMBER (rlab));
597 entry->rethrow_label = entry->outer_context;
598 entry->goto_entry_p = 0;
599
600 node->entry = entry;
601 node->chain = stack->top;
602 stack->top = node;
603 }
604
605 /* Push an existing entry onto a stack. */
606 static void
607 push_entry (stack, entry)
608 struct eh_stack *stack;
609 struct eh_entry *entry;
610 {
611 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
612 node->entry = entry;
613 node->chain = stack->top;
614 stack->top = node;
615 }
616
617 /* Pop an entry from the given STACK. */
618
619 static struct eh_entry *
620 pop_eh_entry (stack)
621 struct eh_stack *stack;
622 {
623 struct eh_node *tempnode;
624 struct eh_entry *tempentry;
625
626 tempnode = stack->top;
627 tempentry = tempnode->entry;
628 stack->top = stack->top->chain;
629 free (tempnode);
630
631 return tempentry;
632 }
633
634 /* Enqueue an ENTRY onto the given QUEUE. */
635
636 static void
637 enqueue_eh_entry (queue, entry)
638 struct eh_queue *queue;
639 struct eh_entry *entry;
640 {
641 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
642
643 node->entry = entry;
644 node->chain = NULL;
645
646 if (queue->head == NULL)
647 queue->head = node;
648 else
649 queue->tail->chain = node;
650 queue->tail = node;
651 }
652
653 /* Dequeue an entry from the given QUEUE. */
654
655 static struct eh_entry *
656 dequeue_eh_entry (queue)
657 struct eh_queue *queue;
658 {
659 struct eh_node *tempnode;
660 struct eh_entry *tempentry;
661
662 if (queue->head == NULL)
663 return NULL;
664
665 tempnode = queue->head;
666 queue->head = queue->head->chain;
667
668 tempentry = tempnode->entry;
669 free (tempnode);
670
671 return tempentry;
672 }
673
674 static void
675 receive_exception_label (handler_label)
676 rtx handler_label;
677 {
678 emit_label (handler_label);
679
680 #ifdef HAVE_exception_receiver
681 if (! exceptions_via_longjmp)
682 if (HAVE_exception_receiver)
683 emit_insn (gen_exception_receiver ());
684 #endif
685
686 #ifdef HAVE_nonlocal_goto_receiver
687 if (! exceptions_via_longjmp)
688 if (HAVE_nonlocal_goto_receiver)
689 emit_insn (gen_nonlocal_goto_receiver ());
690 #endif
691 }
692
693
694 struct func_eh_entry
695 {
696 int range_number; /* EH region number from EH NOTE insn's. */
697 rtx rethrow_label; /* Label for rethrow. */
698 int rethrow_ref; /* Is rethrow referenced? */
699 struct handler_info *handlers;
700 };
701
702
703 /* Table of function eh regions. */
704 static struct func_eh_entry *function_eh_regions = NULL;
705 static int num_func_eh_entries = 0;
706 static int current_func_eh_entry = 0;
707
708 #define SIZE_FUNC_EH(X) (sizeof (struct func_eh_entry) * X)
709
710 /* Add a new eh_entry for this function. The number returned is a
711 number which uniquely identifies this exception range. */
712
713 static int
714 new_eh_region_entry (note_eh_region, rethrow)
715 int note_eh_region;
716 rtx rethrow;
717 {
718 if (current_func_eh_entry == num_func_eh_entries)
719 {
720 if (num_func_eh_entries == 0)
721 {
722 function_eh_regions =
723 (struct func_eh_entry *) xmalloc (SIZE_FUNC_EH (50));
724 num_func_eh_entries = 50;
725 }
726 else
727 {
728 num_func_eh_entries = num_func_eh_entries * 3 / 2;
729 function_eh_regions = (struct func_eh_entry *)
730 xrealloc (function_eh_regions, SIZE_FUNC_EH (num_func_eh_entries));
731 }
732 }
733 function_eh_regions[current_func_eh_entry].range_number = note_eh_region;
734 if (rethrow == NULL_RTX)
735 function_eh_regions[current_func_eh_entry].rethrow_label =
736 create_rethrow_ref (note_eh_region);
737 else
738 function_eh_regions[current_func_eh_entry].rethrow_label = rethrow;
739 function_eh_regions[current_func_eh_entry].handlers = NULL;
740
741 return current_func_eh_entry++;
742 }
743
744 /* Add new handler information to an exception range. The first parameter
745 specifies the range number (returned from new_eh_region_entry()). The second
746 parameter specifies the handler. By default the handler is inserted at
747 the end of the list. A handler list may contain only ONE NULL_TREE
748 typeinfo entry. Regardless of where it is positioned, a NULL_TREE entry
749 is always output as the LAST handler in the exception table for a region. */
750
751 void
752 add_new_handler (region, newhandler)
753 int region;
754 struct handler_info *newhandler;
755 {
756 struct handler_info *last;
757
758 /* If find_func_region returns -1, callers might attempt to pass us
759 this region number. If that happens, something has gone wrong;
760 -1 is never a valid region. */
761 if (region == -1)
762 abort ();
763
764 newhandler->next = NULL;
765 last = function_eh_regions[region].handlers;
766 if (last == NULL)
767 function_eh_regions[region].handlers = newhandler;
768 else
769 {
770 for ( ; ; last = last->next)
771 {
772 if (last->type_info == CATCH_ALL_TYPE)
773 pedwarn ("additional handler after ...");
774 if (last->next == NULL)
775 break;
776 }
777 last->next = newhandler;
778 }
779 }
780
781 /* Remove a handler label. The handler label is being deleted, so all
782 regions which reference this handler should have it removed from their
783 list of possible handlers. Any region which has the final handler
784 removed can be deleted. */
785
786 void remove_handler (removing_label)
787 rtx removing_label;
788 {
789 struct handler_info *handler, *last;
790 int x;
791 for (x = 0 ; x < current_func_eh_entry; ++x)
792 {
793 last = NULL;
794 handler = function_eh_regions[x].handlers;
795 for ( ; handler; last = handler, handler = handler->next)
796 if (handler->handler_label == removing_label)
797 {
798 if (last)
799 {
800 last->next = handler->next;
801 handler = last;
802 }
803 else
804 function_eh_regions[x].handlers = handler->next;
805 }
806 }
807 }
808
809 /* Store in *ARRAY a malloc'd array of void pointers representing the
810 runtime match values that currently exist in all regions, and
811 return the number of distinct values found. */
812
813 int
814 find_all_handler_type_matches (array)
815 void ***array;
816 {
817 struct handler_info *handler, *last;
818 int x,y;
819 void *val;
820 void **ptr;
821 int max_ptr;
822 int n_ptr = 0;
823
824 *array = NULL;
825
826 if (!doing_eh (0) || ! flag_new_exceptions)
827 return 0;
828
829 max_ptr = 100;
830 ptr = (void **) xmalloc (max_ptr * sizeof (void *));
831
832 for (x = 0 ; x < current_func_eh_entry; x++)
833 {
834 last = NULL;
835 handler = function_eh_regions[x].handlers;
836 for ( ; handler; last = handler, handler = handler->next)
837 {
838 val = handler->type_info;
839 if (val != NULL && val != CATCH_ALL_TYPE)
840 {
841 /* See if this match value has already been found. */
842 for (y = 0; y < n_ptr; y++)
843 if (ptr[y] == val)
844 break;
845
846 /* If we break early, we already found this value. */
847 if (y < n_ptr)
848 continue;
849
850 /* Do we need to allocate more space? */
851 if (n_ptr >= max_ptr)
852 {
853 max_ptr += max_ptr / 2;
854 ptr = (void **) xrealloc (ptr, max_ptr * sizeof (void *));
855 }
856 ptr[n_ptr] = val;
857 n_ptr++;
858 }
859 }
860 }
861
862 if (n_ptr == 0)
863 {
864 free (ptr);
865 ptr = NULL;
866 }
867 *array = ptr;
868 return n_ptr;
869 }
870
871 /* Create a new handler structure initialized with the handler label and
872 typeinfo fields passed in. */
873
874 struct handler_info *
875 get_new_handler (handler, typeinfo)
876 rtx handler;
877 void *typeinfo;
878 {
879 struct handler_info* ptr;
880 ptr = (struct handler_info *) xmalloc (sizeof (struct handler_info));
881 ptr->handler_label = handler;
882 ptr->handler_number = CODE_LABEL_NUMBER (handler);
883 ptr->type_info = typeinfo;
884 ptr->next = NULL;
885
886 return ptr;
887 }
888
889
890
891 /* Find the index in function_eh_regions associated with a NOTE region. If
892 the region cannot be found, -1 is returned. */
893
894 static int
895 find_func_region (insn_region)
896 int insn_region;
897 {
898 int x;
899 for (x = 0; x < current_func_eh_entry; x++)
900 if (function_eh_regions[x].range_number == insn_region)
901 return x;
902
903 return -1;
904 }
905
906 /* Get a pointer to the first handler in an exception region's list. */
907
908 struct handler_info *
909 get_first_handler (region)
910 int region;
911 {
912 int r = find_func_region (region);
913 if (r == -1)
914 abort ();
915 return function_eh_regions[r].handlers;
916 }
917
918 /* Clean out the function_eh_regions table and free all memory. */
919
920 static void
921 clear_function_eh_region ()
922 {
923 int x;
924 struct handler_info *ptr, *next;
925 for (x = 0; x < current_func_eh_entry; x++)
926 for (ptr = function_eh_regions[x].handlers; ptr != NULL; ptr = next)
927 {
928 next = ptr->next;
929 free (ptr);
930 }
931 free (function_eh_regions);
932 num_func_eh_entries = 0;
933 current_func_eh_entry = 0;
934 }
935
936 /* Make a duplicate of an exception region by copying all the handlers
937 for an exception region. Return the index of the new region. The final
938 parameter is a routine which maps old labels to new ones. */
939
940 int
941 duplicate_eh_handlers (old_note_eh_region, new_note_eh_region, map)
942 int old_note_eh_region, new_note_eh_region;
943 rtx (*map) PARAMS ((rtx));
944 {
945 struct handler_info *ptr, *new_ptr;
946 int new_region, region;
947
948 region = find_func_region (old_note_eh_region);
949 if (region == -1)
950 fatal ("Cannot duplicate non-existent exception region.");
951
952 /* duplicate_eh_handlers may have been called during a symbol remap. */
953 new_region = find_func_region (new_note_eh_region);
954 if (new_region != -1)
955 return (new_region);
956
957 new_region = new_eh_region_entry (new_note_eh_region, NULL_RTX);
958
959 ptr = function_eh_regions[region].handlers;
960
961 for ( ; ptr; ptr = ptr->next)
962 {
963 new_ptr = get_new_handler (map (ptr->handler_label), ptr->type_info);
964 add_new_handler (new_region, new_ptr);
965 }
966
967 return new_region;
968 }
969
970
971 /* Given a rethrow symbol, find the EH region number this is for. */
972 static int
973 eh_region_from_symbol (sym)
974 rtx sym;
975 {
976 int x;
977 if (sym == last_rethrow_symbol)
978 return 1;
979 for (x = 0; x < current_func_eh_entry; x++)
980 if (function_eh_regions[x].rethrow_label == sym)
981 return function_eh_regions[x].range_number;
982 return -1;
983 }
984
985 /* Like find_func_region, but using the rethrow symbol for the region
986 rather than the region number itself. */
987 static int
988 find_func_region_from_symbol (sym)
989 rtx sym;
990 {
991 return find_func_region (eh_region_from_symbol (sym));
992 }
993
994 /* When inlining/unrolling, we have to map the symbols passed to
995 __rethrow as well. This performs the remap. If a symbol isn't found,
996 the original one is returned. This is not an efficient routine,
997 so don't call it on everything!! */
998 rtx
999 rethrow_symbol_map (sym, map)
1000 rtx sym;
1001 rtx (*map) PARAMS ((rtx));
1002 {
1003 int x, y;
1004 for (x = 0; x < current_func_eh_entry; x++)
1005 if (function_eh_regions[x].rethrow_label == sym)
1006 {
1007 /* We've found the original region, now let's determine which region
1008 this now maps to. */
1009 rtx l1 = function_eh_regions[x].handlers->handler_label;
1010 rtx l2 = map (l1);
1011 y = CODE_LABEL_NUMBER (l2); /* This is the new region number */
1012 x = find_func_region (y); /* Get the new permanent region */
1013 if (x == -1) /* Hmm, Doesn't exist yet */
1014 {
1015 x = duplicate_eh_handlers (CODE_LABEL_NUMBER (l1), y, map);
1016 /* Since we're mapping it, it must be used. */
1017 function_eh_regions[x].rethrow_ref = 1;
1018 }
1019 return function_eh_regions[x].rethrow_label;
1020 }
1021 return sym;
1022 }
1023
1024 int
1025 rethrow_used (region)
1026 int region;
1027 {
1028 if (flag_new_exceptions)
1029 {
1030 int ret = function_eh_regions[find_func_region (region)].rethrow_ref;
1031 return ret;
1032 }
1033 return 0;
1034 }
1035
1036 \f
1037 /* Routine to see if exception handling is turned on.
1038 DO_WARN is non-zero if we want to inform the user that exception
1039 handling is turned off.
1040
1041 This is used to ensure that -fexceptions has been specified if the
1042 compiler tries to use any exception-specific functions. */
1043
1044 int
1045 doing_eh (do_warn)
1046 int do_warn;
1047 {
1048 if (! flag_exceptions)
1049 {
1050 static int warned = 0;
1051 if (! warned && do_warn)
1052 {
1053 error ("exception handling disabled, use -fexceptions to enable");
1054 warned = 1;
1055 }
1056 return 0;
1057 }
1058 return 1;
1059 }
1060
1061 /* Given a return address in ADDR, determine the address we should use
1062 to find the corresponding EH region. */
1063
1064 rtx
1065 eh_outer_context (addr)
1066 rtx addr;
1067 {
1068 /* First mask out any unwanted bits. */
1069 #ifdef MASK_RETURN_ADDR
1070 expand_and (addr, MASK_RETURN_ADDR, addr);
1071 #endif
1072
1073 /* Then adjust to find the real return address. */
1074 #if defined (RETURN_ADDR_OFFSET)
1075 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
1076 #endif
1077
1078 return addr;
1079 }
1080
1081 /* Start a new exception region for a region of code that has a
1082 cleanup action and push the HANDLER for the region onto
1083 protect_list. All of the regions created with add_partial_entry
1084 will be ended when end_protect_partials is invoked. */
1085
1086 void
1087 add_partial_entry (handler)
1088 tree handler;
1089 {
1090 expand_eh_region_start ();
1091
1092 /* Make sure the entry is on the correct obstack. */
1093 push_obstacks_nochange ();
1094 resume_temporary_allocation ();
1095
1096 /* Because this is a cleanup action, we may have to protect the handler
1097 with __terminate. */
1098 handler = protect_with_terminate (handler);
1099
1100 /* For backwards compatibility, we allow callers to omit calls to
1101 begin_protect_partials for the outermost region. So, we must
1102 explicitly do so here. */
1103 if (!protect_list)
1104 begin_protect_partials ();
1105
1106 /* Add this entry to the front of the list. */
1107 TREE_VALUE (protect_list)
1108 = tree_cons (NULL_TREE, handler, TREE_VALUE (protect_list));
1109 pop_obstacks ();
1110 }
1111
1112 /* Emit code to get the EH context for the current function. */
1113
1114 static rtx
1115 call_get_eh_context ()
1116 {
1117 static tree fn;
1118 tree expr;
1119
1120 if (fn == NULL_TREE)
1121 {
1122 tree fntype;
1123 fn = get_identifier ("__get_eh_context");
1124 push_obstacks_nochange ();
1125 end_temporary_allocation ();
1126 fntype = build_pointer_type (build_pointer_type
1127 (build_pointer_type (void_type_node)));
1128 fntype = build_function_type (fntype, NULL_TREE);
1129 fn = build_decl (FUNCTION_DECL, fn, fntype);
1130 DECL_EXTERNAL (fn) = 1;
1131 TREE_PUBLIC (fn) = 1;
1132 DECL_ARTIFICIAL (fn) = 1;
1133 TREE_READONLY (fn) = 1;
1134 make_decl_rtl (fn, NULL_PTR, 1);
1135 assemble_external (fn);
1136 pop_obstacks ();
1137
1138 ggc_add_tree_root (&fn, 1);
1139 }
1140
1141 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1142 expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1143 expr, NULL_TREE, NULL_TREE);
1144 TREE_SIDE_EFFECTS (expr) = 1;
1145
1146 return copy_to_reg (expand_expr (expr, NULL_RTX, VOIDmode, 0));
1147 }
1148
1149 /* Get a reference to the EH context.
1150 We will only generate a register for the current function's EH context
1151 here, and emit a USE insn to mark that this is an EH context register.
1152
1153 Later, emit_eh_context will emit the needed call to __get_eh_context
1154 in libgcc2, and copy the value to the register we have generated. */
1155
1156 rtx
1157 get_eh_context ()
1158 {
1159 if (current_function_ehc == 0)
1160 {
1161 rtx insn;
1162
1163 current_function_ehc = gen_reg_rtx (Pmode);
1164
1165 insn = gen_rtx_USE (GET_MODE (current_function_ehc),
1166 current_function_ehc);
1167 insn = emit_insn_before (insn, get_first_nonparm_insn ());
1168
1169 REG_NOTES (insn)
1170 = gen_rtx_EXPR_LIST (REG_EH_CONTEXT, current_function_ehc,
1171 REG_NOTES (insn));
1172 }
1173 return current_function_ehc;
1174 }
1175
1176 /* Get a reference to the dynamic handler chain. It points to the
1177 pointer to the next element in the dynamic handler chain. It ends
1178 when there are no more elements in the dynamic handler chain, when
1179 the value is &top_elt from libgcc2.c. Immediately after the
1180 pointer is a word holding the head of that element's dynamic
1181 cleanup chain, followed by an area suitable for setjmp/longjmp
1182 when DONT_USE_BUILTIN_SETJMP is defined, and for
1183 __builtin_setjmp/__builtin_longjmp when it isn't defined. */
1184
1185 rtx
1186 get_dynamic_handler_chain ()
1187 {
1188 rtx ehc, dhc, result;
1189
1190 ehc = get_eh_context ();
1191
1192 /* This is the offset of dynamic_handler_chain in the eh_context struct
1193 declared in eh-common.h. If its location is changed, change this offset. */
1194 dhc = plus_constant (ehc, POINTER_SIZE / BITS_PER_UNIT);
1195
1196 result = copy_to_reg (dhc);
1197
1198 /* We don't want a copy of the dhc, but rather, the single dhc. */
1199 return gen_rtx_MEM (Pmode, result);
1200 }
1201
1202 /* Get a reference to the dynamic cleanup chain. It points to the
1203 pointer to the next element in the dynamic cleanup chain.
1204 Immediately after the pointer are two Pmode variables, one holding a
1205 pointer to a function that performs the cleanup action, and the
1206 second holding the argument to pass to that function. */
1207
1208 rtx
1209 get_dynamic_cleanup_chain ()
1210 {
1211 rtx dhc, dcc, result;
1212
1213 dhc = get_dynamic_handler_chain ();
1214 dcc = plus_constant (dhc, POINTER_SIZE / BITS_PER_UNIT);
1215
1216 result = copy_to_reg (dcc);
1217
1218 /* We don't want a copy of the dcc, but rather, the single dcc. */
1219 return gen_rtx_MEM (Pmode, result);
1220 }
1221
1222 #ifdef DONT_USE_BUILTIN_SETJMP
1223 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
1224 LABEL is an rtx of code CODE_LABEL, in this function. */
1225
1226 static void
1227 jumpif_rtx (x, label)
1228 rtx x;
1229 rtx label;
1230 {
1231 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
1232 }
1233 #endif
1234
1235 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
1236 We just need to create an element for the cleanup list, and push it
1237 into the chain.
1238
1239 A dynamic cleanup is a cleanup action implied by the presence of an
1240 element on the EH runtime dynamic cleanup stack that is to be
1241 performed when an exception is thrown. The cleanup action is
1242 performed by __sjthrow when an exception is thrown. Only certain
1243 actions can be optimized into dynamic cleanup actions. For the
1244 restrictions on what actions can be performed using this routine,
1245 see expand_eh_region_start_tree. */
1246
1247 static void
1248 start_dynamic_cleanup (func, arg)
1249 tree func;
1250 tree arg;
1251 {
1252 rtx dcc;
1253 rtx new_func, new_arg;
1254 rtx x, buf;
1255 int size;
1256
1257 /* We allocate enough room for a pointer to the function, and
1258 one argument. */
1259 size = 2;
1260
1261 /* XXX, FIXME: The stack space allocated this way is too long lived,
1262 but there is no allocation routine that allocates at the level of
1263 the last binding contour. */
1264 buf = assign_stack_local (BLKmode,
1265 GET_MODE_SIZE (Pmode)*(size+1),
1266 0);
1267
1268 buf = change_address (buf, Pmode, NULL_RTX);
1269
1270 /* Store dcc into the first word of the newly allocated buffer. */
1271
1272 dcc = get_dynamic_cleanup_chain ();
1273 emit_move_insn (buf, dcc);
1274
1275 /* Store func and arg into the cleanup list element. */
1276
1277 new_func = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
1278 GET_MODE_SIZE (Pmode)));
1279 new_arg = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
1280 GET_MODE_SIZE (Pmode)*2));
1281 x = expand_expr (func, new_func, Pmode, 0);
1282 if (x != new_func)
1283 emit_move_insn (new_func, x);
1284
1285 x = expand_expr (arg, new_arg, Pmode, 0);
1286 if (x != new_arg)
1287 emit_move_insn (new_arg, x);
1288
1289 /* Update the cleanup chain. */
1290
1291 x = force_operand (XEXP (buf, 0), dcc);
1292 if (x != dcc)
1293 emit_move_insn (dcc, x);
1294 }
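
/* Illustration only, not used by the compiler: the element built by
   start_dynamic_cleanup above can be pictured as the structure below.
   The code works purely with word offsets; these field names are
   invented.

        struct dynamic_cleanup
        {
          struct dynamic_cleanup *next;    word 0: previous chain head
          void (*func) (void *);           word 1: cleanup function
          void *arg;                       word 2: argument passed to FUNC
        };

   __sjthrow walks this chain and invokes FUNC (ARG) for each element
   before transferring control with longjmp.  */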
1295
1296 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
1297 handler stack. This should only be used by expand_eh_region_start
1298 or expand_eh_region_start_tree. */
1299
1300 static void
1301 start_dynamic_handler ()
1302 {
1303 rtx dhc, dcc;
1304 rtx x, arg, buf;
1305 int size;
1306
1307 #ifndef DONT_USE_BUILTIN_SETJMP
1308 /* The number of Pmode words for the setjmp buffer, when using the
1309 builtin setjmp/longjmp, see expand_builtin, case BUILT_IN_LONGJMP. */
1310 /* We use 2 words here before calling expand_builtin_setjmp.
1311 expand_builtin_setjmp uses 2 words, and then calls emit_stack_save.
1312 emit_stack_save needs space of size STACK_SAVEAREA_MODE (SAVE_NONLOCAL).
1313 Subtract one, because the assign_stack_local call below adds 1. */
1314 size = (2 + 2 + (GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
1315 / GET_MODE_SIZE (Pmode))
1316 - 1);
1317 #else
1318 #ifdef JMP_BUF_SIZE
1319 size = JMP_BUF_SIZE;
1320 #else
1321 /* This should be large enough for most systems; if it is not,
1322 JMP_BUF_SIZE should be defined with the proper value. It will
1323 also tend to be larger than necessary for most systems; a more
1324 optimal port will define JMP_BUF_SIZE. */
1325 size = FIRST_PSEUDO_REGISTER+2;
1326 #endif
1327 #endif
1328 /* XXX, FIXME: The stack space allocated this way is too long lived,
1329 but there is no allocation routine that allocates at the level of
1330 the last binding contour. */
1331 arg = assign_stack_local (BLKmode,
1332 GET_MODE_SIZE (Pmode)*(size+1),
1333 0);
1334
1335 arg = change_address (arg, Pmode, NULL_RTX);
1336
1337 /* Store dhc into the first word of the newly allocated buffer. */
1338
1339 dhc = get_dynamic_handler_chain ();
1340 dcc = gen_rtx_MEM (Pmode, plus_constant (XEXP (arg, 0),
1341 GET_MODE_SIZE (Pmode)));
1342 emit_move_insn (arg, dhc);
1343
1344 /* Zero out the start of the cleanup chain. */
1345 emit_move_insn (dcc, const0_rtx);
1346
1347 /* The jmpbuf starts two words into the area allocated. */
1348 buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
1349
1350 #ifdef DONT_USE_BUILTIN_SETJMP
1351 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1,
1352 TYPE_MODE (integer_type_node), 1,
1353 buf, Pmode);
1354 /* If we come back here for a catch, transfer control to the handler. */
1355 jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
1356 #else
1357 {
1358 /* A label to continue execution for the no exception case. */
1359 rtx noex = gen_label_rtx();
1360 x = expand_builtin_setjmp (buf, NULL_RTX, noex,
1361 ehstack.top->entry->exception_handler_label);
1362 emit_label (noex);
1363 }
1364 #endif
1365
1366 /* We are committed to this, so update the handler chain. */
1367
1368 emit_move_insn (dhc, force_operand (XEXP (arg, 0), NULL_RTX));
1369 }
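
/* Illustration only, not used by the compiler: the buffer set up by
   start_dynamic_handler above can be pictured as the structure below.
   Field names are invented; the code works purely with word offsets.

        struct dynamic_handler
        {
          struct dynamic_handler *next;    word 0: previous chain head
          void *cleanup_chain;             word 1: this region's dynamic
                                           cleanup chain, initially empty
          char jmpbuf[];                   words 2 and up: setjmp or
                                           __builtin_setjmp buffer
        };

   When __sjthrow is called, it runs the cleanups recorded for the
   innermost element and then longjmps to that element's jmpbuf, which
   lands in the code emitted above and dispatches to the exception
   handler.  */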
1370
1371 /* Start an exception handling region for the given cleanup action.
1372 All instructions emitted after this point are considered to be part
1373 of the region until expand_eh_region_end is invoked. CLEANUP is
1374 the cleanup action to perform. The return value is true if the
1375 exception region was optimized away. In that case,
1376 expand_eh_region_end does not need to be called for this cleanup,
1377 nor should it be.
1378
1379 This routine notices one particular common case in C++ code
1380 generation, and optimizes it so as to not need the exception
1381 region. It works by creating a dynamic cleanup action, instead of
1382 using an exception region. */
1383
1384 int
1385 expand_eh_region_start_tree (decl, cleanup)
1386 tree decl;
1387 tree cleanup;
1388 {
1389 /* This is the old code. */
1390 if (! doing_eh (0))
1391 return 0;
1392
1393 /* The optimization only applies to actions protected with
1394 terminate, and only applies if we are using the setjmp/longjmp
1395 codegen method. */
1396 if (exceptions_via_longjmp
1397 && protect_cleanup_actions_with_terminate)
1398 {
1399 tree func, arg;
1400 tree args;
1401
1402 /* Ignore any UNSAVE_EXPR. */
1403 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
1404 cleanup = TREE_OPERAND (cleanup, 0);
1405
1406 /* Further, it only applies if the action is a call, if there
1407 are 2 arguments, and if the second argument is 2. */
1408
1409 if (TREE_CODE (cleanup) == CALL_EXPR
1410 && (args = TREE_OPERAND (cleanup, 1))
1411 && (func = TREE_OPERAND (cleanup, 0))
1412 && (arg = TREE_VALUE (args))
1413 && (args = TREE_CHAIN (args))
1414
1415 /* is the second argument 2? */
1416 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
1417 && compare_tree_int (TREE_VALUE (args), 2) == 0
1418
1419 /* Make sure there are no other arguments. */
1420 && TREE_CHAIN (args) == NULL_TREE)
1421 {
1422 /* Arrange for returns and gotos to pop the entry we make on the
1423 dynamic cleanup stack. */
1424 expand_dcc_cleanup (decl);
1425 start_dynamic_cleanup (func, arg);
1426 return 1;
1427 }
1428 }
1429
1430 expand_eh_region_start_for_decl (decl);
1431 ehstack.top->entry->finalization = cleanup;
1432
1433 return 0;
1434 }
1435
1436 /* Just like expand_eh_region_start, except if a cleanup action is
1437 entered on the cleanup chain, the TREE_PURPOSE of the element put
1438 on the chain is DECL. DECL should be the associated VAR_DECL, if
1439 any, otherwise it should be NULL_TREE. */
1440
1441 void
1442 expand_eh_region_start_for_decl (decl)
1443 tree decl;
1444 {
1445 rtx note;
1446
1447 /* This is the old code. */
1448 if (! doing_eh (0))
1449 return;
1450
1451 /* We need a new block to record the start and end of the
1452 dynamic handler chain. We also want to prevent jumping into
1453 a try block. */
1454 expand_start_bindings (2);
1455
1456 /* But we don't need or want a new temporary level. */
1457 pop_temp_slots ();
1458
1459 /* Mark this block as created by expand_eh_region_start. This
1460 is so that we can pop the block with expand_end_bindings
1461 automatically. */
1462 mark_block_as_eh_region ();
1463
1464 if (exceptions_via_longjmp)
1465 {
1466 /* Arrange for returns and gotos to pop the entry we make on the
1467 dynamic handler stack. */
1468 expand_dhc_cleanup (decl);
1469 }
1470
1471 push_eh_entry (&ehstack);
1472 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1473 NOTE_EH_HANDLER (note)
1474 = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1475 if (exceptions_via_longjmp)
1476 start_dynamic_handler ();
1477 }
1478
1479 /* Start an exception handling region. All instructions emitted after
1480 this point are considered to be part of the region until
1481 expand_eh_region_end is invoked. */
1482
1483 void
1484 expand_eh_region_start ()
1485 {
1486 expand_eh_region_start_for_decl (NULL_TREE);
1487 }
1488
1489 /* End an exception handling region. The information about the region
1490 is found on the top of ehstack.
1491
1492 HANDLER is either the cleanup for the exception region, or if we're
1493 marking the end of a try block, HANDLER is integer_zero_node.
1494
1495 HANDLER will be transformed to rtl when expand_leftover_cleanups
1496 is invoked. */
1497
1498 void
1499 expand_eh_region_end (handler)
1500 tree handler;
1501 {
1502 struct eh_entry *entry;
1503 struct eh_node *node;
1504 rtx note;
1505 int ret, r;
1506
1507 if (! doing_eh (0))
1508 return;
1509
1510 entry = pop_eh_entry (&ehstack);
1511
1512 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1513 ret = NOTE_EH_HANDLER (note)
1514 = CODE_LABEL_NUMBER (entry->exception_handler_label);
1515 if (exceptions_via_longjmp == 0 && ! flag_new_exceptions
1516 /* We share outer_context between regions; only emit it once. */
1517 && INSN_UID (entry->outer_context) == 0)
1518 {
1519 rtx label;
1520
1521 label = gen_label_rtx ();
1522 emit_jump (label);
1523
1524 /* Emit a label marking the end of this exception region that
1525 is used for rethrowing into the outer context. */
1526 emit_label (entry->outer_context);
1527 expand_internal_throw ();
1528
1529 emit_label (label);
1530 }
1531
1532 entry->finalization = handler;
1533
1534 /* Create region entry in the final exception table. */
1535 r = new_eh_region_entry (NOTE_EH_HANDLER (note), entry->rethrow_label);
1536
1537 enqueue_eh_entry (ehqueue, entry);
1538
1539 /* If we have already started ending the bindings, don't recurse. */
1540 if (is_eh_region ())
1541 {
1542 /* Because we don't need or want a new temporary level and
1543 because we didn't create one in expand_eh_region_start,
1544 create a fake one now to avoid removing one in
1545 expand_end_bindings. */
1546 push_temp_slots ();
1547
1548 mark_block_as_not_eh_region ();
1549
1550 expand_end_bindings (NULL_TREE, 0, 0);
1551 }
1552
1553 /* Go through the goto handlers in the queue, emitting their
1554 handlers if we now have enough information to do so. */
1555 for (node = ehqueue->head; node; node = node->chain)
1556 if (node->entry->goto_entry_p
1557 && node->entry->outer_context == entry->rethrow_label)
1558 emit_cleanup_handler (node->entry);
1559
1560 /* We can't emit handlers for goto entries until their scopes are
1561 complete because we don't know where they need to rethrow to,
1562 yet. */
1563 if (entry->finalization != integer_zero_node
1564 && (!entry->goto_entry_p
1565 || find_func_region_from_symbol (entry->outer_context) != -1))
1566 emit_cleanup_handler (entry);
1567 }
1568
1569 /* Start the EH region for a goto fixup. We only need them in the region-based
1570 EH scheme. */
1571
1572 void
1573 expand_fixup_region_start ()
1574 {
1575 if (! doing_eh (0) || exceptions_via_longjmp)
1576 return;
1577
1578 expand_eh_region_start ();
1579 /* Mark this entry as the entry for a goto. */
1580 ehstack.top->entry->goto_entry_p = 1;
1581 }
1582
1583 /* End the EH region for a goto fixup. CLEANUP is the cleanup we just
1584 expanded; to avoid running it twice if it throws, we look through the
1585 ehqueue for a matching region and rethrow from its outer_context. */
1586
1587 void
1588 expand_fixup_region_end (cleanup)
1589 tree cleanup;
1590 {
1591 struct eh_node *node;
1592 int dont_issue;
1593
1594 if (! doing_eh (0) || exceptions_via_longjmp)
1595 return;
1596
1597 for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1598 node = node->chain;
1599 if (node == 0)
1600 for (node = ehqueue->head; node && node->entry->finalization != cleanup; )
1601 node = node->chain;
1602 if (node == 0)
1603 abort ();
1604
1605 /* If the outer context label has not been issued yet, we don't want
1606 to issue it as a part of this region, unless this is the
1607 correct region for the outer context. If we did, then the label for
1608 the outer context would be WITHIN the begin/end labels,
1609 and we could get an infinite loop when it tried to rethrow, or just
1610 generally incorrect execution following a throw. */
1611
1612 if (flag_new_exceptions)
1613 dont_issue = 0;
1614 else
1615 dont_issue = ((INSN_UID (node->entry->outer_context) == 0)
1616 && (ehstack.top->entry != node->entry));
1617
1618 ehstack.top->entry->outer_context = node->entry->outer_context;
1619
1620 /* Since we are rethrowing to the OUTER region, we know we don't need
1621 a jump around sequence for this region, so we'll pretend the outer
1622 context label has been issued by setting INSN_UID to 1, then clearing
1623 it again afterwards. */
1624
1625 if (dont_issue)
1626 INSN_UID (node->entry->outer_context) = 1;
1627
1628 /* Just rethrow. size_zero_node is just a NOP. */
1629 expand_eh_region_end (size_zero_node);
1630
1631 if (dont_issue)
1632 INSN_UID (node->entry->outer_context) = 0;
1633 }
1634
1635 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1636 call to __sjthrow.
1637
1638 Otherwise, we emit a call to __throw and note that we threw
1639 something, so we know we need to generate the necessary code for
1640 __throw.
1641
1642 Before invoking throw, the __eh_pc variable must have been set up
1643 to contain the PC being thrown from. This address is used by
1644 __throw to determine which exception region (if any) is
1645 responsible for handling the exception. */
1646
1647 void
1648 emit_throw ()
1649 {
1650 if (exceptions_via_longjmp)
1651 {
1652 emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1653 }
1654 else
1655 {
1656 #ifdef JUMP_TO_THROW
1657 emit_indirect_jump (throw_libfunc);
1658 #else
1659 emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1660 #endif
1661 }
1662 emit_barrier ();
1663 }
1664
1665 /* Throw the current exception. If appropriate, this is done by jumping
1666 to the next handler. */
1667
1668 void
1669 expand_internal_throw ()
1670 {
1671 emit_throw ();
1672 }
1673
1674 /* Called from expand_exception_blocks and expand_end_catch_block to
1675 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1676
1677 void
1678 expand_leftover_cleanups ()
1679 {
1680 struct eh_entry *entry;
1681
1682 for (entry = dequeue_eh_entry (ehqueue);
1683 entry;
1684 entry = dequeue_eh_entry (ehqueue))
1685 {
1686 /* A leftover try block. Shouldn't be one here. */
1687 if (entry->finalization == integer_zero_node)
1688 abort ();
1689
1690 free (entry);
1691 }
1692 }
1693
1694 /* Called at the start of a block of try statements. */
1695 void
1696 expand_start_try_stmts ()
1697 {
1698 if (! doing_eh (1))
1699 return;
1700
1701 expand_eh_region_start ();
1702 }
1703
1704 /* Called to begin a catch clause. The parameter is the object which
1705 will be passed to the runtime type check routine. */
1706 void
1707 start_catch_handler (rtime)
1708 tree rtime;
1709 {
1710 rtx handler_label;
1711 int insn_region_num;
1712 int eh_region_entry;
1713
1714 if (! doing_eh (1))
1715 return;
1716
1717 handler_label = catchstack.top->entry->exception_handler_label;
1718 insn_region_num = CODE_LABEL_NUMBER (handler_label);
1719 eh_region_entry = find_func_region (insn_region_num);
1720
1721 /* If we've already issued this label, pick a new one */
1722 if (catchstack.top->entry->label_used)
1723 handler_label = gen_exception_label ();
1724 else
1725 catchstack.top->entry->label_used = 1;
1726
1727 receive_exception_label (handler_label);
1728
1729 add_new_handler (eh_region_entry, get_new_handler (handler_label, rtime));
1730
1731 if (flag_new_exceptions && ! exceptions_via_longjmp)
1732 return;
1733
1734 /* Under the old mechanism, as well as setjmp/longjmp, we need to
1735 issue code to compare 'rtime' to the value in eh_info, via the
1736 matching function in eh_info. If it is false, we branch around
1737 the handler we are about to issue. */
1738
1739 if (rtime != NULL_TREE && rtime != CATCH_ALL_TYPE)
1740 {
1741 rtx call_rtx, rtime_address;
1742
1743 if (catchstack.top->entry->false_label != NULL_RTX)
1744 {
1745 error ("Never issued previous false_label");
1746 abort ();
1747 }
1748 catchstack.top->entry->false_label = gen_exception_label ();
1749
1750 rtime_address = expand_expr (rtime, NULL_RTX, Pmode, EXPAND_INITIALIZER);
1751 #ifdef POINTERS_EXTEND_UNSIGNED
1752 rtime_address = convert_memory_address (Pmode, rtime_address);
1753 #endif
1754 rtime_address = force_reg (Pmode, rtime_address);
1755
1756 /* Now issue the call, and branch around handler if needed */
1757 call_rtx = emit_library_call_value (eh_rtime_match_libfunc, NULL_RTX,
1758 0, TYPE_MODE (integer_type_node),
1759 1, rtime_address, Pmode);
1760
1761 /* Did the function return true? */
1762 emit_cmp_and_jump_insns (call_rtx, const0_rtx, EQ, NULL_RTX,
1763 GET_MODE (call_rtx), 0, 0,
1764 catchstack.top->entry->false_label);
1765 }
1766 }
1767
1768 /* Called to end a catch clause. If we aren't using the new exception
1769 model table mechanism, we need to issue the branch-around label
1770 for the end of the catch block. */
1771
1772 void
1773 end_catch_handler ()
1774 {
1775 if (! doing_eh (1))
1776 return;
1777
1778 if (flag_new_exceptions && ! exceptions_via_longjmp)
1779 {
1780 emit_barrier ();
1781 return;
1782 }
1783
1784 /* A NULL label implies the catch clause was a catch all or cleanup */
1785 if (catchstack.top->entry->false_label == NULL_RTX)
1786 return;
1787
1788 emit_label (catchstack.top->entry->false_label);
1789 catchstack.top->entry->false_label = NULL_RTX;
1790 }
1791
1792 /* Save away the current ehqueue. */
1793
1794 void
1795 push_ehqueue ()
1796 {
1797 struct eh_queue *q;
1798 q = (struct eh_queue *) xcalloc (1, sizeof (struct eh_queue));
1799 q->next = ehqueue;
1800 ehqueue = q;
1801 }
1802
1803 /* Restore a previously pushed ehqueue. */
1804
1805 void
1806 pop_ehqueue ()
1807 {
1808 struct eh_queue *q;
1809 expand_leftover_cleanups ();
1810 q = ehqueue->next;
1811 free (ehqueue);
1812 ehqueue = q;
1813 }
1814
1815 /* Emit the handler specified by ENTRY. */
1816
1817 static void
1818 emit_cleanup_handler (entry)
1819 struct eh_entry *entry;
1820 {
1821 rtx prev;
1822 rtx handler_insns;
1823
1824 /* Since the cleanup could itself contain try-catch blocks, we
1825 squirrel away the current queue and replace it when we are done
1826 with this function. */
1827 push_ehqueue ();
1828
1829 /* Put these handler instructions in a sequence. */
1830 do_pending_stack_adjust ();
1831 start_sequence ();
1832
1833 /* Emit the label for the cleanup handler for this region, and
1834 expand the code for the handler.
1835
1836 Note that a catch region is handled as a side-effect here; for a
1837 try block, entry->finalization will contain integer_zero_node, so
1838 no code will be generated in the expand_expr call below. But, the
1839 label for the handler will still be emitted, so any code emitted
1840 after this point will end up being the handler. */
1841
1842 receive_exception_label (entry->exception_handler_label);
1843
1844 /* register a handler for this cleanup region */
1845 add_new_handler (find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
1846 get_new_handler (entry->exception_handler_label, NULL));
1847
1848 /* And now generate the insns for the cleanup handler. */
1849 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1850
1851 prev = get_last_insn ();
1852 if (prev == NULL || GET_CODE (prev) != BARRIER)
1853 /* Code to throw out to outer context when we fall off end of the
1854 handler. We can't do this here for catch blocks, so it's done
1855 in expand_end_all_catch instead. */
1856 expand_rethrow (entry->outer_context);
1857
1858 /* Finish this sequence. */
1859 do_pending_stack_adjust ();
1860 handler_insns = get_insns ();
1861 end_sequence ();
1862
1863 /* And add it to the CATCH_CLAUSES. */
1864 push_to_sequence (catch_clauses);
1865 emit_insns (handler_insns);
1866 catch_clauses = get_insns ();
1867 end_sequence ();
1868
1869 /* Now we've left the handler. */
1870 pop_ehqueue ();
1871 }
1872
1873 /* Generate RTL for the start of a group of catch clauses.
1874
1875 It is responsible for starting a new instruction sequence for the
1876 instructions in the catch block, and expanding the handlers for the
1877 internally-generated exception regions nested within the try block
1878 corresponding to this catch block. */
1879
1880 void
1881 expand_start_all_catch ()
1882 {
1883 struct eh_entry *entry;
1884 tree label;
1885 rtx outer_context;
1886
1887 if (! doing_eh (1))
1888 return;
1889
1890 outer_context = ehstack.top->entry->outer_context;
1891
1892 /* End the try block. */
1893 expand_eh_region_end (integer_zero_node);
1894
1895 emit_line_note (input_filename, lineno);
1896 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1897
1898 /* The label for the exception handling block that we will save.
1899 This is Lresume in the documentation. */
1900 expand_label (label);
1901
1902 /* Push the label that points to where normal flow is resumed onto
1903 the top of the label stack. */
1904 push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1905
1906 /* Start a new sequence for all the catch blocks. We will add this
1907 to the global sequence catch_clauses when we have completed all
1908 the handlers in this handler-seq. */
1909 start_sequence ();
1910
1911 /* Throw away entries in the queue that we won't need anymore. We
1912 need entries for regions that have ended but to which there might
1913 still be gotos pending. */
1914 for (entry = dequeue_eh_entry (ehqueue);
1915 entry->finalization != integer_zero_node;
1916 entry = dequeue_eh_entry (ehqueue))
1917 free (entry);
1918
1919 /* At this point, all the cleanups are done, and the ehqueue now has
1920 the current exception region at its head. We dequeue it, and put it
1921 on the catch stack. */
1922 push_entry (&catchstack, entry);
1923
1924 /* If we are not doing setjmp/longjmp EH, we arrange to rethrow in the
1925 outer context, because the handlers are reordered out of line. We need to
1926 do this because we are not physically within the region, if any, that
1927 logically contains this catch block. */
1928 if (! exceptions_via_longjmp)
1929 {
1930 expand_eh_region_start ();
1931 ehstack.top->entry->outer_context = outer_context;
1932 }
1933
1934 }
1935
1936 /* Finish up the catch block. At this point all the insns for the
1937 catch clauses have already been generated, so we only have to add
1938 them to the catch_clauses list. We also want to make sure that if
1939 we fall off the end of the catch clauses that we rethrow to the
1940 outer EH region. */
1941
1942 void
1943 expand_end_all_catch ()
1944 {
1945 rtx new_catch_clause;
1946 struct eh_entry *entry;
1947
1948 if (! doing_eh (1))
1949 return;
1950
1951 /* Dequeue the current catch clause region. */
1952 entry = pop_eh_entry (&catchstack);
1953 free (entry);
1954
1955 if (! exceptions_via_longjmp)
1956 {
1957 rtx outer_context = ehstack.top->entry->outer_context;
1958
1959 /* Finish the rethrow region. size_zero_node is just a NOP. */
1960 expand_eh_region_end (size_zero_node);
1961 /* The new exception handling model will never have a fall through
1962 of a catch clause. */
1963 if (!flag_new_exceptions)
1964 expand_rethrow (outer_context);
1965 }
1966 else
1967 expand_rethrow (NULL_RTX);
1968
1969 /* Code to throw out to outer context, if we fall off end of catch
1970 handlers. This is rethrow (Lresume, same id, same obj) in the
1971 documentation. We use Lresume because we know that it will throw
1972 to the correct context.
1973
1974 In other words, if the catch handler doesn't exit or return, we
1975 do a "throw" (using the address of Lresume as the point being
1976 thrown from) so that the outer EH region can then try to process
1977 the exception. */
1978
1979 /* Now we have the complete catch sequence. */
1980 new_catch_clause = get_insns ();
1981 end_sequence ();
1982
1983 /* This level of catch blocks is done, so set up the successful
1984 catch jump label for the next layer of catch blocks. */
1985 pop_label_entry (&caught_return_label_stack);
1986 pop_label_entry (&outer_context_label_stack);
1987
1988 /* Add the new sequence of catches to the main one for this function. */
1989 push_to_sequence (catch_clauses);
1990 emit_insns (new_catch_clause);
1991 catch_clauses = get_insns ();
1992 end_sequence ();
1993
1994 /* Here we fall through into the continuation code. */
1995 }
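
/* As a rough sketch of how the routines above fit together, a front end
   expanding a try/catch construct would normally make calls in this order
   (TYPE stands for the clause's type descriptor tree; this is an outline of
   the intended usage, not code from this file):

       expand_start_try_stmts ();
       ... expand the body of the try block ...
       expand_start_all_catch ();
       start_catch_handler (TYPE);     or CATCH_ALL_TYPE, or NULL_TREE
       ... expand the handler body ...
       end_catch_handler ();
       ... repeat for each catch clause ...
       expand_end_all_catch ();
*/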
1996
1997 /* Rethrow from the outer context LABEL. */
1998
1999 static void
2000 expand_rethrow (label)
2001 rtx label;
2002 {
2003 if (exceptions_via_longjmp)
2004 emit_throw ();
2005 else
2006 if (flag_new_exceptions)
2007 {
2008 rtx insn;
2009 int region;
2010 if (label == NULL_RTX)
2011 label = last_rethrow_symbol;
2012 emit_library_call (rethrow_libfunc, 0, VOIDmode, 1, label, Pmode);
2013 region = find_func_region (eh_region_from_symbol (label));
2014 /* If the region is -1, it doesn't exist yet. We shouldn't be
2015 trying to rethrow there yet. */
2016 if (region == -1)
2017 abort ();
2018 function_eh_regions[region].rethrow_ref = 1;
2019
2020 /* Search backwards for the actual call insn. */
2021 insn = get_last_insn ();
2022 while (GET_CODE (insn) != CALL_INSN)
2023 insn = PREV_INSN (insn);
2024 delete_insns_since (insn);
2025
2026 /* Mark the label/symbol on the call. */
2027 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EH_RETHROW, label,
2028 REG_NOTES (insn));
2029 emit_barrier ();
2030 }
2031 else
2032 emit_jump (label);
2033 }
2034
2035 /* Begin a region that will contain entries created with
2036 add_partial_entry. */
2037
2038 void
2039 begin_protect_partials ()
2040 {
2041 /* Put the entry on the function obstack. */
2042 push_obstacks_nochange ();
2043 resume_temporary_allocation ();
2044
2045 /* Push room for a new list. */
2046 protect_list = tree_cons (NULL_TREE, NULL_TREE, protect_list);
2047
2048 /* We're done with the function obstack now. */
2049 pop_obstacks ();
2050 }
2051
2052 /* End all the pending exception regions on protect_list. The handlers
2053 will be emitted when expand_leftover_cleanups is invoked. */
2054
2055 void
2056 end_protect_partials ()
2057 {
2058 tree t;
2059
2060 /* For backwards compatibility, we allow callers to omit the call to
2061 begin_protect_partials for the outermost region. So,
2062 PROTECT_LIST may be NULL. */
2063 if (!protect_list)
2064 return;
2065
2066 /* End all the exception regions. */
2067 for (t = TREE_VALUE (protect_list); t; t = TREE_CHAIN (t))
2068 expand_eh_region_end (TREE_VALUE (t));
2069
2070 /* Pop the topmost entry. */
2071 protect_list = TREE_CHAIN (protect_list);
2072
2073 }
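
/* Sketch of the expected pairing, using the add_partial_entry routine
   mentioned in the comment before begin_protect_partials (illustrative
   only):

       begin_protect_partials ();
       add_partial_entry (cleanup_1);
       add_partial_entry (cleanup_2);
       ...
       end_protect_partials ();

   The handlers for the regions ended here are emitted later, when
   expand_leftover_cleanups is invoked, as noted above.  */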
2074
2075 /* Arrange for __terminate to be called if there is an unhandled throw
2076 from within E. */
2077
2078 tree
2079 protect_with_terminate (e)
2080 tree e;
2081 {
2082 /* We only need to do this when using setjmp/longjmp EH and the
2083 language requires it, as otherwise we protect all of the handlers
2084 at once, if we need to. */
2085 if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
2086 {
2087 tree handler, result;
2088
2089 /* All cleanups must be on the function_obstack. */
2090 push_obstacks_nochange ();
2091 resume_temporary_allocation ();
2092
2093 handler = make_node (RTL_EXPR);
2094 TREE_TYPE (handler) = void_type_node;
2095 RTL_EXPR_RTL (handler) = const0_rtx;
2096 TREE_SIDE_EFFECTS (handler) = 1;
2097 start_sequence_for_rtl_expr (handler);
2098
2099 emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
2100 emit_barrier ();
2101
2102 RTL_EXPR_SEQUENCE (handler) = get_insns ();
2103 end_sequence ();
2104
2105 result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
2106 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
2107 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2108 TREE_READONLY (result) = TREE_READONLY (e);
2109
2110 pop_obstacks ();
2111
2112 e = result;
2113 }
2114
2115 return e;
2116 }
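
/* The transformation performed above can be pictured as (a sketch):

       E   ==>   TRY_CATCH_EXPR <E, RTL_EXPR: call __terminate; barrier>

   That is, E keeps its value and side effects, but an unhandled throw out
   of E now reaches a handler that simply calls __terminate.  */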
2117 \f
2118 /* The exception table that we build that is used for looking up and
2119 dispatching exceptions, the current number of entries, and its
2120 maximum size before we have to extend it.
2121
2122 The number in eh_table is the code label number of the exception
2123 handler for the region. This is added by add_eh_table_entry and
2124 used by output_exception_table_entry. */
2125
2126 static int *eh_table = NULL;
2127 static int eh_table_size = 0;
2128 static int eh_table_max_size = 0;
2129
2130 /* Note the need for an exception table entry for region N. If we
2131 don't need to output an explicit exception table, avoid all of the
2132 extra work.
2133
2134 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
2135 (Or NOTE_INSN_EH_REGION_END sometimes)
2136 N is the NOTE_EH_HANDLER of the note, which comes from the code
2137 label number of the exception handler for the region. */
2138
2139 void
2140 add_eh_table_entry (n)
2141 int n;
2142 {
2143 #ifndef OMIT_EH_TABLE
2144 if (eh_table_size >= eh_table_max_size)
2145 {
2146 if (eh_table)
2147 {
2148 eh_table_max_size += eh_table_max_size>>1;
2149
2150 if (eh_table_max_size < 0)
2151 abort ();
2152
2153 eh_table = (int *) xrealloc (eh_table,
2154 eh_table_max_size * sizeof (int));
2155 }
2156 else
2157 {
2158 eh_table_max_size = 252;
2159 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
2160 }
2161 }
2162 eh_table[eh_table_size++] = n;
2163 #endif
2164 }
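
/* As a worked example of the growth policy above: the table starts with
   room for 252 entries and then grows by half each time it fills, so its
   capacity runs 252, 378, 567, 850, ... until the size computation
   overflows (which the abort above catches).  */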
2165
2166 /* Return a non-zero value if we need to output an exception table.
2167
2168 On some platforms, we don't have to output a table explicitly.
2169 A zero return doesn't mean there is no table, only that we have no entries to output. */
2170
2171 int
2172 exception_table_p ()
2173 {
2174 if (eh_table)
2175 return 1;
2176
2177 return 0;
2178 }
2179
2180 /* Output the entry of the exception table corresponding to the
2181 exception region numbered N to file FILE.
2182
2183 N is the code label number corresponding to the handler of the
2184 region. */
2185
2186 static void
2187 output_exception_table_entry (file, n)
2188 FILE *file;
2189 int n;
2190 {
2191 char buf[256];
2192 rtx sym;
2193 struct handler_info *handler = get_first_handler (n);
2194 int index = find_func_region (n);
2195 rtx rethrow;
2196
2197 /* form and emit the rethrow label, if needed */
2198 rethrow = function_eh_regions[index].rethrow_label;
2199 if (rethrow != NULL_RTX && !flag_new_exceptions)
2200 rethrow = NULL_RTX;
2201 if (rethrow != NULL_RTX && handler == NULL)
2202 if (! function_eh_regions[index].rethrow_ref)
2203 rethrow = NULL_RTX;
2204
2205
2206 for ( ; handler != NULL || rethrow != NULL_RTX; handler = handler->next)
2207 {
2208 /* rethrow label should indicate the LAST entry for a region */
2209 if (rethrow != NULL_RTX && (handler == NULL || handler->next == NULL))
2210 {
2211 ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", n);
2212 assemble_label(buf);
2213 rethrow = NULL_RTX;
2214 }
2215
2216 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
2217 sym = gen_rtx_SYMBOL_REF (Pmode, buf);
2218 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
2219
2220 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
2221 sym = gen_rtx_SYMBOL_REF (Pmode, buf);
2222 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
2223
2224 if (handler == NULL)
2225 assemble_integer (GEN_INT (0), POINTER_SIZE / BITS_PER_UNIT, 1);
2226 else
2227 {
2228 ASM_GENERATE_INTERNAL_LABEL (buf, "L", handler->handler_number);
2229 sym = gen_rtx_SYMBOL_REF (Pmode, buf);
2230 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
2231 }
2232
2233 if (flag_new_exceptions)
2234 {
2235 if (handler == NULL || handler->type_info == NULL)
2236 assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
2237 else
2238 if (handler->type_info == CATCH_ALL_TYPE)
2239 assemble_integer (GEN_INT (CATCH_ALL_TYPE),
2240 POINTER_SIZE / BITS_PER_UNIT, 1);
2241 else
2242 output_constant ((tree)(handler->type_info),
2243 POINTER_SIZE / BITS_PER_UNIT);
2244 }
2245 putc ('\n', file); /* blank line */
2246 /* We only output the first label under the old scheme */
2247 if (! flag_new_exceptions || handler == NULL)
2248 break;
2249 }
2250 }
2251
2252 /* Output the exception table if we have and need one. */
2253
2254 static short language_code = 0;
2255 static short version_code = 0;
2256
2257 /* This routine will set the language code for exceptions. */
2258 void
2259 set_exception_lang_code (code)
2260 int code;
2261 {
2262 language_code = code;
2263 }
2264
2265 /* This routine will set the language version code for exceptions. */
2266 void
2267 set_exception_version_code (code)
2268 int code;
2269 {
2270 version_code = code;
2271 }
2272
2273
2274 void
2275 output_exception_table ()
2276 {
2277 int i;
2278 char buf[256];
2279 extern FILE *asm_out_file;
2280
2281 if (! doing_eh (0) || ! eh_table)
2282 return;
2283
2284 exception_section ();
2285
2286 /* Beginning marker for table. */
2287 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
2288 assemble_label ("__EXCEPTION_TABLE__");
2289
2290 if (flag_new_exceptions)
2291 {
2292 assemble_integer (GEN_INT (NEW_EH_RUNTIME),
2293 POINTER_SIZE / BITS_PER_UNIT, 1);
2294 assemble_integer (GEN_INT (language_code), 2 , 1);
2295 assemble_integer (GEN_INT (version_code), 2 , 1);
2296
2297 /* Add enough padding to make sure the table aligns on a pointer boundary. */
2298 i = GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT - 4;
2299 for ( ; i < 0; i = i + GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT)
2300 ;
2301 if (i != 0)
2302 assemble_integer (const0_rtx, i , 1);
2303
2304 /* Generate the label for offset calculations on rethrows */
2305 ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", 0);
2306 assemble_label(buf);
2307 }
2308
2309 for (i = 0; i < eh_table_size; ++i)
2310 output_exception_table_entry (asm_out_file, eh_table[i]);
2311
2312 free (eh_table);
2313 clear_function_eh_region ();
2314
2315 /* Ending marker for table. */
2316 /* Generate the label for end of table. */
2317 ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", CODE_LABEL_NUMBER (final_rethrow));
2318 assemble_label(buf);
2319 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
2320
2321 /* For binary compatibility, the old __throw checked the second
2322 position for a -1, so we should output at least 2 -1's */
2323 if (! flag_new_exceptions)
2324 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
2325
2326 putc ('\n', asm_out_file); /* blank line */
2327 }
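
/* A sketch of the table layout emitted above under flag_new_exceptions
   (fields are pointer-sized unless noted; this is a reading of the code
   above, not a normative format description):

       __EXCEPTION_TABLE__:
           NEW_EH_RUNTIME                 runtime identification word
           language_code, version_code    two 2-byte fields, then padding
                                          to the next pointer boundary
       LRTH0:                             base label for rethrow offsets
           for each region N, one row per handler:
               LRTHn                      label on the last row only, and
                                          only if the region is rethrown to
               LEHBn  LEHEn               region begin and end addresses
               Ln or 0                    handler address
               type_info, CATCH_ALL_TYPE, or 0
       LRTH<final_rethrow>:
           -1                             end marker (emitted twice under
                                          the old scheme, as noted above)
*/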
2328 \f
2329 /* Emit code to get EH context.
2330
2331 We have to scan through the code to find possible EH context registers.
2332 Inlined functions may use them too, and thus we'll have to be able
2333 to change them as well.
2334
2335 This is done only if using exceptions_via_longjmp. */
2336
2337 void
2338 emit_eh_context ()
2339 {
2340 rtx insn;
2341 rtx ehc = 0;
2342
2343 if (! doing_eh (0))
2344 return;
2345
2346 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2347 if (GET_CODE (insn) == INSN
2348 && GET_CODE (PATTERN (insn)) == USE)
2349 {
2350 rtx reg = find_reg_note (insn, REG_EH_CONTEXT, 0);
2351 if (reg)
2352 {
2353 rtx insns;
2354
2355 start_sequence ();
2356
2357 /* If this is the first use insn, emit the call here. This
2358 will always be at the top of our function, because if
2359 expand_inline_function notices a REG_EH_CONTEXT note, it
2360 adds a use insn to this function as well. */
2361 if (ehc == 0)
2362 ehc = call_get_eh_context ();
2363
2364 emit_move_insn (XEXP (reg, 0), ehc);
2365 insns = get_insns ();
2366 end_sequence ();
2367
2368 emit_insns_before (insns, insn);
2369 }
2370 }
2371 }
2372
2373 /* Scan the current insns and build a list of handler labels. The
2374 resulting list is placed in the global variable exception_handler_labels.
2375
2376 It is called after the last exception handling region is added to
2377 the current function (when the rtl is almost all built for the
2378 current function) and before the jump optimization pass. */
2379
2380 void
2381 find_exception_handler_labels ()
2382 {
2383 rtx insn;
2384
2385 exception_handler_labels = NULL_RTX;
2386
2387 /* If we aren't doing exception handling, there isn't much to check. */
2388 if (! doing_eh (0))
2389 return;
2390
2391 /* For each start of a region, add its label to the list. */
2392
2393 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2394 {
2395 struct handler_info* ptr;
2396 if (GET_CODE (insn) == NOTE
2397 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2398 {
2399 ptr = get_first_handler (NOTE_EH_HANDLER (insn));
2400 for ( ; ptr; ptr = ptr->next)
2401 {
2402 /* make sure label isn't in the list already */
2403 rtx x;
2404 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2405 if (XEXP (x, 0) == ptr->handler_label)
2406 break;
2407 if (! x)
2408 exception_handler_labels = gen_rtx_EXPR_LIST (VOIDmode,
2409 ptr->handler_label, exception_handler_labels);
2410 }
2411 }
2412 }
2413 }
2414
2415 /* Return a value of 1 if the parameter label number is an exception handler
2416 label. Return 0 otherwise. */
2417
2418 int
2419 is_exception_handler_label (lab)
2420 int lab;
2421 {
2422 rtx x;
2423 for (x = exception_handler_labels ; x ; x = XEXP (x, 1))
2424 if (lab == CODE_LABEL_NUMBER (XEXP (x, 0)))
2425 return 1;
2426 return 0;
2427 }
2428
2429 /* Perform sanity checking on the exception_handler_labels list.
2430
2431 Can be called after find_exception_handler_labels is called to
2432 build the list of exception handlers for the current function and
2433 before we finish processing the current function. */
2434
2435 void
2436 check_exception_handler_labels ()
2437 {
2438 rtx insn, insn2;
2439
2440 /* If we aren't doing exception handling, there isn't much to check. */
2441 if (! doing_eh (0))
2442 return;
2443
2444 /* Make sure there is no more than 1 copy of a label */
2445 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
2446 {
2447 int count = 0;
2448 for (insn2 = exception_handler_labels; insn2; insn2 = XEXP (insn2, 1))
2449 if (XEXP (insn, 0) == XEXP (insn2, 0))
2450 count++;
2451 if (count != 1)
2452 warning ("Counted %d copies of EH region %d in list.\n", count,
2453 CODE_LABEL_NUMBER (XEXP (insn, 0)));
2454 }
2455
2456 }
2457
2458 /* Mark the children of NODE for GC. */
2459
2460 static void
2461 mark_eh_node (node)
2462 struct eh_node *node;
2463 {
2464 while (node)
2465 {
2466 if (node->entry)
2467 {
2468 ggc_mark_rtx (node->entry->outer_context);
2469 ggc_mark_rtx (node->entry->exception_handler_label);
2470 ggc_mark_tree (node->entry->finalization);
2471 ggc_mark_rtx (node->entry->false_label);
2472 ggc_mark_rtx (node->entry->rethrow_label);
2473 }
2474 node = node ->chain;
2475 }
2476 }
2477
2478 /* Mark S for GC. */
2479
2480 static void
2481 mark_eh_stack (s)
2482 struct eh_stack *s;
2483 {
2484 if (s)
2485 mark_eh_node (s->top);
2486 }
2487
2488 /* Mark Q for GC. */
2489
2490 static void
2491 mark_eh_queue (q)
2492 struct eh_queue *q;
2493 {
2494 while (q)
2495 {
2496 mark_eh_node (q->head);
2497 q = q->next;
2498 }
2499 }
2500
2501 /* Mark NODE for GC. A label_node contains a union containing either
2502 a tree or an rtx. This label_node will contain a tree. */
2503
2504 static void
2505 mark_tree_label_node (node)
2506 struct label_node *node;
2507 {
2508 while (node)
2509 {
2510 ggc_mark_tree (node->u.tlabel);
2511 node = node->chain;
2512 }
2513 }
2514
2515 /* Mark EH for GC. */
2516
2517 void
2518 mark_eh_status (eh)
2519 struct eh_status *eh;
2520 {
2521 if (eh == 0)
2522 return;
2523
2524 mark_eh_stack (&eh->x_ehstack);
2525 mark_eh_stack (&eh->x_catchstack);
2526 mark_eh_queue (eh->x_ehqueue);
2527 ggc_mark_rtx (eh->x_catch_clauses);
2528
2529 lang_mark_false_label_stack (eh->x_false_label_stack);
2530 mark_tree_label_node (eh->x_caught_return_label_stack);
2531
2532 ggc_mark_tree (eh->x_protect_list);
2533 ggc_mark_rtx (eh->ehc);
2534 ggc_mark_rtx (eh->x_eh_return_stub_label);
2535 }
2536
2537 /* Mark ARG (which is really a struct func_eh_entry**) for GC. */
2538
2539 static void
2540 mark_func_eh_entry (arg)
2541 void *arg;
2542 {
2543 struct func_eh_entry *fee;
2544 struct handler_info *h;
2545 int i;
2546
2547 fee = *((struct func_eh_entry **) arg);
2548
2549 for (i = 0; i < current_func_eh_entry; ++i)
2550 {
2551 ggc_mark_rtx (fee->rethrow_label);
2552 for (h = fee->handlers; h; h = h->next)
2553 {
2554 ggc_mark_rtx (h->handler_label);
2555 if (h->type_info != CATCH_ALL_TYPE)
2556 ggc_mark_tree ((tree) h->type_info);
2557 }
2558
2559 /* Skip to the next entry in the array. */
2560 ++fee;
2561 }
2562 }
2563
2564 /* This group of functions initializes the exception handling data
2565 structures at the start of the compilation, initializes the data
2566 structures at the start of a function, and saves and restores the
2567 exception handling data structures for the start/end of a nested
2568 function. */
2569
2570 /* Toplevel initialization for EH things. */
2571
2572 void
2573 init_eh ()
2574 {
2575 first_rethrow_symbol = create_rethrow_ref (0);
2576 final_rethrow = gen_exception_label ();
2577 last_rethrow_symbol = create_rethrow_ref (CODE_LABEL_NUMBER (final_rethrow));
2578
2579 ggc_add_rtx_root (&exception_handler_labels, 1);
2580 ggc_add_rtx_root (&eh_return_context, 1);
2581 ggc_add_rtx_root (&eh_return_stack_adjust, 1);
2582 ggc_add_rtx_root (&eh_return_handler, 1);
2583 ggc_add_rtx_root (&first_rethrow_symbol, 1);
2584 ggc_add_rtx_root (&final_rethrow, 1);
2585 ggc_add_rtx_root (&last_rethrow_symbol, 1);
2586 ggc_add_root (&function_eh_regions, 1, sizeof (function_eh_regions),
2587 mark_func_eh_entry);
2588 }
2589
2590 /* Initialize the per-function EH information. */
2591
2592 void
2593 init_eh_for_function ()
2594 {
2595 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
2596 ehqueue = (struct eh_queue *) xcalloc (1, sizeof (struct eh_queue));
2597 eh_return_context = NULL_RTX;
2598 eh_return_stack_adjust = NULL_RTX;
2599 eh_return_handler = NULL_RTX;
2600 }
2601
2602 void
2603 free_eh_status (f)
2604 struct function *f;
2605 {
2606 free (f->eh->x_ehqueue);
2607 free (f->eh);
2608 f->eh = NULL;
2609 }
2610 \f
2611 /* This section is for the exception handling specific optimization
2612 pass. First are the internal routines, and then the main
2613 optimization pass. */
2614
2615 /* Determine if the given INSN can throw an exception. */
2616
2617 static int
2618 can_throw (insn)
2619 rtx insn;
2620 {
2621 /* Calls can always potentially throw exceptions, unless they have
2622 a REG_EH_REGION note with a value of 0 or less. */
2623 if (GET_CODE (insn) == CALL_INSN)
2624 {
2625 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2626 if (!note || XINT (XEXP (note, 0), 0) > 0)
2627 return 1;
2628 }
2629
2630 if (asynchronous_exceptions)
2631 {
2632 /* If we wanted asynchronous exceptions, then everything but NOTEs
2633 and CODE_LABELs could throw. */
2634 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2635 return 1;
2636 }
2637
2638 return 0;
2639 }
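
/* For example (a sketch of the note semantics used above): a CALL_INSN with
   no REG_EH_REGION note, or one whose value is positive, is assumed to be
   able to throw; a note value of zero or less marks a call that cannot
   throw, and so does not by itself keep its region from being deleted by
   scan_region below.  */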
2640
2641 /* Scan an exception region looking for the matching end and then
2642 remove it if possible. INSN is the start of the region, N is the
2643 region number, and DELETE_OUTER is used to note whether anything in
2644 this region can throw.
2645
2646 Regions are removed if they cannot possibly catch an exception.
2647 This is determined by invoking can_throw on each insn within the
2648 region; if can_throw returns true for any of the instructions, the
2649 region can catch an exception, since there is an insn within the
2650 region that is capable of throwing an exception.
2651
2652 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2653 calls abort if it can't find one.
2654
2655 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG note, or if N doesn't
2656 correspond to the region number, or if DELETE_OUTER is NULL. */
2657
2658 static rtx
2659 scan_region (insn, n, delete_outer)
2660 rtx insn;
2661 int n;
2662 int *delete_outer;
2663 {
2664 rtx start = insn;
2665
2666 /* Assume we can delete the region. */
2667 int delete = 1;
2668
2669 /* Can't delete something which is rethrown to. */
2670 if (rethrow_used (n))
2671 delete = 0;
2672
2673 if (insn == NULL_RTX
2674 || GET_CODE (insn) != NOTE
2675 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
2676 || NOTE_EH_HANDLER (insn) != n
2677 || delete_outer == NULL)
2678 abort ();
2679
2680 insn = NEXT_INSN (insn);
2681
2682 /* Look for the matching end. */
2683 while (! (GET_CODE (insn) == NOTE
2684 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2685 {
2686 /* If anything can throw, we can't remove the region. */
2687 if (delete && can_throw (insn))
2688 {
2689 delete = 0;
2690 }
2691
2692 /* Watch out for and handle nested regions. */
2693 if (GET_CODE (insn) == NOTE
2694 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2695 {
2696 insn = scan_region (insn, NOTE_EH_HANDLER (insn), &delete);
2697 }
2698
2699 insn = NEXT_INSN (insn);
2700 }
2701
2702 /* The _BEG/_END NOTEs must match and nest. */
2703 if (NOTE_EH_HANDLER (insn) != n)
2704 abort ();
2705
2706 /* If anything in this exception region can throw, we can throw. */
2707 if (! delete)
2708 *delete_outer = 0;
2709 else
2710 {
2711 /* Delete the start and end of the region. */
2712 delete_insn (start);
2713 delete_insn (insn);
2714
2715 /* We no longer remove labels here, since flow will now remove any
2716 handler which cannot be called any more. */
2717
2718 #if 0
2719 /* Only do this part if we have built the exception handler
2720 labels. */
2721 if (exception_handler_labels)
2722 {
2723 rtx x, *prev = &exception_handler_labels;
2724
2725 /* Find it in the list of handlers. */
2726 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2727 {
2728 rtx label = XEXP (x, 0);
2729 if (CODE_LABEL_NUMBER (label) == n)
2730 {
2731 /* If we are the last reference to the handler,
2732 delete it. */
2733 if (--LABEL_NUSES (label) == 0)
2734 delete_insn (label);
2735
2736 if (optimize)
2737 {
2738 /* Remove it from the list of exception handler
2739 labels, if we are optimizing. If we are not, then
2740 leave it in the list, as we are not really going to
2741 remove the region. */
2742 *prev = XEXP (x, 1);
2743 XEXP (x, 1) = 0;
2744 XEXP (x, 0) = 0;
2745 }
2746
2747 break;
2748 }
2749 prev = &XEXP (x, 1);
2750 }
2751 }
2752 #endif
2753 }
2754 return insn;
2755 }
2756
2757 /* Perform various interesting optimizations for exception handling
2758 code.
2759
2760 We look for empty exception regions and make them go (away). The
2761 jump optimization code will remove the handler if nothing else uses
2762 it. */
2763
2764 void
2765 exception_optimize ()
2766 {
2767 rtx insn;
2768 int n;
2769
2770 /* Remove empty regions. */
2771 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2772 {
2773 if (GET_CODE (insn) == NOTE
2774 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2775 {
2776 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2777 insn, we will indirectly skip through all the insns
2778 in between. We are also guaranteed that the value of insn
2779 returned will be valid, as otherwise scan_region won't
2780 return. */
2781 insn = scan_region (insn, NOTE_EH_HANDLER (insn), &n);
2782 }
2783 }
2784 }
2785
2786 /* This function determines whether any of the exception regions in the
2787 current function are targets of a rethrow or not, and sets the
2788 reference flag accordingly. */
2789 void
2790 update_rethrow_references ()
2791 {
2792 rtx insn;
2793 int x, region;
2794 int *saw_region, *saw_rethrow;
2795
2796 if (!flag_new_exceptions)
2797 return;
2798
2799 saw_region = (int *) xcalloc (current_func_eh_entry, sizeof (int));
2800 saw_rethrow = (int *) xcalloc (current_func_eh_entry, sizeof (int));
2801
2802 /* Determine what regions exist, and whether there are any rethrows
2803 to those regions or not. */
2804 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2805 if (GET_CODE (insn) == CALL_INSN)
2806 {
2807 rtx note = find_reg_note (insn, REG_EH_RETHROW, NULL_RTX);
2808 if (note)
2809 {
2810 region = eh_region_from_symbol (XEXP (note, 0));
2811 region = find_func_region (region);
2812 saw_rethrow[region] = 1;
2813 }
2814 }
2815 else
2816 if (GET_CODE (insn) == NOTE)
2817 {
2818 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2819 {
2820 region = find_func_region (NOTE_EH_HANDLER (insn));
2821 saw_region[region] = 1;
2822 }
2823 }
2824
2825 /* For any regions we did see, set the referenced flag. */
2826 for (x = 0; x < current_func_eh_entry; x++)
2827 if (saw_region[x])
2828 function_eh_regions[x].rethrow_ref = saw_rethrow[x];
2829
2830 /* Clean up. */
2831 free (saw_region);
2832 free (saw_rethrow);
2833 }
2834 \f
2835 /* Various hooks for the DWARF 2 __throw routine. */
2836
2837 /* Do any necessary initialization to access arbitrary stack frames.
2838 On the SPARC, this means flushing the register windows. */
2839
2840 void
2841 expand_builtin_unwind_init ()
2842 {
2843 /* Set this so all the registers get saved in our frame; we need to be
2844 able to copy the saved values for any registers from frames we unwind. */
2845 current_function_has_nonlocal_label = 1;
2846
2847 #ifdef SETUP_FRAME_ADDRESSES
2848 SETUP_FRAME_ADDRESSES ();
2849 #endif
2850 }
2851
2852 /* Given a value extracted from the return address register or stack slot,
2853 return the actual address encoded in that value. */
2854
2855 rtx
2856 expand_builtin_extract_return_addr (addr_tree)
2857 tree addr_tree;
2858 {
2859 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2860 return eh_outer_context (addr);
2861 }
2862
2863 /* Given an actual address in addr_tree, do any necessary encoding
2864 and return the value to be stored in the return address register or
2865 stack slot so the epilogue will return to that address. */
2866
2867 rtx
2868 expand_builtin_frob_return_addr (addr_tree)
2869 tree addr_tree;
2870 {
2871 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2872 #ifdef RETURN_ADDR_OFFSET
2873 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2874 #endif
2875 return addr;
2876 }
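
/* As a sketch of the intended relationship between the two builtins above:
   a value read from the return address register or slot decodes through
   expand_builtin_extract_return_addr to the actual address, and an actual
   address passes through expand_builtin_frob_return_addr before being
   stored back, so the two act as encoding/decoding counterparts (modulo
   RETURN_ADDR_OFFSET and eh_outer_context, which are target-dependent).  */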
2877
2878 /* Choose three registers for communication between the main body of
2879 __throw and the epilogue (or eh stub) and the exception handler.
2880 We must do this with hard registers because the epilogue itself
2881 will be generated after reload, at which point we may not reference
2882 pseudos at all.
2883
2884 The first passes the exception context to the handler. For this
2885 we use the return value register for a void*.
2886
2887 The second holds the stack pointer value to be restored. For
2888 this we use the static chain register if it exists and is different
2889 from the context register, otherwise some arbitrary call-clobbered register.
2890
2891 The third holds the address of the handler itself. Here we use
2892 some arbitrary call-clobbered register. */
2893
2894 static void
2895 eh_regs (pcontext, psp, pra, outgoing)
2896 rtx *pcontext, *psp, *pra;
2897 int outgoing ATTRIBUTE_UNUSED;
2898 {
2899 rtx rcontext, rsp, rra;
2900 int i;
2901
2902 #ifdef FUNCTION_OUTGOING_VALUE
2903 if (outgoing)
2904 rcontext = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2905 current_function_decl);
2906 else
2907 #endif
2908 rcontext = FUNCTION_VALUE (build_pointer_type (void_type_node),
2909 current_function_decl);
2910
2911 #ifdef STATIC_CHAIN_REGNUM
2912 if (outgoing)
2913 rsp = static_chain_incoming_rtx;
2914 else
2915 rsp = static_chain_rtx;
2916 if (REGNO (rsp) == REGNO (rcontext))
2917 #endif /* STATIC_CHAIN_REGNUM */
2918 rsp = NULL_RTX;
2919
2920 if (rsp == NULL_RTX)
2921 {
2922 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2923 if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (rcontext))
2924 break;
2925 if (i == FIRST_PSEUDO_REGISTER)
2926 abort();
2927
2928 rsp = gen_rtx_REG (Pmode, i);
2929 }
2930
2931 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2932 if (call_used_regs[i] && ! fixed_regs[i]
2933 && i != REGNO (rcontext) && i != REGNO (rsp))
2934 break;
2935 if (i == FIRST_PSEUDO_REGISTER)
2936 abort();
2937
2938 rra = gen_rtx_REG (Pmode, i);
2939
2940 *pcontext = rcontext;
2941 *psp = rsp;
2942 *pra = rra;
2943 }
2944
2945 /* Retrieve the register which contains the pointer to the eh_context
2946 structure set up by __throw. */
2947
2948 #if 0
2949 rtx
2950 get_reg_for_handler ()
2951 {
2952 rtx reg1;
2953 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2954 current_function_decl);
2955 return reg1;
2956 }
2957 #endif
2958
2959 /* Set up the epilogue with the magic bits we'll need to return to the
2960 exception handler. */
2961
2962 void
2963 expand_builtin_eh_return (context, stack, handler)
2964 tree context, stack, handler;
2965 {
2966 if (eh_return_context)
2967 error("Duplicate call to __builtin_eh_return");
2968
2969 eh_return_context
2970 = copy_to_reg (expand_expr (context, NULL_RTX, VOIDmode, 0));
2971 eh_return_stack_adjust
2972 = copy_to_reg (expand_expr (stack, NULL_RTX, VOIDmode, 0));
2973 eh_return_handler
2974 = copy_to_reg (expand_expr (handler, NULL_RTX, VOIDmode, 0));
2975 }
2976
2977 void
2978 expand_eh_return ()
2979 {
2980 rtx reg1, reg2, reg3;
2981 rtx stub_start, after_stub;
2982 rtx ra, tmp;
2983
2984 if (!eh_return_context)
2985 return;
2986
2987 current_function_cannot_inline = N_("function uses __builtin_eh_return");
2988
2989 eh_regs (&reg1, &reg2, &reg3, 1);
2990 #ifdef POINTERS_EXTEND_UNSIGNED
2991 eh_return_context = convert_memory_address (Pmode, eh_return_context);
2992 eh_return_stack_adjust =
2993 convert_memory_address (Pmode, eh_return_stack_adjust);
2994 eh_return_handler = convert_memory_address (Pmode, eh_return_handler);
2995 #endif
2996 emit_move_insn (reg1, eh_return_context);
2997 emit_move_insn (reg2, eh_return_stack_adjust);
2998 emit_move_insn (reg3, eh_return_handler);
2999
3000 /* Talk directly to the target's epilogue code when possible. */
3001
3002 #ifdef HAVE_eh_epilogue
3003 if (HAVE_eh_epilogue)
3004 {
3005 emit_insn (gen_eh_epilogue (reg1, reg2, reg3));
3006 return;
3007 }
3008 #endif
3009
3010 /* Otherwise, use the same stub technique we had before. */
3011
3012 eh_return_stub_label = stub_start = gen_label_rtx ();
3013 after_stub = gen_label_rtx ();
3014
3015 /* Set the return address to the stub label. */
3016
3017 ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
3018 0, hard_frame_pointer_rtx);
3019 if (GET_CODE (ra) == REG && REGNO (ra) >= FIRST_PSEUDO_REGISTER)
3020 abort();
3021
3022 tmp = memory_address (Pmode, gen_rtx_LABEL_REF (Pmode, stub_start));
3023 #ifdef RETURN_ADDR_OFFSET
3024 tmp = plus_constant (tmp, -RETURN_ADDR_OFFSET);
3025 #endif
3026 tmp = force_operand (tmp, ra);
3027 if (tmp != ra)
3028 emit_move_insn (ra, tmp);
3029
3030 /* Indicate that the registers are in fact used. */
3031 emit_insn (gen_rtx_USE (VOIDmode, reg1));
3032 emit_insn (gen_rtx_USE (VOIDmode, reg2));
3033 emit_insn (gen_rtx_USE (VOIDmode, reg3));
3034 if (GET_CODE (ra) == REG)
3035 emit_insn (gen_rtx_USE (VOIDmode, ra));
3036
3037 /* Generate the stub. */
3038
3039 emit_jump (after_stub);
3040 emit_label (stub_start);
3041
3042 eh_regs (&reg1, &reg2, &reg3, 0);
3043 adjust_stack (reg2);
3044 emit_indirect_jump (reg3);
3045
3046 emit_label (after_stub);
3047 }
3048 \f
3049
3050 /* This contains the code required to verify whether arbitrary instructions
3051 are in the same exception region. */
3052
3053 static int *insn_eh_region = (int *)0;
3054 static int maximum_uid;
3055
3056 static void
3057 set_insn_eh_region (first, region_num)
3058 rtx *first;
3059 int region_num;
3060 {
3061 rtx insn;
3062 int rnum;
3063
3064 for (insn = *first; insn; insn = NEXT_INSN (insn))
3065 {
3066 if ((GET_CODE (insn) == NOTE)
3067 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG))
3068 {
3069 rnum = NOTE_EH_HANDLER (insn);
3070 insn_eh_region[INSN_UID (insn)] = rnum;
3071 insn = NEXT_INSN (insn);
3072 set_insn_eh_region (&insn, rnum);
3073 /* Upon return, insn points to the EH_REGION_END of the nested region. */
3074 continue;
3075 }
3076 insn_eh_region[INSN_UID (insn)] = region_num;
3077 if ((GET_CODE (insn) == NOTE) &&
3078 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
3079 break;
3080 }
3081 *first = insn;
3082 }
3083
3084 /* Free the insn table, and make sure it cannot be used again. */
3085
3086 void
3087 free_insn_eh_region ()
3088 {
3089 if (!doing_eh (0))
3090 return;
3091
3092 if (insn_eh_region)
3093 {
3094 free (insn_eh_region);
3095 insn_eh_region = (int *)0;
3096 }
3097 }
3098
3099 /* Initialize the table. max_uid must be calculated and handed into
3100 this routine. If it is unavailable, passing a value of 0 will
3101 cause this routine to calculate it as well. */
3102
3103 void
3104 init_insn_eh_region (first, max_uid)
3105 rtx first;
3106 int max_uid;
3107 {
3108 rtx insn;
3109
3110 if (!doing_eh (0))
3111 return;
3112
3113 if (insn_eh_region)
3114 free_insn_eh_region();
3115
3116 if (max_uid == 0)
3117 for (insn = first; insn; insn = NEXT_INSN (insn))
3118 if (INSN_UID (insn) > max_uid) /* find largest UID */
3119 max_uid = INSN_UID (insn);
3120
3121 maximum_uid = max_uid;
3122 insn_eh_region = (int *) xmalloc ((max_uid + 1) * sizeof (int));
3123 insn = first;
3124 set_insn_eh_region (&insn, 0);
3125 }
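
/* A minimal usage sketch for this little table (illustrative only):

       init_insn_eh_region (get_insns (), 0);
       ...
       if (in_same_eh_region (insn1, insn2))
         ... the two insns share an exception region ...
       ...
       free_insn_eh_region ();

   Passing 0 as MAX_UID makes init_insn_eh_region compute the largest UID
   itself, as the comment above notes.  */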
3126
3127
3128 /* Check whether 2 instructions are within the same region. */
3129
3130 int
3131 in_same_eh_region (insn1, insn2)
3132 rtx insn1, insn2;
3133 {
3134 int ret, uid1, uid2;
3135
3136 /* If no exceptions, instructions are always in same region. */
3137 if (!doing_eh (0))
3138 return 1;
3139
3140 /* If the table isn't allocated, assume the worst. */
3141 if (!insn_eh_region)
3142 return 0;
3143
3144 uid1 = INSN_UID (insn1);
3145 uid2 = INSN_UID (insn2);
3146
3147 /* If instructions have been allocated beyond the end of the table,
3148 either the table is out of date, or this is a late addition;
3149 assume the worst. */
3150 if (uid1 > maximum_uid || uid2 > maximum_uid)
3151 return 0;
3152
3153 ret = (insn_eh_region[uid1] == insn_eh_region[uid2]);
3154 return ret;
3155 }
3156 \f
3157
3158 /* This function will initialize the handler list for a specified block.
3159 It may recursively call itself if the outer block hasn't been processed
3160 yet. At some point in the future we can trim out handlers which we
3161 know cannot be called. (i.e., if a block has an INT type handler,
3162 control will never be passed to an outer INT type handler). */
3163 static void
3164 process_nestinfo (block, info, nested_eh_region)
3165 int block;
3166 eh_nesting_info *info;
3167 int *nested_eh_region;
3168 {
3169 handler_info *ptr, *last_ptr = NULL;
3170 int x, y, count = 0;
3171 int extra = 0;
3172 handler_info **extra_handlers = 0;
3173 int index = info->region_index[block];
3174
3175 /* If we've already processed this block, simply return. */
3176 if (info->num_handlers[index] > 0)
3177 return;
3178
3179 for (ptr = get_first_handler (block); ptr; last_ptr = ptr, ptr = ptr->next)
3180 count++;
3181
3182 /* Pick up any information from the next outer region. It will already
3183 contain a summary of itself and all regions outer to it. */
3184
3185 if (nested_eh_region [block] != 0)
3186 {
3187 int nested_index = info->region_index[nested_eh_region[block]];
3188 process_nestinfo (nested_eh_region[block], info, nested_eh_region);
3189 extra = info->num_handlers[nested_index];
3190 extra_handlers = info->handlers[nested_index];
3191 info->outer_index[index] = nested_index;
3192 }
3193
3194 /* If the last handler is either a CATCH_ALL or a cleanup, then we
3195 won't use the outer ones since we know control will not go past the
3196 catch-all or cleanup. */
3197
3198 if (last_ptr != NULL && (last_ptr->type_info == NULL
3199 || last_ptr->type_info == CATCH_ALL_TYPE))
3200 extra = 0;
3201
3202 info->num_handlers[index] = count + extra;
3203 info->handlers[index] = (handler_info **) xmalloc ((count + extra)
3204 * sizeof (handler_info **));
3205
3206 /* First put all our handlers into the list. */
3207 ptr = get_first_handler (block);
3208 for (x = 0; x < count; x++)
3209 {
3210 info->handlers[index][x] = ptr;
3211 ptr = ptr->next;
3212 }
3213
3214 /* Now add all the outer region handlers, if they aren't the same as
3215 one of the types in the current block. We won't worry about
3216 derived types yet, we'll just look for the exact type. */
3217 for (y = 0, x = 0; x < extra; x++)
3218 {
3219 int i, ok;
3220 ok = 1;
3221 /* Check to see if we have a type duplication. */
3222 for (i = 0; i < count; i++)
3223 if (info->handlers[index][i]->type_info == extra_handlers[x]->type_info)
3224 {
3225 ok = 0;
3226 /* Record one less handler. */
3227 (info->num_handlers[index])--;
3228 break;
3229 }
3230 if (ok)
3231 {
3232 info->handlers[index][y + count] = extra_handlers[x];
3233 y++;
3234 }
3235 }
3236 }
3237
3238 /* This function will allocate and initialize an eh_nesting_info structure.
3239 It returns a pointer to the completed data structure. If there are
3240 no exception regions, a NULL value is returned. */
3241 eh_nesting_info *
3242 init_eh_nesting_info ()
3243 {
3244 int *nested_eh_region;
3245 int region_count = 0;
3246 rtx eh_note = NULL_RTX;
3247 eh_nesting_info *info;
3248 rtx insn;
3249 int x;
3250
3251 info = (eh_nesting_info *) xmalloc (sizeof (eh_nesting_info));
3252 info->region_index = (int *) xcalloc ((max_label_num () + 1), sizeof (int));
3253 nested_eh_region = (int *) xcalloc (max_label_num () + 1, sizeof (int));
3254
3255 /* Create the nested_eh_region list. If indexed with a block number, it
3256 returns the block number of the next outermost region, if any.
3257 We can count the number of regions and initialize the region_index
3258 vector at the same time. */
3259 for (insn = get_insns(); insn; insn = NEXT_INSN (insn))
3260 {
3261 if (GET_CODE (insn) == NOTE)
3262 {
3263 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
3264 {
3265 int block = NOTE_EH_HANDLER (insn);
3266 region_count++;
3267 info->region_index[block] = region_count;
3268 if (eh_note)
3269 nested_eh_region [block] =
3270 NOTE_EH_HANDLER (XEXP (eh_note, 0));
3271 else
3272 nested_eh_region [block] = 0;
3273 eh_note = gen_rtx_EXPR_LIST (VOIDmode, insn, eh_note);
3274 }
3275 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
3276 eh_note = XEXP (eh_note, 1);
3277 }
3278 }
3279
3280 /* If there are no regions, wrap it up now. */
3281 if (region_count == 0)
3282 {
3283 free (info->region_index);
3284 free (info);
3285 free (nested_eh_region);
3286 return NULL;
3287 }
3288
3289 region_count++;
3290 info->handlers = (handler_info ***) xcalloc (region_count,
3291 sizeof (handler_info ***));
3292 info->num_handlers = (int *) xcalloc (region_count, sizeof (int));
3293 info->outer_index = (int *) xcalloc (region_count, sizeof (int));
3294
3295 /* Now initialize the handler lists for all exception blocks. */
3296 for (x = 0; x <= max_label_num (); x++)
3297 {
3298 if (info->region_index[x] != 0)
3299 process_nestinfo (x, info, nested_eh_region);
3300 }
3301 info->region_count = region_count;
3302
3303 /* Clean up. */
3304 free (nested_eh_region);
3305
3306 return info;
3307 }
3308
3309
3310 /* This function is used to retrieve the vector of handlers which
3311 can be reached by a given insn in a given exception region.
3312 BLOCK is the exception block the insn is in.
3313 INFO is the eh_nesting_info structure.
3314 INSN is the (optional) insn within the block. If insn is not NULL_RTX,
3315 it may contain reg notes which modify its throwing behavior, and
3316 these will be obeyed. If NULL_RTX is passed, then we simply return the
3317 handlers for block.
3318 HANDLERS is the address of a pointer to a vector of handler_info pointers.
3319 Upon return, this will have the handlers which can be reached by block.
3320 This function returns the number of elements in the handlers vector. */
3321 int
3322 reachable_handlers (block, info, insn, handlers)
3323 int block;
3324 eh_nesting_info *info;
3325 rtx insn ;
3326 handler_info ***handlers;
3327 {
3328 int index = 0;
3329 *handlers = NULL;
3330
3331 if (info == NULL)
3332 return 0;
3333 if (block > 0)
3334 index = info->region_index[block];
3335
3336 if (insn && GET_CODE (insn) == CALL_INSN)
3337 {
3338 /* RETHROWs specify a region number from which we are going to rethrow.
3339 This means we won't pass control to handlers in the specified
3340 region, but rather to any region OUTSIDE the specified region.
3341 We accomplish this by setting block to the outer_index of the
3342 specified region. */
3343 rtx note = find_reg_note (insn, REG_EH_RETHROW, NULL_RTX);
3344 if (note)
3345 {
3346 index = eh_region_from_symbol (XEXP (note, 0));
3347 index = info->region_index[index];
3348 if (index)
3349 index = info->outer_index[index];
3350 }
3351 else
3352 {
3353 /* If there is no rethrow, we look for a REG_EH_REGION, and
3354 we'll throw from that block. A value of 0 or less
3355 indicates that this insn cannot throw. */
3356 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3357 if (note)
3358 {
3359 int b = XINT (XEXP (note, 0), 0);
3360 if (b <= 0)
3361 index = 0;
3362 else
3363 index = info->region_index[b];
3364 }
3365 }
3366 }
3367 /* If we reach this point, and index is 0, there is no throw. */
3368 if (index == 0)
3369 return 0;
3370
3371 *handlers = info->handlers[index];
3372 return info->num_handlers[index];
3373 }
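
/* A sketch of the expected lifecycle for a pass that wants the handlers
   reachable from a call insn INSN in exception block BLOCK (INSN may be
   NULL_RTX to ignore its reg notes):

       eh_nesting_info *info = init_eh_nesting_info ();
       handler_info **handlers;
       int i, n;

       if (info != NULL)
         {
           n = reachable_handlers (block, info, insn, &handlers);
           for (i = 0; i < n; i++)
             ... look at handlers[i]->handler_label ...
           free_eh_nesting_info (info);
         }

   init_eh_nesting_info returns NULL when there are no exception regions,
   as documented above.  */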
3374
3375
3376 /* This function will free all memory associated with the eh_nesting info. */
3377
3378 void
3379 free_eh_nesting_info (info)
3380 eh_nesting_info *info;
3381 {
3382 int x;
3383 if (info != NULL)
3384 {
3385 if (info->region_index)
3386 free (info->region_index);
3387 if (info->num_handlers)
3388 free (info->num_handlers);
3389 if (info->outer_index)
3390 free (info->outer_index);
3391 if (info->handlers)
3392 {
3393 for (x = 0; x < info->region_count; x++)
3394 if (info->handlers[x])
3395 free (info->handlers[x]);
3396 free (info->handlers);
3397 }
3398 free (info);
3399 }
3400 }